diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index e594cc1aa805..e64889095549 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,4 +13,4 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python-mono-repo:latest
-  digest: sha256:c1a7cf36e5949106e7772393804ea1e19e38c691c8ad4d3af3faa13c3aedda73
+  digest: sha256:2504e243aeb46a96e5012409500fd35b01b86855bc4b916646d8b179b2908741
diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg
new file mode 100644
index 000000000000..175a4e8fd7a2
--- /dev/null
+++ b/.kokoro/continuous/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "google-cloud-python/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/google-cloud-python/.kokoro/system.sh"
+}
diff --git a/.kokoro/continuous/system.cfg b/.kokoro/continuous/system.cfg
new file mode 100644
index 000000000000..5ff31a60322b
--- /dev/null
+++ b/.kokoro/continuous/system.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+  key: "NOX_SESSION"
+  value: "system-3.11"
+}
diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg
new file mode 100644
index 000000000000..175a4e8fd7a2
--- /dev/null
+++ b/.kokoro/presubmit/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "google-cloud-python/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/google-cloud-python/.kokoro/system.sh"
+}
diff --git a/.kokoro/presubmit/system.cfg b/.kokoro/presubmit/system.cfg
new file mode 100644
index 000000000000..5ff31a60322b
--- /dev/null
+++ b/.kokoro/presubmit/system.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+  key: "NOX_SESSION"
+  value: "system-3.11"
+}
diff --git a/.kokoro/system-single.sh b/.kokoro/system-single.sh
new file mode 100755
index 000000000000..1fd3123b43b1
--- /dev/null
+++ b/.kokoro/system-single.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

+# `-e` makes the script exit as soon as any command fails.
+# `-o pipefail` makes a pipeline's exit code non-zero if any command in it
+# fails, rather than reporting only the last command's status.
+set -eo pipefail
+
+pwd
+
+# If NOX_SESSION is set, run only that session; otherwise run all sessions.
+SESSION_ARG=""
+
+[[ -z "${NOX_SESSION}" ]] || SESSION_ARG="-s ${NOX_SESSION}"
+python3 -m nox ${SESSION_ARG}
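The contract between the `.kokoro/*/system.cfg` files and `system-single.sh` is just the `NOX_SESSION` variable: when it is set (e.g. to `system-3.11`), only that session runs; otherwise nox runs every session. A minimal Python sketch of the same selection logic (a hypothetical helper, not part of this change):

```python
import os
import subprocess
from typing import Optional


def run_nox(session: Optional[str] = None) -> int:
    """Run nox the way .kokoro/system-single.sh does: every session by
    default, or only the one named by `session` / the NOX_SESSION variable."""
    cmd = ["python3", "-m", "nox"]
    session = session or os.environ.get("NOX_SESSION")
    if session:
        cmd += ["-s", session]  # e.g. "system-3.11", as set in .kokoro/*/system.cfg
    return subprocess.call(cmd)
```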
diff --git a/.kokoro/system.sh b/.kokoro/system.sh
new file mode 100755
index 000000000000..34e74bda4b49
--- /dev/null
+++ b/.kokoro/system.sh
@@ -0,0 +1,68 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# `-e` makes the script exit as soon as any command fails.
+# `-o pipefail` makes a pipeline's exit code non-zero if any command in it
+# fails, rather than reporting only the last command's status.
+set -eo pipefail
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Set up service account credentials.
+export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+
+# Set up the project id.
+export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+  cleanup() {
+    chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    $KOKORO_GFILE_DIR/linux_amd64/flakybot
+  }
+  trap cleanup EXIT HUP
+fi
+
+RETVAL=0
+
+export PROJECT_ROOT=$(realpath $(dirname "${BASH_SOURCE[0]}")/..)
+
+cd "$PROJECT_ROOT"
+
+pwd
+
+# The script that runs system tests for a single package.
+system_test_script="${PROJECT_ROOT}/.kokoro/system-single.sh"
+
+# Run system tests for each package with a directory packages/*/tests/system
+for dir in `find 'packages' -type d -wholename 'packages/*/tests/system'`; do
+  # Get the path to the package by removing the suffix /tests/system
+  package=$(echo $dir | cut -f -2 -d '/')
+  echo "Running system tests for ${package}"
+  pushd ${package}
+  # Temporarily allow failure.
+  set +e
+  ${system_test_script}
+  ret=$?
+  set -e
+  if [ ${ret} -ne 0 ]; then
+    RETVAL=${ret}
+  fi
+  popd
+done
+
+exit ${RETVAL}
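The driver script discovers packages by globbing for a `tests/system` directory and then strips the trailing two path segments with `cut`. The same discovery step in Python, for illustration (assumes it runs from the repository root, as the script does):

```python
from pathlib import Path
from typing import List


def packages_with_system_tests(root: str = "packages") -> List[Path]:
    """Equivalent of `find 'packages' -type d -wholename 'packages/*/tests/system'`
    followed by `cut -f -2 -d '/'`: return each package directory that
    contains a tests/system folder."""
    return sorted(
        p.parents[1]  # drop the trailing tests/system, keeping packages/<name>
        for p in Path(root).glob("*/tests/system")
        if p.is_dir()
    )
```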
"packages/google-cloud-tpu": "1.16.0", "packages/google-cloud-video-live-stream": "1.5.2", "packages/google-cloud-video-stitcher": "0.7.4", "packages/google-cloud-video-transcoder": "1.10.1", + "packages/google-cloud-videointelligence": "2.11.4", "packages/google-cloud-vm-migration": "1.6.3", "packages/google-cloud-vmwareengine": "1.1.2", "packages/google-cloud-vpc-access": "1.8.3", diff --git a/CHANGELOG.md b/CHANGELOG.md index 99f743354310..a1f4a4b6743d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,12 +2,12 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- -- [google-ai-generativelanguage==0.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-ai-generativelanguage==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) - [google-apps-script-type==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) - [google-area120-tables==0.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) - [google-cloud-access-approval==1.11.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) - [google-cloud-advisorynotifications==0.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) -- [google-cloud-alloydb==0.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) +- [google-cloud-alloydb==0.3.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) - [google-cloud-api-gateway==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) - [google-cloud-api-keys==0.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) - [google-cloud-apigee-connect==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) @@ -23,97 +23,104 @@ Changelogs - [google-cloud-beyondcorp-clientconnectorservices==0.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) - [google-cloud-beyondcorp-clientgateways==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) - [google-cloud-bigquery-analyticshub==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) -- [google-cloud-bigquery-biglake==0.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) -- [google-cloud-bigquery-data-exchange==0.5.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) -- [google-cloud-bigquery-datapolicies==0.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- 
-- [google-cloud-bigquery-logging==1.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md)
-- [google-cloud-bigquery-migration==0.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md)
-- [google-cloud-billing-budgets==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md)
-- [google-cloud-billing==1.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md)
-- [google-cloud-certificate-manager==1.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md)
-- [google-cloud-channel==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md)
-- [google-cloud-confidentialcomputing==0.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md)
-- [google-cloud-config==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md)
-- [google-cloud-contact-center-insights==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md)
-- [google-cloud-contentwarehouse==0.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md)
-- [google-cloud-data-fusion==1.8.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md)
-- [google-cloud-data-qna==0.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md)
-- [google-cloud-datacatalog-lineage==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md)
-- [google-cloud-datacatalog==3.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md)
-- [google-cloud-dataform==0.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md)
-- [google-cloud-datalabeling==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md)
-- [google-cloud-dataplex==1.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md)
-- [google-cloud-dataproc-metastore==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md)
-- [google-cloud-datastream==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md)
-- [google-cloud-deploy==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md)
-- [google-cloud-dialogflow-cx==1.27.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md)
-- [google-cloud-discoveryengine==0.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md)
-- [google-cloud-dms==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md)
-- [google-cloud-documentai==2.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md)
-- [google-cloud-domains==1.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md)
-- [google-cloud-edgecontainer==0.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md)
-- [google-cloud-enterpriseknowledgegraph==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md)
-- [google-cloud-essential-contacts==1.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md)
-- [google-cloud-eventarc-publishing==0.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md)
-- [google-cloud-eventarc==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md)
-- [google-cloud-functions==1.13.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md)
+- [google-cloud-bigquery-biglake==0.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md)
+- [google-cloud-bigquery-data-exchange==0.5.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md)
+- [google-cloud-bigquery-datapolicies==0.6.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md)
+- [google-cloud-bigquery-logging==1.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md)
+- [google-cloud-bigquery-migration==0.11.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md)
+- [google-cloud-billing-budgets==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md)
+- [google-cloud-billing==1.11.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md)
+- [google-cloud-certificate-manager==1.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md)
+- [google-cloud-channel==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md)
+- [google-cloud-commerce-consumer-procurement==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md)
+- [google-cloud-confidentialcomputing==0.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md)
+- [google-cloud-config==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md)
+- [google-cloud-contact-center-insights==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md)
+- [google-cloud-contentwarehouse==0.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md)
+- [google-cloud-data-fusion==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md)
+- [google-cloud-data-qna==0.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md)
+- [google-cloud-datacatalog-lineage==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md)
+- [google-cloud-datacatalog==3.15.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md)
+- [google-cloud-dataform==0.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md)
+- [google-cloud-datalabeling==1.8.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md)
+- [google-cloud-dataplex==1.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md)
+- [google-cloud-dataproc-metastore==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md)
+- [google-cloud-datastream==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md)
+- [google-cloud-deploy==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md)
+- [google-cloud-dialogflow-cx==1.27.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md)
+- [google-cloud-dialogflow==2.24.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md)
+- [google-cloud-discoveryengine==0.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md)
+- [google-cloud-dms==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md)
+- [google-cloud-documentai==2.20.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md)
+- [google-cloud-domains==1.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md)
+- [google-cloud-edgecontainer==0.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md)
+- [google-cloud-enterpriseknowledgegraph==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md)
+- [google-cloud-essential-contacts==1.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md)
+- [google-cloud-eventarc-publishing==0.6.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md)
+- [google-cloud-eventarc==1.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md)
+- [google-cloud-functions==1.13.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md)
 - [google-cloud-gke-backup==0.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md)
-- [google-cloud-gke-connect-gateway==0.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md)
-- [google-cloud-gke-multicloud==0.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md)
-- [google-cloud-iam-logging==1.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md)
-- [google-cloud-iam==2.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md)
-- [google-cloud-iap==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md)
-- [google-cloud-ids==1.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md)
-- [google-cloud-kms-inventory==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md)
-- [google-cloud-language==2.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md)
-- [google-cloud-life-sciences==0.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md)
-- [google-cloud-managed-identities==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md)
-- [google-cloud-media-translation==0.11.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md)
-- [google-cloud-memcache==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md)
-- [google-cloud-migrationcenter==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md)
-- [google-cloud-monitoring-metrics-scopes==1.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md)
-- [google-cloud-netapp==0.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md)
-- [google-cloud-network-connectivity==2.0.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md)
-- [google-cloud-network-management==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md)
-- [google-cloud-network-security==0.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md)
+- [google-cloud-gke-connect-gateway==0.8.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md)
+- [google-cloud-gke-multicloud==0.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md)
+- [google-cloud-iam-logging==1.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md)
+- [google-cloud-iam==2.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md)
+- [google-cloud-iap==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md)
+- [google-cloud-ids==1.5.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md)
+- [google-cloud-kms-inventory==0.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md)
+- [google-cloud-language==2.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md)
+- [google-cloud-life-sciences==0.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md)
+- [google-cloud-managed-identities==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md)
+- [google-cloud-media-translation==0.11.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md)
+- [google-cloud-memcache==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md)
+- [google-cloud-migrationcenter==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md)
+- [google-cloud-monitoring-metrics-scopes==1.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md)
+- [google-cloud-netapp==0.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md)
+- [google-cloud-network-connectivity==2.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md)
+- [google-cloud-network-management==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md)
+- [google-cloud-network-security==0.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md)
 - [google-cloud-network-services==0.5.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md)
-- [google-cloud-notebooks==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md)
+- [google-cloud-notebooks==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md)
 - [google-cloud-optimization==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md)
-- [google-cloud-orchestration-airflow==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md)
-- [google-cloud-phishing-protection==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md)
-- [google-cloud-policy-troubleshooter==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md)
-- [google-cloud-policysimulator==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md)
-- [google-cloud-private-ca==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md)
-- [google-cloud-private-catalog==0.9.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md)
-- [google-cloud-public-ca==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md)
-- [google-cloud-rapidmigrationassessment==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md)
-- [google-cloud-recaptcha-enterprise==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md)
-- [google-cloud-recommendations-ai==0.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md)
-- [google-cloud-recommender==2.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md)
-- [google-cloud-redis==2.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md)
-- [google-cloud-resource-manager==1.10.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md)
-- [google-cloud-resource-settings==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md)
-- [google-cloud-secret-manager==2.16.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md)
-- [google-cloud-securitycenter==1.23.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md)
-- [google-cloud-service-control==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md)
-- [google-cloud-service-directory==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md)
-- [google-cloud-service-usage==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md)
-- [google-cloud-shell==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md)
-- [google-cloud-source-context==1.4.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md)
-- [google-cloud-storageinsights==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md)
-- [google-cloud-support==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md)
-- [google-cloud-talent==2.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md)
-- [google-cloud-tpu==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md)
-- [google-cloud-vm-migration==1.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md)
-- [google-cloud-vmwareengine==1.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md)
-- [google-cloud-vpc-access==1.8.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md)
-- [google-cloud-webrisk==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md)
-- [google-cloud-websecurityscanner==1.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md)
-- [google-cloud-workflows==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md)
-- [google-cloud-workstations==0.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md)
-- [google-geo-type==0.3.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md)
-- [google-maps-addressvalidation==0.3.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md)
-- [google-maps-mapsplatformdatasets==0.3.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md)
-- [google-maps-places==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md)
-- [google-maps-routing==0.6.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md)
+- [google-cloud-orchestration-airflow==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md)
+- [google-cloud-phishing-protection==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md)
+- [google-cloud-policy-troubleshooter==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md)
+- [google-cloud-policysimulator==0.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md)
+- [google-cloud-private-ca==1.8.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md)
+- [google-cloud-private-catalog==0.9.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md)
+- [google-cloud-public-ca==0.3.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md)
+- [google-cloud-rapidmigrationassessment==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md)
+- [google-cloud-recaptcha-enterprise==1.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md)
+- [google-cloud-recommendations-ai==0.10.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md)
+- [google-cloud-recommender==2.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md)
+- [google-cloud-redis==2.13.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md)
+- [google-cloud-resource-manager==1.10.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md)
+- [google-cloud-resource-settings==1.7.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md)
+- [google-cloud-retail==1.16.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md)
+- [google-cloud-secret-manager==2.16.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md)
+- [google-cloud-securitycenter==1.23.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md)
+- [google-cloud-service-control==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md)
+- [google-cloud-service-directory==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md)
+- [google-cloud-service-usage==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md)
+- [google-cloud-shell==1.7.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md)
+- [google-cloud-source-context==1.4.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md)
+- [google-cloud-storage-transfer==1.9.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md)
+- [google-cloud-storageinsights==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md)
+- [google-cloud-support==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md)
+- [google-cloud-talent==2.11.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md)
+- [google-cloud-tpu==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md)
+- [google-cloud-video-live-stream==1.5.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md)
+- [google-cloud-video-stitcher==0.7.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md)
+- [google-cloud-video-transcoder==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md)
+- [google-cloud-vm-migration==1.6.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md)
+- [google-cloud-vmwareengine==1.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md)
+- [google-cloud-vpc-access==1.8.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md)
+- [google-cloud-webrisk==1.12.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md)
+- [google-cloud-websecurityscanner==1.12.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md)
+- [google-cloud-workflows==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md)
+- [google-cloud-workstations==0.5.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md)
+- [google-geo-type==0.3.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md)
+- [google-maps-addressvalidation==0.3.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md)
+- [google-maps-mapsplatformdatasets==0.3.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md)
+- [google-maps-places==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md)
+- [google-maps-routing==0.6.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md)
diff --git a/packages/google-cloud-access-approval/noxfile.py b/packages/google-cloud-access-approval/noxfile.py
index 9a2acd8b6787..be54712bfa8f 100644
--- a/packages/google-cloud-access-approval/noxfile.py
+++ b/packages/google-cloud-access-approval/noxfile.py
@@ -46,7 +46,7 @@
 UNIT_TEST_EXTRAS = []
 UNIT_TEST_EXTRAS_BY_PYTHON = {}
 
-SYSTEM_TEST_PYTHON_VERSIONS = []
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"]
 SYSTEM_TEST_STANDARD_DEPENDENCIES = [
     "mock",
     "pytest",
@@ -405,24 +405,3 @@ def prerelease_deps(session):
     session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
 
     session.run("py.test", "tests/unit")
-
-    system_test_path = os.path.join("tests", "system.py")
-    system_test_folder_path = os.path.join("tests", "system")
-
-    # Only run system tests if found.
-    if os.path.exists(system_test_path):
-        session.run(
-            "py.test",
-            "--verbose",
-            f"--junitxml=system_{session.python}_sponge_log.xml",
-            system_test_path,
-            *session.posargs,
-        )
-    if os.path.exists(system_test_folder_path):
-        session.run(
-            "py.test",
-            "--verbose",
-            f"--junitxml=system_{session.python}_sponge_log.xml",
-            system_test_folder_path,
-            *session.posargs,
-        )
diff --git a/packages/google-cloud-access-approval/tests/system/__init__.py b/packages/google-cloud-access-approval/tests/system/__init__.py
new file mode 100644
index 000000000000..89a37dc92c5a
--- /dev/null
+++ b/packages/google-cloud-access-approval/tests/system/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/packages/google-cloud-access-approval/tests/system/smoke_test.py b/packages/google-cloud-access-approval/tests/system/smoke_test.py
new file mode 100644
index 000000000000..39ed3bb198be
--- /dev/null
+++ b/packages/google-cloud-access-approval/tests/system/smoke_test.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import pytest
+
+from google.cloud import accessapproval_v1
+
+
+@pytest.fixture(scope="session")
+def project_id():
+    return os.environ["PROJECT_ID"]
+
+
+@pytest.mark.parametrize("transport", ["grpc", "rest"])
+def test_list_approval_requests(project_id: str, transport: str):
+    client = accessapproval_v1.AccessApprovalClient(transport=transport)
+
+    parent = client.common_project_path(project_id)
+    client.list_approval_requests(parent=parent)
+
+    # The purpose of this smoke test is to test the communication with the API server,
+    # rather than API-specific functionality.
+    # If the smoke test fails, we won't reach this line.
+    assert True
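The smoke test only asserts that a round trip to the API succeeds on both transports. Outside of pytest, the same check can be run by hand; this sketch assumes `PROJECT_ID` and `GOOGLE_APPLICATION_CREDENTIALS` are set the way `.kokoro/system.sh` sets them:

```python
import os

from google.cloud import accessapproval_v1

client = accessapproval_v1.AccessApprovalClient(transport="rest")
parent = client.common_project_path(os.environ["PROJECT_ID"])
# Any auth or transport failure raises; a normal return counts as a pass.
client.list_approval_requests(parent=parent)
```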
diff --git a/packages/google-cloud-advisorynotifications/CONTRIBUTING.rst b/packages/google-cloud-advisorynotifications/CONTRIBUTING.rst
index 697fc4fbab5e..337164ecce0c 100644
--- a/packages/google-cloud-advisorynotifications/CONTRIBUTING.rst
+++ b/packages/google-cloud-advisorynotifications/CONTRIBUTING.rst
@@ -143,12 +143,12 @@ Running System Tests
    $ nox -s system
 
    # Run a single system test
-   $ nox -s system- -- -k <name of test>
+   $ nox -s system-3.11 -- -k <name of test>
 
 
 .. note::
 
-   System tests are only configured to run under Python.
+   System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
    For expediency, we do not run them in older versions of Python 3.
 
    This alone will not run the tests. You'll need to change some local
diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/__init__.py
index 10fa11e2a263..d370bf948d60 100644
--- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/__init__.py
+++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/__init__.py
@@ -28,15 +28,19 @@
     Attachment,
     Csv,
     GetNotificationRequest,
+    GetSettingsRequest,
     ListNotificationsRequest,
     ListNotificationsResponse,
     LocalizationState,
     Message,
     Notification,
+    NotificationSettings,
     NotificationType,
     NotificationView,
+    Settings,
     Subject,
     Text,
+    UpdateSettingsRequest,
 )
 
 __all__ = (
@@ -45,12 +49,16 @@
     "Attachment",
     "Csv",
     "GetNotificationRequest",
+    "GetSettingsRequest",
     "ListNotificationsRequest",
     "ListNotificationsResponse",
     "Message",
     "Notification",
+    "NotificationSettings",
+    "Settings",
     "Subject",
     "Text",
+    "UpdateSettingsRequest",
     "LocalizationState",
     "NotificationType",
     "NotificationView",
diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/__init__.py
index ae83dd0396de..e8f30f1f4441 100644
--- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/__init__.py
+++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/__init__.py
@@ -26,15 +26,19 @@
     Attachment,
     Csv,
     GetNotificationRequest,
+    GetSettingsRequest,
     ListNotificationsRequest,
     ListNotificationsResponse,
     LocalizationState,
     Message,
     Notification,
+    NotificationSettings,
     NotificationType,
     NotificationView,
+    Settings,
     Subject,
     Text,
+    UpdateSettingsRequest,
 )
 
 __all__ = (
@@ -43,13 +47,17 @@
     "Attachment",
     "Csv",
     "GetNotificationRequest",
+    "GetSettingsRequest",
     "ListNotificationsRequest",
     "ListNotificationsResponse",
     "LocalizationState",
     "Message",
     "Notification",
+    "NotificationSettings",
     "NotificationType",
     "NotificationView",
+    "Settings",
     "Subject",
     "Text",
+    "UpdateSettingsRequest",
 )
diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_metadata.json b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_metadata.json
index 5c1c2efdc5bb..166869d9247d 100644
--- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_metadata.json
+++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_metadata.json
@@ -15,10 +15,20 @@
                 "get_notification"
               ]
             },
+            "GetSettings": {
+              "methods": [
+                "get_settings"
+              ]
+            },
             "ListNotifications": {
              "methods": [
                 "list_notifications"
               ]
+            },
+            "UpdateSettings": {
+              "methods": [
+                "update_settings"
+              ]
             }
           }
         },
@@ -30,10 +40,20 @@
                 "get_notification"
               ]
             },
+            "GetSettings": {
+              "methods": [
+                "get_settings"
+              ]
+            },
             "ListNotifications": {
              "methods": [
                 "list_notifications"
               ]
+            },
+            "UpdateSettings": {
+              "methods": [
+                "update_settings"
+              ]
             }
           }
         },
@@ -45,10 +65,20 @@
                 "get_notification"
               ]
             },
+            "GetSettings": {
+              "methods": [
+                "get_settings"
+              ]
+            },
             "ListNotifications": {
              "methods": [
                 "list_notifications"
               ]
+            },
+            "UpdateSettings": {
+              "methods": [
+                "update_settings"
+              ]
             }
           }
         }
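With the `GetSettings` and `UpdateSettings` RPCs registered above, the typical client flow is a read-modify-write keyed by the settings resource name. A hedged usage sketch ("123" and "global" are placeholder values, not from this change):

```python
from google.cloud import advisorynotifications_v1

client = advisorynotifications_v1.AdvisoryNotificationsServiceClient()

# Builds organizations/123/locations/global/settings from placeholder parts.
name = client.settings_path(organization="123", location="global")

# get_settings returns the current Settings message (including its etag);
# update_settings writes a modified copy back.
settings = client.get_settings(name=name)
updated = client.update_settings(settings=settings)
```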
diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py
index 4e611285fcba..a137d7a383ce 100644
--- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py
+++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/async_client.py
@@ -68,6 +68,10 @@ class AdvisoryNotificationsServiceAsyncClient:
     parse_notification_path = staticmethod(
         AdvisoryNotificationsServiceClient.parse_notification_path
     )
+    settings_path = staticmethod(AdvisoryNotificationsServiceClient.settings_path)
+    parse_settings_path = staticmethod(
+        AdvisoryNotificationsServiceClient.parse_settings_path
+    )
     common_billing_account_path = staticmethod(
         AdvisoryNotificationsServiceClient.common_billing_account_path
     )
@@ -468,6 +472,209 @@ async def sample_get_notification():
         # Done; return the response.
         return response
 
+    async def get_settings(
+        self,
+        request: Optional[Union[service.GetSettingsRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> service.Settings:
+        r"""Get notification settings.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import advisorynotifications_v1
+
+            async def sample_get_settings():
+                # Create a client
+                client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = advisorynotifications_v1.GetSettingsRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.get_settings(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.advisorynotifications_v1.types.GetSettingsRequest, dict]]):
+                The request object. Request of GetSettings endpoint.
+            name (:class:`str`):
+                Required. The resource name of the
+                settings to retrieve. Format:
+
+                organizations/{organization}/locations/{location}/settings.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.advisorynotifications_v1.types.Settings:
+                Settings for Advisory Notifications.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = service.GetSettingsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_settings,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
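The async surface mirrors the sync one. Assuming the usual GAPIC async client export, a call looks like this (placeholder organization/location values):

```python
import asyncio

from google.cloud import advisorynotifications_v1


async def main() -> None:
    client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient()
    name = client.settings_path(organization="123", location="global")  # placeholders
    settings = await client.get_settings(name=name)
    print(settings.etag)


asyncio.run(main())
```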
+        if settings is not None:
+            request.settings = settings
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_settings,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("settings.name", request.settings.name),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
     async def __aenter__(self) -> "AdvisoryNotificationsServiceAsyncClient":
         return self
 
diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py
index 7b1fc84870a1..206cc2793c33 100644
--- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py
+++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py
@@ -207,6 +207,26 @@ def parse_notification_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def settings_path(
+        organization: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified settings string."""
+        return "organizations/{organization}/locations/{location}/settings".format(
+            organization=organization,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_settings_path(path: str) -> Dict[str, str]:
+        """Parses a settings path into its component segments."""
+        m = re.match(
+            r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/settings$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def common_billing_account_path(
         billing_account: str,
@@ -672,6 +692,209 @@ def sample_get_notification():
         # Done; return the response.
         return response
 
+    def get_settings(
+        self,
+        request: Optional[Union[service.GetSettingsRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> service.Settings:
+        r"""Get notification settings.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import advisorynotifications_v1
+
+            def sample_get_settings():
+                # Create a client
+                client = advisorynotifications_v1.AdvisoryNotificationsServiceClient()
+
+                # Initialize request argument(s)
+                request = advisorynotifications_v1.GetSettingsRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_settings(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.advisorynotifications_v1.types.GetSettingsRequest, dict]):
+                The request object. Request of GetSettings endpoint.
+            name (str):
+                Required.
The resource name of the + settings to retrieve. Format: + + organizations/{organization}/locations/{location}/settings. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.advisorynotifications_v1.types.Settings: + Settings for Advisory Notifications. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetSettingsRequest): + request = service.GetSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_settings( + self, + request: Optional[Union[service.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[service.Settings] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Settings: + r"""Update notification settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import advisorynotifications_v1 + + def sample_update_settings(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceClient() + + # Initialize request argument(s) + settings = advisorynotifications_v1.Settings() + settings.etag = "etag_value" + + request = advisorynotifications_v1.UpdateSettingsRequest( + settings=settings, + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.advisorynotifications_v1.types.UpdateSettingsRequest, dict]): + The request object. Request of UpdateSettings endpoint. + settings (google.cloud.advisorynotifications_v1.types.Settings): + Required. New settings. 
+ This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.advisorynotifications_v1.types.Settings: + Settings for Advisory Notifications. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateSettingsRequest): + request = service.UpdateSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("settings.name", request.settings.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
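The etag contract documented for Settings implies a read-modify-write loop around these two RPCs: read, mutate, write back, and retry when the write is rejected. A sketch of that loop, assuming the documented ABORTED error surfaces as google.api_core.exceptions.Aborted, with placeholder organization and location IDs:

    from google.api_core import exceptions
    from google.cloud import advisorynotifications_v1


    def set_all_notifications(organization: str, location: str, enabled: bool) -> None:
        client = advisorynotifications_v1.AdvisoryNotificationsServiceClient()
        name = client.settings_path(organization, location)
        while True:
            # Read: the response carries the etag fingerprinting server state.
            settings = client.get_settings(name=name)
            for key in settings.notification_settings:
                settings.notification_settings[key].enabled = enabled
            try:
                # Write: a stale etag is rejected with ABORTED, in which
                # case we re-read and try again.
                client.update_settings(settings=settings)
                return
            except exceptions.Aborted:
                continue


    set_all_notifications("my-org", "global", enabled=True)
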
+ return response + def __enter__(self) -> "AdvisoryNotificationsServiceClient": return self diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/base.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/base.py index ed345a2f9d4a..d1f08c4972c8 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/base.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/base.py @@ -150,6 +150,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_settings: gapic_v1.method.wrap_method( + self.get_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_settings: gapic_v1.method.wrap_method( + self.update_settings, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -182,6 +192,24 @@ def get_notification( ]: raise NotImplementedError() + @property + def get_settings( + self, + ) -> Callable[ + [service.GetSettingsRequest], + Union[service.Settings, Awaitable[service.Settings]], + ]: + raise NotImplementedError() + + @property + def update_settings( + self, + ) -> Callable[ + [service.UpdateSettingsRequest], + Union[service.Settings, Awaitable[service.Settings]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc.py index 6d1706334360..978420255e6f 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc.py @@ -282,6 +282,56 @@ def get_notification( ) return self._stubs["get_notification"] + @property + def get_settings(self) -> Callable[[service.GetSettingsRequest], service.Settings]: + r"""Return a callable for the get settings method over gRPC. + + Get notification settings. + + Returns: + Callable[[~.GetSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.advisorynotifications.v1.AdvisoryNotificationsService/GetSettings", + request_serializer=service.GetSettingsRequest.serialize, + response_deserializer=service.Settings.deserialize, + ) + return self._stubs["get_settings"] + + @property + def update_settings( + self, + ) -> Callable[[service.UpdateSettingsRequest], service.Settings]: + r"""Return a callable for the update settings method over gRPC. + + Update notification settings. 
+ + Returns: + Callable[[~.UpdateSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.advisorynotifications.v1.AdvisoryNotificationsService/UpdateSettings", + request_serializer=service.UpdateSettingsRequest.serialize, + response_deserializer=service.Settings.deserialize, + ) + return self._stubs["update_settings"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc_asyncio.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc_asyncio.py index e18f6eef74c2..88dbdb71331e 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/grpc_asyncio.py @@ -287,6 +287,58 @@ def get_notification( ) return self._stubs["get_notification"] + @property + def get_settings( + self, + ) -> Callable[[service.GetSettingsRequest], Awaitable[service.Settings]]: + r"""Return a callable for the get settings method over gRPC. + + Get notification settings. + + Returns: + Callable[[~.GetSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.advisorynotifications.v1.AdvisoryNotificationsService/GetSettings", + request_serializer=service.GetSettingsRequest.serialize, + response_deserializer=service.Settings.deserialize, + ) + return self._stubs["get_settings"] + + @property + def update_settings( + self, + ) -> Callable[[service.UpdateSettingsRequest], Awaitable[service.Settings]]: + r"""Return a callable for the update settings method over gRPC. + + Update notification settings. + + Returns: + Callable[[~.UpdateSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
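Both transports memoize their stubs: each property builds the unary-unary callable once, on first access, and caches it in self._stubs, exactly as the check that follows does. The shape of that pattern, distilled into a standalone sketch:

    class LazyStubCache:
        """Illustrative stand-in for the transport's per-RPC stub cache."""

        def __init__(self, channel):
            self._channel = channel
            self._stubs = {}

        def stub(self, name, path, request_serializer, response_deserializer):
            # Create the gRPC callable only on first use, then reuse it.
            if name not in self._stubs:
                self._stubs[name] = self._channel.unary_unary(
                    path,
                    request_serializer=request_serializer,
                    response_deserializer=response_deserializer,
                )
            return self._stubs[name]
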
+ if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self.grpc_channel.unary_unary( + "/google.cloud.advisorynotifications.v1.AdvisoryNotificationsService/UpdateSettings", + request_serializer=service.UpdateSettingsRequest.serialize, + response_deserializer=service.Settings.deserialize, + ) + return self._stubs["update_settings"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py index fc61e55048db..5e6bca71b7e6 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py @@ -71,6 +71,14 @@ def post_get_notification(self, response): logging.log(f"Received response: {response}") return response + def pre_get_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_settings(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_notifications(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -79,6 +87,14 @@ def post_list_notifications(self, response): logging.log(f"Received response: {response}") return response + def pre_update_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_settings(self, response): + logging.log(f"Received response: {response}") + return response + transport = AdvisoryNotificationsServiceRestTransport(interceptor=MyCustomAdvisoryNotificationsServiceInterceptor()) client = AdvisoryNotificationsServiceClient(transport=transport) @@ -108,6 +124,25 @@ def post_get_notification( """ return response + def pre_get_settings( + self, request: service.GetSettingsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[service.GetSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the AdvisoryNotificationsService server. + """ + return request, metadata + + def post_get_settings(self, response: service.Settings) -> service.Settings: + """Post-rpc interceptor for get_settings + + Override in a subclass to manipulate the response + after it is returned by the AdvisoryNotificationsService server but before + it is returned to user code. + """ + return response + def pre_list_notifications( self, request: service.ListNotificationsRequest, @@ -131,6 +166,27 @@ def post_list_notifications( """ return response + def pre_update_settings( + self, + request: service.UpdateSettingsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[service.UpdateSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the AdvisoryNotificationsService server. 
+ """ + return request, metadata + + def post_update_settings(self, response: service.Settings) -> service.Settings: + """Post-rpc interceptor for update_settings + + Override in a subclass to manipulate the response + after it is returned by the AdvisoryNotificationsService server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class AdvisoryNotificationsServiceRestStub: @@ -319,6 +375,92 @@ def __call__( resp = self._interceptor.post_get_notification(resp) return resp + class _GetSettings(AdvisoryNotificationsServiceRestStub): + def __hash__(self): + return hash("GetSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.GetSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Settings: + r"""Call the get settings method over HTTP. + + Args: + request (~.service.GetSettingsRequest): + The request object. Request of GetSettings endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.Settings: + Settings for Advisory Notifications. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/settings}", + }, + ] + request, metadata = self._interceptor.pre_get_settings(request, metadata) + pb_request = service.GetSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.Settings() + pb_resp = service.Settings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_settings(resp) + return resp + class _ListNotifications(AdvisoryNotificationsServiceRestStub): def __hash__(self): return hash("ListNotifications") @@ -410,6 +552,101 @@ def __call__( resp = self._interceptor.post_list_notifications(resp) return resp + class _UpdateSettings(AdvisoryNotificationsServiceRestStub): + def __hash__(self): + return hash("UpdateSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: service.UpdateSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> service.Settings: + r"""Call the update settings method over HTTP. + + Args: + request (~.service.UpdateSettingsRequest): + The request object. Request of UpdateSettings endpoint. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.service.Settings: + Settings for Advisory Notifications. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{settings.name=organizations/*/locations/*/settings}", + "body": "settings", + }, + ] + request, metadata = self._interceptor.pre_update_settings(request, metadata) + pb_request = service.UpdateSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
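The interceptor hooks declared earlier pair with these stubs: each pre_* hook runs before the request is transcoded, and each post_* hook runs after the response is parsed. A sketch of wiring a custom interceptor into the REST transport, following the pattern from the interceptor class docstring (the logged fields are illustrative):

    import logging

    from google.cloud.advisorynotifications_v1 import AdvisoryNotificationsServiceClient
    from google.cloud.advisorynotifications_v1.services.advisory_notifications_service.transports.rest import (
        AdvisoryNotificationsServiceRestInterceptor,
        AdvisoryNotificationsServiceRestTransport,
    )


    class EtagLoggingInterceptor(AdvisoryNotificationsServiceRestInterceptor):
        def pre_update_settings(self, request, metadata):
            logging.info("UpdateSettings sending etag=%s", request.settings.etag)
            return request, metadata

        def post_update_settings(self, response):
            logging.info("UpdateSettings returned etag=%s", response.etag)
            return response


    transport = AdvisoryNotificationsServiceRestTransport(
        interceptor=EtagLoggingInterceptor()
    )
    client = AdvisoryNotificationsServiceClient(transport=transport)
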
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = service.Settings() + pb_resp = service.Settings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_settings(resp) + return resp + @property def get_notification( self, @@ -418,6 +655,12 @@ def get_notification( # In C++ this would require a dynamic_cast return self._GetNotification(self._session, self._host, self._interceptor) # type: ignore + @property + def get_settings(self) -> Callable[[service.GetSettingsRequest], service.Settings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSettings(self._session, self._host, self._interceptor) # type: ignore + @property def list_notifications( self, @@ -428,6 +671,14 @@ def list_notifications( # In C++ this would require a dynamic_cast return self._ListNotifications(self._session, self._host, self._interceptor) # type: ignore + @property + def update_settings( + self, + ) -> Callable[[service.UpdateSettingsRequest], service.Settings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSettings(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/__init__.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/__init__.py index a24e4ea74224..1ef8ad933103 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/__init__.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/__init__.py @@ -17,27 +17,35 @@ Attachment, Csv, GetNotificationRequest, + GetSettingsRequest, ListNotificationsRequest, ListNotificationsResponse, LocalizationState, Message, Notification, + NotificationSettings, NotificationType, NotificationView, + Settings, Subject, Text, + UpdateSettingsRequest, ) __all__ = ( "Attachment", "Csv", "GetNotificationRequest", + "GetSettingsRequest", "ListNotificationsRequest", "ListNotificationsResponse", "Message", "Notification", + "NotificationSettings", + "Settings", "Subject", "Text", + "UpdateSettingsRequest", "LocalizationState", "NotificationType", "NotificationView", diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py index af5006748b19..3ff314dd441b 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/types/service.py @@ -35,6 +35,10 @@ "ListNotificationsRequest", "ListNotificationsResponse", "GetNotificationRequest", + "Settings", + "NotificationSettings", + "GetSettingsRequest", + "UpdateSettingsRequest", }, ) @@ -435,4 +439,90 @@ class GetNotificationRequest(proto.Message): ) +class Settings(proto.Message): + r"""Settings for Advisory Notifications. + + Attributes: + name (str): + Output only. The resource name of the + settings to retrieve. Format: + + organizations/{organization}/locations/{location}/settings. 
+ notification_settings (MutableMapping[str, google.cloud.advisorynotifications_v1.types.NotificationSettings]): + Required. Map of each notification type and + its settings to get/set all settings at once. + The server will validate the value for each + notification type. + etag (str): + Required. Fingerprint for optimistic + concurrency returned in Get requests. Must be + provided for Update requests. If the value + provided does not match the value known to the + server, ABORTED will be thrown, and the client + should retry the read-modify-write cycle. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + notification_settings: MutableMapping[str, "NotificationSettings"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message="NotificationSettings", + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + + +class NotificationSettings(proto.Message): + r"""Settings for each NotificationType. + + Attributes: + enabled (bool): + Whether the associated NotificationType is + enabled. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GetSettingsRequest(proto.Message): + r"""Request of GetSettings endpoint. + + Attributes: + name (str): + Required. The resource name of the settings + to retrieve. Format: + + organizations/{organization}/locations/{location}/settings. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSettingsRequest(proto.Message): + r"""Request of UpdateSettings endpoint. + + Attributes: + settings (google.cloud.advisorynotifications_v1.types.Settings): + Required. New settings. + """ + + settings: "Settings" = proto.Field( + proto.MESSAGE, + number=1, + message="Settings", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-advisorynotifications/noxfile.py b/packages/google-cloud-advisorynotifications/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-advisorynotifications/noxfile.py +++ b/packages/google-cloud-advisorynotifications/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. 
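Populating SYSTEM_TEST_PYTHON_VERSIONS is what activates system-test sessions in the shared noxfile template, which is why the removed block here drops the now-redundant ad-hoc invocation from the prerelease_deps session. Roughly how the template turns that list into sessions, as a simplified sketch (the install list stands in for SYSTEM_TEST_STANDARD_DEPENDENCIES):

    import nox

    SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"]


    @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
    def system(session):
        """Run the system test suite (simplified from the shared template)."""
        session.install("mock", "pytest")  # plus the remaining standard deps
        session.install("-e", ".")
        session.run(
            "py.test",
            "--verbose",
            f"--junitxml=system_{session.python}_sponge_log.xml",
            "tests/system",
        )
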
- if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_settings_async.py b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_settings_async.py new file mode 100644 index 000000000000..083e2e9495d4 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_settings_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-advisorynotifications + + +# [START advisorynotifications_v1_generated_AdvisoryNotificationsService_GetSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import advisorynotifications_v1 + + +async def sample_get_settings(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_settings(request=request) + + # Handle the response + print(response) + +# [END advisorynotifications_v1_generated_AdvisoryNotificationsService_GetSettings_async] diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_settings_sync.py b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_settings_sync.py new file mode 100644 index 000000000000..710ed7cc2892 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_get_settings_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-advisorynotifications + + +# [START advisorynotifications_v1_generated_AdvisoryNotificationsService_GetSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import advisorynotifications_v1 + + +def sample_get_settings(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceClient() + + # Initialize request argument(s) + request = advisorynotifications_v1.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + +# [END advisorynotifications_v1_generated_AdvisoryNotificationsService_GetSettings_sync] diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_update_settings_async.py b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_update_settings_async.py new file mode 100644 index 000000000000..8b045cb42564 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_update_settings_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-advisorynotifications + + +# [START advisorynotifications_v1_generated_AdvisoryNotificationsService_UpdateSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import advisorynotifications_v1 + + +async def sample_update_settings(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient() + + # Initialize request argument(s) + settings = advisorynotifications_v1.Settings() + settings.etag = "etag_value" + + request = advisorynotifications_v1.UpdateSettingsRequest( + settings=settings, + ) + + # Make the request + response = await client.update_settings(request=request) + + # Handle the response + print(response) + +# [END advisorynotifications_v1_generated_AdvisoryNotificationsService_UpdateSettings_async] diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_update_settings_sync.py b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_update_settings_sync.py new file mode 100644 index 000000000000..ca4b17d97a46 --- /dev/null +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/advisorynotifications_v1_generated_advisory_notifications_service_update_settings_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-advisorynotifications + + +# [START advisorynotifications_v1_generated_AdvisoryNotificationsService_UpdateSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import advisorynotifications_v1 + + +def sample_update_settings(): + # Create a client + client = advisorynotifications_v1.AdvisoryNotificationsServiceClient() + + # Initialize request argument(s) + settings = advisorynotifications_v1.Settings() + settings.etag = "etag_value" + + request = advisorynotifications_v1.UpdateSettingsRequest( + settings=settings, + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + +# [END advisorynotifications_v1_generated_AdvisoryNotificationsService_UpdateSettings_sync] diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json index 77e960d626b1..d786a8bfb8cb 100644 --- a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json @@ -172,6 +172,167 @@ ], "title": "advisorynotifications_v1_generated_advisory_notifications_service_get_notification_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient", + "shortName": "AdvisoryNotificationsServiceAsyncClient" + }, + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient.get_settings", + "method": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService.GetSettings", + "service": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService", + "shortName": "AdvisoryNotificationsService" + }, + "shortName": "GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.advisorynotifications_v1.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.advisorynotifications_v1.types.Settings", + "shortName": "get_settings" + }, + "description": "Sample for GetSettings", + "file": "advisorynotifications_v1_generated_advisory_notifications_service_get_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "advisorynotifications_v1_generated_AdvisoryNotificationsService_GetSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "advisorynotifications_v1_generated_advisory_notifications_service_get_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceClient", + 
"shortName": "AdvisoryNotificationsServiceClient" + }, + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceClient.get_settings", + "method": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService.GetSettings", + "service": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService", + "shortName": "AdvisoryNotificationsService" + }, + "shortName": "GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.advisorynotifications_v1.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.advisorynotifications_v1.types.Settings", + "shortName": "get_settings" + }, + "description": "Sample for GetSettings", + "file": "advisorynotifications_v1_generated_advisory_notifications_service_get_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "advisorynotifications_v1_generated_AdvisoryNotificationsService_GetSettings_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "advisorynotifications_v1_generated_advisory_notifications_service_get_settings_sync.py" + }, { "canonical": true, "clientMethod": { @@ -332,6 +493,167 @@ } ], "title": "advisorynotifications_v1_generated_advisory_notifications_service_list_notifications_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient", + "shortName": "AdvisoryNotificationsServiceAsyncClient" + }, + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient.update_settings", + "method": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService.UpdateSettings", + "service": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService", + "shortName": "AdvisoryNotificationsService" + }, + "shortName": "UpdateSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.advisorynotifications_v1.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.advisorynotifications_v1.types.Settings" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.advisorynotifications_v1.types.Settings", + "shortName": "update_settings" + }, + "description": "Sample for UpdateSettings", + "file": "advisorynotifications_v1_generated_advisory_notifications_service_update_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "advisorynotifications_v1_generated_AdvisoryNotificationsService_UpdateSettings_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "advisorynotifications_v1_generated_advisory_notifications_service_update_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceClient", + "shortName": "AdvisoryNotificationsServiceClient" + }, + "fullName": "google.cloud.advisorynotifications_v1.AdvisoryNotificationsServiceClient.update_settings", + "method": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService.UpdateSettings", + "service": { + "fullName": "google.cloud.advisorynotifications.v1.AdvisoryNotificationsService", + "shortName": "AdvisoryNotificationsService" + }, + "shortName": "UpdateSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.advisorynotifications_v1.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.advisorynotifications_v1.types.Settings" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.advisorynotifications_v1.types.Settings", + "shortName": "update_settings" + }, + "description": "Sample for UpdateSettings", + "file": "advisorynotifications_v1_generated_advisory_notifications_service_update_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "advisorynotifications_v1_generated_AdvisoryNotificationsService_UpdateSettings_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "advisorynotifications_v1_generated_advisory_notifications_service_update_settings_sync.py" } ] } diff --git a/packages/google-cloud-advisorynotifications/scripts/fixup_advisorynotifications_v1_keywords.py b/packages/google-cloud-advisorynotifications/scripts/fixup_advisorynotifications_v1_keywords.py index 6279b84a7bb4..706081de89ae 100644 --- a/packages/google-cloud-advisorynotifications/scripts/fixup_advisorynotifications_v1_keywords.py +++ b/packages/google-cloud-advisorynotifications/scripts/fixup_advisorynotifications_v1_keywords.py @@ -40,7 +40,9 @@ class advisorynotificationsCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'get_notification': ('name', 'language_code', ), + 'get_settings': ('name', ), 'list_notifications': ('parent', 'page_size', 'page_token', 'view', 'language_code', ), + 'update_settings': ('settings', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py index c1aeb84feded..9f3c61af54d9 100644 --- 
a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py @@ -1448,6 +1448,470 @@ async def test_get_notification_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + service.GetSettingsRequest, + dict, + ], +) +def test_get_settings(request_type, transport: str = "grpc"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Settings( + name="name_value", + etag="etag_value", + ) + response = client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Settings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +def test_get_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + client.get_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_settings_async( + transport: str = "grpc_asyncio", request_type=service.GetSettingsRequest +): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.Settings( + name="name_value", + etag="etag_value", + ) + ) + response = await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, service.Settings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) + + +def test_get_settings_field_headers(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = service.Settings() + client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_settings_field_headers_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Settings()) + await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_settings_flattened(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_settings_flattened_error(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_settings( + service.GetSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_settings_flattened_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
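The async variants of these tests wrap the canned response in grpc_helpers_async.FakeUnaryUnaryCall so the mocked stub stays awaitable. The pattern, reduced to a minimal standalone test (assuming pytest-asyncio is installed, as it is for this suite; the resource name is a placeholder):

    import mock
    import pytest
    from google.api_core import grpc_helpers_async
    from google.auth import credentials as ga_credentials

    from google.cloud import advisorynotifications_v1
    from google.cloud.advisorynotifications_v1.types import service


    @pytest.mark.asyncio
    async def test_get_settings_etag_roundtrip():
        client = advisorynotifications_v1.AdvisoryNotificationsServiceAsyncClient(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        with mock.patch.object(type(client.transport.get_settings), "__call__") as call:
            # FakeUnaryUnaryCall makes the mocked stub awaitable, like a
            # real async gRPC call.
            call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
                service.Settings(etag="etag_value"),
            )
            response = await client.get_settings(
                name="organizations/my-org/locations/global/settings",
            )
        assert response.etag == "etag_value"
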
+ with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Settings()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_settings_flattened_error_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_settings( + service.GetSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateSettingsRequest, + dict, + ], +) +def test_update_settings(request_type, transport: str = "grpc"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Settings( + name="name_value", + etag="etag_value", + ) + response = client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Settings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +def test_update_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + client.update_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSettingsRequest() + + +@pytest.mark.asyncio +async def test_update_settings_async( + transport: str = "grpc_asyncio", request_type=service.UpdateSettingsRequest +): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
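+        # Same awaitable requirement as the get_settings tests above: the
+        # message is wrapped in FakeUnaryUnaryCall so the async client can
+        # await the mocked stub.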
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.Settings( + name="name_value", + etag="etag_value", + ) + ) + response = await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, service.Settings) + assert response.name == "name_value" + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) + + +def test_update_settings_field_headers(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateSettingsRequest() + + request.settings.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = service.Settings() + client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "settings.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_settings_field_headers_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateSettingsRequest() + + request.settings.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Settings()) + await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "settings.name=name_value", + ) in kw["metadata"] + + +def test_update_settings_flattened(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_settings( + settings=service.Settings(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
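+    # The flattened `settings=` keyword is folded into a full
+    # service.UpdateSettingsRequest before it reaches the transport; the
+    # checks below read that folded request back out of the recorded mock
+    # call, where mock_calls[0] unpacks to a (name, args, kwargs) triple.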
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = service.Settings(name="name_value") + assert arg == mock_val + + +def test_update_settings_flattened_error(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_settings( + service.UpdateSettingsRequest(), + settings=service.Settings(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_update_settings_flattened_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.Settings()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_settings( + settings=service.Settings(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = service.Settings(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_settings_flattened_error_async(): + client = AdvisoryNotificationsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_settings( + service.UpdateSettingsRequest(), + settings=service.Settings(name="name_value"), + ) + + @pytest.mark.parametrize( "request_type", [ @@ -1455,47 +1919,667 @@ async def test_get_notification_flattened_error_async(): dict, ], ) -def test_list_notifications_rest(request_type): +def test_list_notifications_rest(request_type): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListNotificationsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListNotificationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_notifications(request) + + # Establish that the response is the type that we expect. 
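+    # The REST tests stay off the wire as well: the transport's shared
+    # requests.Session is patched, and the canned proto is serialized exactly
+    # the way a server response would arrive, i.e. proto -> pb -> JSON bytes
+    # via json_format.MessageToJson(...).encode("UTF-8"). The client parses
+    # those bytes back and hands the caller the pager asserted on below.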
+ assert isinstance(response, pagers.ListNotificationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_notifications_rest_required_fields( + request_type=service.ListNotificationsRequest, +): + transport_class = transports.AdvisoryNotificationsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_notifications._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_notifications._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "language_code", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListNotificationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
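+            # transcode() would normally match the request against the
+            # method's http rule and split it into uri/method/query_params;
+            # stubbing it with a fixed result sidesteps the need for real
+            # required-field values while still exercising the
+            # query-parameter encoding asserted further down.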
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.ListNotificationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_notifications(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_notifications_rest_unset_required_fields(): + transport = transports.AdvisoryNotificationsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_notifications._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "languageCode", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_notifications_rest_interceptors(null_interceptor): + transport = transports.AdvisoryNotificationsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AdvisoryNotificationsServiceRestInterceptor(), + ) + client = AdvisoryNotificationsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AdvisoryNotificationsServiceRestInterceptor, + "post_list_notifications", + ) as post, mock.patch.object( + transports.AdvisoryNotificationsServiceRestInterceptor, "pre_list_notifications" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.ListNotificationsRequest.pb( + service.ListNotificationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.ListNotificationsResponse.to_json( + service.ListNotificationsResponse() + ) + + request = service.ListNotificationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.ListNotificationsResponse() + + client.list_notifications( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_notifications_rest_bad_request( + transport: str = "rest", request_type=service.ListNotificationsRequest +): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
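+    # A 400 status on the mocked session is all it takes: the REST transport
+    # maps HTTP error codes onto google.api_core.exceptions types (here
+    # 400 -> core_exceptions.BadRequest), which pytest.raises pins down below.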
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_notifications(request) + + +def test_list_notifications_rest_flattened(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListNotificationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.ListNotificationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_notifications(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/notifications" + % client.transport._host, + args[1], + ) + + +def test_list_notifications_rest_flattened_error(transport: str = "rest"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_notifications( + service.ListNotificationsRequest(), + parent="parent_value", + ) + + +def test_list_notifications_rest_pager(transport: str = "rest"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
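+        # This test drives pagination purely through next_page_token:
+        # "abc" -> "def" -> "ghi" -> "" (an empty token ends iteration), with
+        # six Notification items spread 3 + 0 + 1 + 2 over the four pages.
+        # Feeding req.side_effect a tuple (rather than a single return_value)
+        # hands out one canned Response per HTTP call.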
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + service.Notification(), + service.Notification(), + ], + next_page_token="abc", + ), + service.ListNotificationsResponse( + notifications=[], + next_page_token="def", + ), + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + ], + next_page_token="ghi", + ), + service.ListNotificationsResponse( + notifications=[ + service.Notification(), + service.Notification(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListNotificationsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_notifications(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, service.Notification) for i in results) + + pages = list(client.list_notifications(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetNotificationRequest, + dict, + ], +) +def test_get_notification_rest(request_type): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/notifications/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.Notification( + name="name_value", + notification_type=service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.Notification.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_notification(request) + + # Establish that the response is the type that we expect. 
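+    # Enums survive the JSON round-trip too: notification_type is emitted by
+    # MessageToJson as its symbolic name and parsed back into the
+    # service.NotificationType member asserted below.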
+ assert isinstance(response, service.Notification) + assert response.name == "name_value" + assert ( + response.notification_type + == service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY + ) + + +def test_get_notification_rest_required_fields( + request_type=service.GetNotificationRequest, +): + transport_class = transports.AdvisoryNotificationsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_notification._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_notification._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("language_code",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.Notification() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = service.Notification.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_notification(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_notification_rest_unset_required_fields(): + transport = transports.AdvisoryNotificationsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_notification._get_unset_required_fields({}) + assert set(unset_fields) == (set(("languageCode",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_notification_rest_interceptors(null_interceptor): + transport = transports.AdvisoryNotificationsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AdvisoryNotificationsServiceRestInterceptor(), + ) + client = AdvisoryNotificationsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AdvisoryNotificationsServiceRestInterceptor, "post_get_notification" + ) as post, mock.patch.object( + transports.AdvisoryNotificationsServiceRestInterceptor, "pre_get_notification" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = service.GetNotificationRequest.pb(service.GetNotificationRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = service.Notification.to_json(service.Notification()) + + request = service.GetNotificationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = service.Notification() + + client.get_notification( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_notification_rest_bad_request( + transport: str = "rest", request_type=service.GetNotificationRequest +): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/notifications/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_notification(request) + + +def test_get_notification_rest_flattened(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.Notification() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/notifications/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = service.Notification.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_notification(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/notifications/*}" + % client.transport._host, + args[1], + ) + + +def test_get_notification_rest_flattened_error(transport: str = "rest"): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_notification( + service.GetNotificationRequest(), + name="name_value", + ) + + +def test_get_notification_rest_error(): + client = AdvisoryNotificationsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSettingsRequest, + dict, + ], +) +def test_get_settings_rest(request_type): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init = {"name": "organizations/sample1/locations/sample2/settings"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListNotificationsResponse( - next_page_token="next_page_token_value", - total_size=1086, + return_value = service.Settings( + name="name_value", + etag="etag_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = service.ListNotificationsResponse.pb(return_value) + pb_return_value = service.Settings.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_notifications(request) + response = client.get_settings(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNotificationsPager) - assert response.next_page_token == "next_page_token_value" - assert response.total_size == 1086 + assert isinstance(response, service.Settings) + assert response.name == "name_value" + assert response.etag == "etag_value" -def test_list_notifications_rest_required_fields( - request_type=service.ListNotificationsRequest, -): +def test_get_settings_rest_required_fields(request_type=service.GetSettingsRequest): transport_class = transports.AdvisoryNotificationsServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1510,30 +2594,21 @@ def test_list_notifications_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_notifications._get_unset_required_fields(jsonified_request) + ).get_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_notifications._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "language_code", - "page_size", - "page_token", - "view", - ) - ) + ).get_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1542,7 +2617,7 @@ def test_list_notifications_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListNotificationsResponse() + return_value = service.Settings() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1562,40 +2637,30 @@ def test_list_notifications_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = service.ListNotificationsResponse.pb(return_value) + pb_return_value = service.Settings.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_notifications(request) + response = client.get_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_notifications_rest_unset_required_fields(): +def test_get_settings_rest_unset_required_fields(): transport = transports.AdvisoryNotificationsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_notifications._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "languageCode", - "pageSize", - "pageToken", - "view", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_notifications_rest_interceptors(null_interceptor): +def test_get_settings_rest_interceptors(null_interceptor): transport = transports.AdvisoryNotificationsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1608,16 +2673,13 @@ def test_list_notifications_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AdvisoryNotificationsServiceRestInterceptor, - "post_list_notifications", + transports.AdvisoryNotificationsServiceRestInterceptor, "post_get_settings" ) as post, mock.patch.object( - transports.AdvisoryNotificationsServiceRestInterceptor, "pre_list_notifications" + transports.AdvisoryNotificationsServiceRestInterceptor, "pre_get_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.ListNotificationsRequest.pb( - service.ListNotificationsRequest() - ) + pb_message = service.GetSettingsRequest.pb(service.GetSettingsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1628,19 +2690,17 @@ def test_list_notifications_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.ListNotificationsResponse.to_json( - service.ListNotificationsResponse() - ) + req.return_value._content = service.Settings.to_json(service.Settings()) - request = service.ListNotificationsRequest() + request = service.GetSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListNotificationsResponse() + post.return_value = service.Settings() - client.list_notifications( + client.get_settings( request, metadata=[ ("key", "val"), @@ -1652,8 +2712,8 @@ def test_list_notifications_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_notifications_rest_bad_request( - transport: str = "rest", request_type=service.ListNotificationsRequest +def test_get_settings_rest_bad_request( + transport: str = "rest", 
request_type=service.GetSettingsRequest ): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1661,7 +2721,7 @@ def test_list_notifications_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init = {"name": "organizations/sample1/locations/sample2/settings"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1673,10 +2733,10 @@ def test_list_notifications_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_notifications(request) + client.get_settings(request) -def test_list_notifications_rest_flattened(): +def test_get_settings_rest_flattened(): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1685,39 +2745,39 @@ def test_list_notifications_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.ListNotificationsResponse() + return_value = service.Settings() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "organizations/sample1/locations/sample2"} + sample_request = {"name": "organizations/sample1/locations/sample2/settings"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = service.ListNotificationsResponse.pb(return_value) + pb_return_value = service.Settings.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_notifications(**mock_args) + client.get_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=organizations/*/locations/*}/notifications" + "%s/v1/{name=organizations/*/locations/*/settings}" % client.transport._host, args[1], ) -def test_list_notifications_rest_flattened_error(transport: str = "rest"): +def test_get_settings_rest_flattened_error(transport: str = "rest"): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1726,81 +2786,26 @@ def test_list_notifications_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_notifications( - service.ListNotificationsRequest(), - parent="parent_value", + client.get_settings( + service.GetSettingsRequest(), + name="name_value", ) -def test_list_notifications_rest_pager(transport: str = "rest"): +def test_get_settings_rest_error(): client = AdvisoryNotificationsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListNotificationsResponse( - notifications=[ - service.Notification(), - service.Notification(), - service.Notification(), - ], - next_page_token="abc", - ), - service.ListNotificationsResponse( - notifications=[], - next_page_token="def", - ), - service.ListNotificationsResponse( - notifications=[ - service.Notification(), - ], - next_page_token="ghi", - ), - service.ListNotificationsResponse( - notifications=[ - service.Notification(), - service.Notification(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListNotificationsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "organizations/sample1/locations/sample2"} - - pager = client.list_notifications(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, service.Notification) for i in results) - - pages = list(client.list_notifications(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - service.GetNotificationRequest, + service.UpdateSettingsRequest, dict, ], ) -def test_get_notification_rest(request_type): +def test_update_settings_rest(request_type): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -1808,44 +2813,45 @@ def test_get_notification_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "organizations/sample1/locations/sample2/notifications/sample3" + "settings": {"name": "organizations/sample1/locations/sample2/settings"} + } + request_init["settings"] = { + "name": "organizations/sample1/locations/sample2/settings", + "notification_settings": {}, + "etag": "etag_value", } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.Notification( + return_value = service.Settings( name="name_value", - notification_type=service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY, + etag="etag_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = service.Notification.pb(return_value) + pb_return_value = service.Settings.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_notification(request) + response = client.update_settings(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, service.Notification) + assert isinstance(response, service.Settings) assert response.name == "name_value" - assert ( - response.notification_type - == service.NotificationType.NOTIFICATION_TYPE_SECURITY_PRIVACY_ADVISORY - ) + assert response.etag == "etag_value" -def test_get_notification_rest_required_fields( - request_type=service.GetNotificationRequest, +def test_update_settings_rest_required_fields( + request_type=service.UpdateSettingsRequest, ): transport_class = transports.AdvisoryNotificationsServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -1860,23 +2866,17 @@ def test_get_notification_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_notification._get_unset_required_fields(jsonified_request) + ).update_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_notification._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("language_code",)) + ).update_settings._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1885,7 +2885,7 @@ def test_get_notification_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.Notification() + return_value = service.Settings() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -1897,38 +2897,39 @@ def test_get_notification_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - pb_return_value = service.Notification.pb(return_value) + pb_return_value = service.Settings.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_notification(request) + response = client.update_settings(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_notification_rest_unset_required_fields(): +def test_update_settings_rest_unset_required_fields(): transport = transports.AdvisoryNotificationsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_notification._get_unset_required_fields({}) - assert set(unset_fields) == (set(("languageCode",)) & set(("name",))) + unset_fields = transport.update_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("settings",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_notification_rest_interceptors(null_interceptor): +def test_update_settings_rest_interceptors(null_interceptor): transport = transports.AdvisoryNotificationsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -1941,13 +2942,13 @@ def test_get_notification_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AdvisoryNotificationsServiceRestInterceptor, "post_get_notification" + transports.AdvisoryNotificationsServiceRestInterceptor, "post_update_settings" ) as post, mock.patch.object( - transports.AdvisoryNotificationsServiceRestInterceptor, "pre_get_notification" + transports.AdvisoryNotificationsServiceRestInterceptor, "pre_update_settings" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetNotificationRequest.pb(service.GetNotificationRequest()) + pb_message = service.UpdateSettingsRequest.pb(service.UpdateSettingsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -1958,17 +2959,17 @@ def test_get_notification_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = service.Notification.to_json(service.Notification()) + req.return_value._content = service.Settings.to_json(service.Settings()) - request = service.GetNotificationRequest() + request = service.UpdateSettingsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.Notification() + post.return_value = service.Settings() - client.get_notification( + client.update_settings( request, metadata=[ ("key", "val"), @@ -1980,8 +2981,8 @@ def test_get_notification_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_notification_rest_bad_request( - transport: str = "rest", 
request_type=service.GetNotificationRequest +def test_update_settings_rest_bad_request( + transport: str = "rest", request_type=service.UpdateSettingsRequest ): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1990,7 +2991,12 @@ def test_get_notification_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "organizations/sample1/locations/sample2/notifications/sample3" + "settings": {"name": "organizations/sample1/locations/sample2/settings"} + } + request_init["settings"] = { + "name": "organizations/sample1/locations/sample2/settings", + "notification_settings": {}, + "etag": "etag_value", } request = request_type(**request_init) @@ -2003,10 +3009,10 @@ def test_get_notification_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_notification(request) + client.update_settings(request) -def test_get_notification_rest_flattened(): +def test_update_settings_rest_flattened(): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2015,41 +3021,41 @@ def test_get_notification_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = service.Notification() + return_value = service.Settings() # get arguments that satisfy an http rule for this method sample_request = { - "name": "organizations/sample1/locations/sample2/notifications/sample3" + "settings": {"name": "organizations/sample1/locations/sample2/settings"} } # get truthy value for each flattened field mock_args = dict( - name="name_value", + settings=service.Settings(name="name_value"), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = service.Notification.pb(return_value) + pb_return_value = service.Settings.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_notification(**mock_args) + client.update_settings(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=organizations/*/locations/*/notifications/*}" + "%s/v1/{settings.name=organizations/*/locations/*/settings}" % client.transport._host, args[1], ) -def test_get_notification_rest_flattened_error(transport: str = "rest"): +def test_update_settings_rest_flattened_error(transport: str = "rest"): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2058,13 +3064,13 @@ def test_get_notification_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
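    # The ValueError is raised by the client up front, before any transport
    # work happens, so no HTTP session mock is needed for this negative case.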
with pytest.raises(ValueError): - client.get_notification( - service.GetNotificationRequest(), - name="name_value", + client.update_settings( + service.UpdateSettingsRequest(), + settings=service.Settings(name="name_value"), ) -def test_get_notification_rest_error(): +def test_update_settings_rest_error(): client = AdvisoryNotificationsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2211,6 +3217,8 @@ def test_advisory_notifications_service_base_transport(): methods = ( "list_notifications", "get_notification", + "get_settings", + "update_settings", ) for method in methods: with pytest.raises(NotImplementedError): @@ -2486,6 +3494,12 @@ def test_advisory_notifications_service_client_transport_session_collision( session1 = client1.transport.get_notification._session session2 = client2.transport.get_notification._session assert session1 != session2 + session1 = client1.transport.get_settings._session + session2 = client2.transport.get_settings._session + assert session1 != session2 + session1 = client1.transport.update_settings._session + session2 = client2.transport.update_settings._session + assert session1 != session2 def test_advisory_notifications_service_grpc_transport_channel(): @@ -2644,8 +3658,31 @@ def test_parse_notification_path(): assert expected == actual +def test_settings_path(): + organization = "cuttlefish" + location = "mussel" + expected = "organizations/{organization}/locations/{location}/settings".format( + organization=organization, + location=location, + ) + actual = AdvisoryNotificationsServiceClient.settings_path(organization, location) + assert expected == actual + + +def test_parse_settings_path(): + expected = { + "organization": "winkle", + "location": "nautilus", + } + path = AdvisoryNotificationsServiceClient.settings_path(**expected) + + # Check that the path construction is reversible. 
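+    # settings_path and parse_settings_path are inverses over the template
+    # "organizations/{organization}/locations/{location}/settings": building
+    # a path from the expected kwargs and parsing it back must reproduce the
+    # original dict, which is exactly what the assertion below verifies.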
+ actual = AdvisoryNotificationsServiceClient.parse_settings_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2657,7 +3694,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "abalone", } path = AdvisoryNotificationsServiceClient.common_billing_account_path(**expected) @@ -2667,7 +3704,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -2677,7 +3714,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "clam", } path = AdvisoryNotificationsServiceClient.common_folder_path(**expected) @@ -2687,7 +3724,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -2697,7 +3734,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "octopus", } path = AdvisoryNotificationsServiceClient.common_organization_path(**expected) @@ -2707,7 +3744,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -2717,7 +3754,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "nudibranch", } path = AdvisoryNotificationsServiceClient.common_project_path(**expected) @@ -2727,8 +3764,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2739,8 +3776,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "winkle", + "location": "nautilus", } path = AdvisoryNotificationsServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-artifact-registry/CONTRIBUTING.rst b/packages/google-cloud-artifact-registry/CONTRIBUTING.rst index 8d91ea49b23f..983eba978a12 100644 --- a/packages/google-cloud-artifact-registry/CONTRIBUTING.rst +++ b/packages/google-cloud-artifact-registry/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system- -- -k + $ nox -s system-3.11 -- -k .. note:: - System tests are only configured to run under Python. + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local diff --git a/packages/google-cloud-artifact-registry/noxfile.py b/packages/google-cloud-artifact-registry/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-artifact-registry/noxfile.py +++ b/packages/google-cloud-artifact-registry/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. - if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-batch/CONTRIBUTING.rst b/packages/google-cloud-batch/CONTRIBUTING.rst index 5d0578dbaf3a..8be652b36cd9 100644 --- a/packages/google-cloud-batch/CONTRIBUTING.rst +++ b/packages/google-cloud-batch/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system- -- -k + $ nox -s system-3.11 -- -k .. note:: - System tests are only configured to run under Python. + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 3bd552ddd8cc..2bcfd3b44e17 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -523,10 +523,11 @@ class Disk(proto.Message): disk size 30 GB, you can only use this field to make the disk larger or equal to 30 GB. disk_interface (str): - Local SSDs are available through both "SCSI" - and "NVMe" interfaces. If not indicated, "NVMe" - will be the default one for local ssds. We only - support "SCSI" for persistent disks now. + Local SSDs are available through both "SCSI" and "NVMe" + interfaces. If not indicated, "NVMe" will be the default one + for local ssds. This field is ignored for persistent disks + as the interface is chosen automatically. See + https://cloud.google.com/compute/docs/disks/persistent-disks#choose_an_interface. """ image: str = proto.Field( diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py index e53bb549ef58..44a32f7933b2 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/job.py @@ -643,10 +643,11 @@ class Disk(proto.Message): disk size 30 GB, you can only use this field to make the disk larger or equal to 30 GB. disk_interface (str): - Local SSDs are available through both "SCSI" - and "NVMe" interfaces. If not indicated, "NVMe" - will be the default one for local ssds. 
We only - support "SCSI" for persistent disks now. + Local SSDs are available through both "SCSI" and "NVMe" + interfaces. If not indicated, "NVMe" will be the default one + for local ssds. This field is ignored for persistent disks + as the interface is chosen automatically. See + https://cloud.google.com/compute/docs/disks/persistent-disks#choose_an_interface. """ image: str = proto.Field( diff --git a/packages/google-cloud-batch/noxfile.py b/packages/google-cloud-batch/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-batch/noxfile.py +++ b/packages/google-cloud-batch/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. - if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-bigquery-analyticshub/CONTRIBUTING.rst b/packages/google-cloud-bigquery-analyticshub/CONTRIBUTING.rst index 93146e21e0aa..cf7dfbb1e8e0 100644 --- a/packages/google-cloud-bigquery-analyticshub/CONTRIBUTING.rst +++ b/packages/google-cloud-bigquery-analyticshub/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system- -- -k <name of test> + $ nox -s system-3.11 -- -k <name of test> .. note:: - System tests are only configured to run under Python. + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests.
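The noxfile edits above flip ``SYSTEM_TEST_PYTHON_VERSIONS`` from ``[]`` to four versions, which is what makes a session name like ``system-3.11`` resolvable. A minimal sketch of how nox expands such a version list into per-version sessions (simplified, not the repository's actual noxfile; the dependencies and test path are taken from the noxfile hunks above):

.. code-block:: python

    import nox

    SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"]

    @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
    def system(session):
        """Run system tests; registers sessions system-3.8 ... system-3.11."""
        session.install("mock", "pytest")
        session.install("-e", ".")
        session.run(
            "py.test",
            "--verbose",
            f"--junitxml=system_{session.python}_sponge_log.xml",
            "tests/system",
            *session.posargs,
        )

Everything after ``--`` on the ``nox`` command line lands in ``session.posargs``, so a filter like ``-k <name of test>`` reaches pytest unchanged.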
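And on the ``disk_interface`` docstrings rewritten in the batch ``job.py`` hunks above: the field is only meaningful for local SSDs, and for persistent disks it is now documented as ignored rather than SCSI-only. A hedged usage sketch; the ``AllocationPolicy.Disk`` nesting and the ``type_``/``size_gb`` field names are assumptions drawn from the surrounding message definition, not confirmed by this diff:

.. code-block:: python

    from google.cloud import batch_v1

    # Local SSD: the interface may be set explicitly; "NVMe" is the
    # documented default when disk_interface is left unset.
    local_ssd = batch_v1.AllocationPolicy.Disk(
        type_="local-ssd",  # assumed disk type value
        size_gb=375,
        disk_interface="NVMe",
    )

    # Persistent disk: disk_interface is ignored here; the interface is
    # chosen automatically per the updated docstring.
    boot_disk = batch_v1.AllocationPolicy.Disk(
        type_="pd-balanced",  # assumed disk type value
        size_gb=100,
    )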
You'll need to change some local diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/__init__.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/__init__.py index f2b4dd873d98..51901b22986f 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/__init__.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/__init__.py @@ -31,10 +31,12 @@ DataProvider, DeleteDataExchangeRequest, DeleteListingRequest, + DeleteSubscriptionRequest, DestinationDataset, DestinationDatasetReference, GetDataExchangeRequest, GetListingRequest, + GetSubscriptionRequest, ListDataExchangesRequest, ListDataExchangesResponse, Listing, @@ -42,9 +44,22 @@ ListListingsResponse, ListOrgDataExchangesRequest, ListOrgDataExchangesResponse, + ListSharedResourceSubscriptionsRequest, + ListSharedResourceSubscriptionsResponse, + ListSubscriptionsRequest, + ListSubscriptionsResponse, + OperationMetadata, Publisher, + RefreshSubscriptionRequest, + RefreshSubscriptionResponse, + RevokeSubscriptionRequest, + RevokeSubscriptionResponse, + SharingEnvironmentConfig, + SubscribeDataExchangeRequest, + SubscribeDataExchangeResponse, SubscribeListingRequest, SubscribeListingResponse, + Subscription, UpdateDataExchangeRequest, UpdateListingRequest, ) @@ -58,10 +73,12 @@ "DataProvider", "DeleteDataExchangeRequest", "DeleteListingRequest", + "DeleteSubscriptionRequest", "DestinationDataset", "DestinationDatasetReference", "GetDataExchangeRequest", "GetListingRequest", + "GetSubscriptionRequest", "ListDataExchangesRequest", "ListDataExchangesResponse", "Listing", @@ -69,9 +86,22 @@ "ListListingsResponse", "ListOrgDataExchangesRequest", "ListOrgDataExchangesResponse", + "ListSharedResourceSubscriptionsRequest", + "ListSharedResourceSubscriptionsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "OperationMetadata", "Publisher", + "RefreshSubscriptionRequest", + "RefreshSubscriptionResponse", + "RevokeSubscriptionRequest", + "RevokeSubscriptionResponse", + "SharingEnvironmentConfig", + "SubscribeDataExchangeRequest", + "SubscribeDataExchangeResponse", "SubscribeListingRequest", "SubscribeListingResponse", + "Subscription", "UpdateDataExchangeRequest", "UpdateListingRequest", ) diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/__init__.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/__init__.py index 6d2c3d036c35..ffd9c8bd5836 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/__init__.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/__init__.py @@ -29,10 +29,12 @@ DataProvider, DeleteDataExchangeRequest, DeleteListingRequest, + DeleteSubscriptionRequest, DestinationDataset, DestinationDatasetReference, GetDataExchangeRequest, GetListingRequest, + GetSubscriptionRequest, ListDataExchangesRequest, ListDataExchangesResponse, Listing, @@ -40,9 +42,22 @@ ListListingsResponse, ListOrgDataExchangesRequest, ListOrgDataExchangesResponse, + ListSharedResourceSubscriptionsRequest, + ListSharedResourceSubscriptionsResponse, + ListSubscriptionsRequest, + ListSubscriptionsResponse, + OperationMetadata, Publisher, + RefreshSubscriptionRequest, + RefreshSubscriptionResponse, + RevokeSubscriptionRequest, + RevokeSubscriptionResponse, + SharingEnvironmentConfig, + SubscribeDataExchangeRequest, + SubscribeDataExchangeResponse, 
SubscribeListingRequest, SubscribeListingResponse, + Subscription, UpdateDataExchangeRequest, UpdateListingRequest, ) @@ -56,20 +71,35 @@ "DataProvider", "DeleteDataExchangeRequest", "DeleteListingRequest", + "DeleteSubscriptionRequest", "DestinationDataset", "DestinationDatasetReference", "GetDataExchangeRequest", "GetListingRequest", + "GetSubscriptionRequest", "ListDataExchangesRequest", "ListDataExchangesResponse", "ListListingsRequest", "ListListingsResponse", "ListOrgDataExchangesRequest", "ListOrgDataExchangesResponse", + "ListSharedResourceSubscriptionsRequest", + "ListSharedResourceSubscriptionsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", "Listing", + "OperationMetadata", "Publisher", + "RefreshSubscriptionRequest", + "RefreshSubscriptionResponse", + "RevokeSubscriptionRequest", + "RevokeSubscriptionResponse", + "SharingEnvironmentConfig", + "SubscribeDataExchangeRequest", + "SubscribeDataExchangeResponse", "SubscribeListingRequest", "SubscribeListingResponse", + "Subscription", "UpdateDataExchangeRequest", "UpdateListingRequest", ) diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_metadata.json b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_metadata.json index c6fe5bf84176..35fc21fe2c76 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_metadata.json +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_metadata.json @@ -30,6 +30,11 @@ "delete_listing" ] }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, "GetDataExchange": { "methods": [ "get_data_exchange" @@ -45,6 +50,11 @@ "get_listing" ] }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, "ListDataExchanges": { "methods": [ "list_data_exchanges" @@ -60,11 +70,36 @@ "list_org_data_exchanges" ] }, + "ListSharedResourceSubscriptions": { + "methods": [ + "list_shared_resource_subscriptions" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "RefreshSubscription": { + "methods": [ + "refresh_subscription" + ] + }, + "RevokeSubscription": { + "methods": [ + "revoke_subscription" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" ] }, + "SubscribeDataExchange": { + "methods": [ + "subscribe_data_exchange" + ] + }, "SubscribeListing": { "methods": [ "subscribe_listing" @@ -110,6 +145,11 @@ "delete_listing" ] }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, "GetDataExchange": { "methods": [ "get_data_exchange" @@ -125,6 +165,11 @@ "get_listing" ] }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, "ListDataExchanges": { "methods": [ "list_data_exchanges" @@ -140,11 +185,36 @@ "list_org_data_exchanges" ] }, + "ListSharedResourceSubscriptions": { + "methods": [ + "list_shared_resource_subscriptions" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "RefreshSubscription": { + "methods": [ + "refresh_subscription" + ] + }, + "RevokeSubscription": { + "methods": [ + "revoke_subscription" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" ] }, + "SubscribeDataExchange": { + "methods": [ + "subscribe_data_exchange" + ] + }, "SubscribeListing": { "methods": [ "subscribe_listing" diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py 
b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py index 216227f1825a..5f60bdb0c055 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py @@ -42,9 +42,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service import pagers from google.cloud.bigquery_analyticshub_v1.types import analyticshub @@ -77,6 +82,10 @@ class AnalyticsHubServiceAsyncClient: parse_dataset_path = staticmethod(AnalyticsHubServiceClient.parse_dataset_path) listing_path = staticmethod(AnalyticsHubServiceClient.listing_path) parse_listing_path = staticmethod(AnalyticsHubServiceClient.parse_listing_path) + subscription_path = staticmethod(AnalyticsHubServiceClient.subscription_path) + parse_subscription_path = staticmethod( + AnalyticsHubServiceClient.parse_subscription_path + ) common_billing_account_path = staticmethod( AnalyticsHubServiceClient.common_billing_account_path ) @@ -1565,6 +1574,820 @@ async def sample_subscribe_listing(): # Done; return the response. return response + async def subscribe_data_exchange( + self, + request: Optional[ + Union[analyticshub.SubscribeDataExchangeRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Subscription to a Data Exchange. This is a + long-running operation as it will create one or more + linked datasets. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + async def sample_subscribe_data_exchange(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest( + name="name_value", + destination="destination_value", + subscription="subscription_value", + ) + + # Make the request + operation = client.subscribe_data_exchange(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeRequest, dict]]): + The request object. 
Message for subscribing to a Data + Exchange. + name (:class:`str`): + Required. Resource name of the Data Exchange. e.g. + ``projects/publisherproject/locations/US/dataExchanges/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeResponse` + Message for response when you subscribe to a Data + Exchange. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analyticshub.SubscribeDataExchangeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.subscribe_data_exchange, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analyticshub.SubscribeDataExchangeResponse, + metadata_type=analyticshub.OperationMetadata, + ) + + # Done; return the response. + return response + + async def refresh_subscription( + self, + request: Optional[Union[analyticshub.RefreshSubscriptionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Refreshes a Subscription to a Data Exchange. A Data + Exchange can become stale when a publisher adds or + removes data. This is a long-running operation as it may + create many linked datasets. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + async def sample_refresh_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.RefreshSubscriptionRequest( + name="name_value", + ) + + # Make the request + operation = client.refresh_subscription(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionRequest, dict]]): + The request object. Message for refreshing a + subscription. + name (:class:`str`): + Required. Resource name of the Subscription to refresh. + e.g. + ``projects/subscriberproject/locations/US/subscriptions/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionResponse` + Message for response when you refresh a subscription. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analyticshub.RefreshSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.refresh_subscription, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analyticshub.RefreshSubscriptionResponse, + metadata_type=analyticshub.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_subscription( + self, + request: Optional[Union[analyticshub.GetSubscriptionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyticshub.Subscription: + r"""Gets the details of a Subscription. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + async def sample_get_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.GetSubscriptionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.GetSubscriptionRequest, dict]]): + The request object. Message for getting a subscription. + name (:class:`str`): + Required. Resource name of the + subscription. e.g. + projects/123/locations/US/subscriptions/456 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_analyticshub_v1.types.Subscription: + A subscription represents a + subscribers' access to a particular set + of published data. It contains + references to associated listings, data + exchanges, and linked datasets. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analyticshub.GetSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_subscription, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_subscriptions( + self, + request: Optional[Union[analyticshub.ListSubscriptionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubscriptionsAsyncPager: + r"""Lists all subscriptions in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + async def sample_list_subscriptions(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.ListSubscriptionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest, dict]]): + The request object. Message for listing subscriptions. + parent (:class:`str`): + Required. The parent resource path of + the subscription. e.g. + projects/myproject/locations/US + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSubscriptionsAsyncPager: + Message for response to the listing + of subscriptions. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analyticshub.ListSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_subscriptions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSubscriptionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_shared_resource_subscriptions( + self, + request: Optional[ + Union[analyticshub.ListSharedResourceSubscriptionsRequest, dict] + ] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSharedResourceSubscriptionsAsyncPager: + r"""Lists all subscriptions on a given Data Exchange or + Listing. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + async def sample_list_shared_resource_subscriptions(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.ListSharedResourceSubscriptionsRequest( + resource="resource_value", + ) + + # Make the request + page_result = client.list_shared_resource_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest, dict]]): + The request object. Message for listing subscriptions of + a shared resource. + resource (:class:`str`): + Required. Resource name of the + requested target. This resource may be + either a Listing or a DataExchange. e.g. + projects/123/locations/US/dataExchanges/456 + OR e.g. + projects/123/locations/US/dataExchanges/456/listings/789 + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSharedResourceSubscriptionsAsyncPager: + Message for response to the listing + of shared resource subscriptions. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analyticshub.ListSharedResourceSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_shared_resource_subscriptions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSharedResourceSubscriptionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def revoke_subscription( + self, + request: Optional[Union[analyticshub.RevokeSubscriptionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyticshub.RevokeSubscriptionResponse: + r"""Revokes a given subscription. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + async def sample_revoke_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.RevokeSubscriptionRequest( + name="name_value", + ) + + # Make the request + response = await client.revoke_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionRequest, dict]]): + The request object. Message for revoking a subscription. + name (:class:`str`): + Required. Resource name of the + subscription to revoke. e.g. + projects/123/locations/US/subscriptions/456 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionResponse: + Message for response when you revoke + a subscription. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = analyticshub.RevokeSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.revoke_subscription, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_subscription( + self, + request: Optional[Union[analyticshub.DeleteSubscriptionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a subscription. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + async def sample_delete_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.DeleteSubscriptionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_subscription(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.DeleteSubscriptionRequest, dict]]): + The request object. Message for deleting a subscription. + name (:class:`str`): + Required. Resource name of the + subscription to delete. e.g. + projects/123/locations/US/subscriptions/456 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = analyticshub.DeleteSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_subscription, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=analyticshub.OperationMetadata, + ) + + # Done; return the response. + return response + async def get_iam_policy( self, request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py index 4692013ce871..803fbf114005 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py @@ -46,9 +46,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service import pagers from google.cloud.bigquery_analyticshub_v1.types import analyticshub @@ -249,6 +254,28 @@ def parse_listing_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def subscription_path( + project: str, + location: str, + subscription: str, + ) -> str: + """Returns a fully-qualified subscription string.""" + return "projects/{project}/locations/{location}/subscriptions/{subscription}".format( + project=project, + location=location, + subscription=subscription, + ) + + @staticmethod + def parse_subscription_path(path: str) -> Dict[str, str]: + """Parses a subscription path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/subscriptions/(?P<subscription>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -1826,6 +1853,822 @@ def sample_subscribe_listing(): # Done; return the response.
return response + def subscribe_data_exchange( + self, + request: Optional[ + Union[analyticshub.SubscribeDataExchangeRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Subscription to a Data Exchange. This is a + long-running operation as it will create one or more + linked datasets. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + def sample_subscribe_data_exchange(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest( + name="name_value", + destination="destination_value", + subscription="subscription_value", + ) + + # Make the request + operation = client.subscribe_data_exchange(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeRequest, dict]): + The request object. Message for subscribing to a Data + Exchange. + name (str): + Required. Resource name of the Data Exchange. e.g. + ``projects/publisherproject/locations/US/dataExchanges/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeResponse` + Message for response when you subscribe to a Data + Exchange. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analyticshub.SubscribeDataExchangeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analyticshub.SubscribeDataExchangeRequest): + request = analyticshub.SubscribeDataExchangeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.subscribe_data_exchange] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analyticshub.SubscribeDataExchangeResponse, + metadata_type=analyticshub.OperationMetadata, + ) + + # Done; return the response. + return response + + def refresh_subscription( + self, + request: Optional[Union[analyticshub.RefreshSubscriptionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Refreshes a Subscription to a Data Exchange. A Data + Exchange can become stale when a publisher adds or + removes data. This is a long-running operation as it may + create many linked datasets. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + def sample_refresh_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.RefreshSubscriptionRequest( + name="name_value", + ) + + # Make the request + operation = client.refresh_subscription(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionRequest, dict]): + The request object. Message for refreshing a + subscription. + name (str): + Required. Resource name of the Subscription to refresh. + e.g. + ``projects/subscriberproject/locations/US/subscriptions/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionResponse` + Message for response when you refresh a subscription. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a analyticshub.RefreshSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analyticshub.RefreshSubscriptionRequest): + request = analyticshub.RefreshSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.refresh_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analyticshub.RefreshSubscriptionResponse, + metadata_type=analyticshub.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_subscription( + self, + request: Optional[Union[analyticshub.GetSubscriptionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyticshub.Subscription: + r"""Gets the details of a Subscription. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + def sample_get_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.GetSubscriptionRequest( + name="name_value", + ) + + # Make the request + response = client.get_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_analyticshub_v1.types.GetSubscriptionRequest, dict]): + The request object. Message for getting a subscription. + name (str): + Required. Resource name of the + subscription. e.g. + projects/123/locations/US/subscriptions/456 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_analyticshub_v1.types.Subscription: + A subscription represents a + subscribers' access to a particular set + of published data. It contains + references to associated listings, data + exchanges, and linked datasets. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analyticshub.GetSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analyticshub.GetSubscriptionRequest): + request = analyticshub.GetSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_subscriptions( + self, + request: Optional[Union[analyticshub.ListSubscriptionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubscriptionsPager: + r"""Lists all subscriptions in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + def sample_list_subscriptions(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.ListSubscriptionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest, dict]): + The request object. Message for listing subscriptions. + parent (str): + Required. The parent resource path of + the subscription. e.g. + projects/myproject/locations/US + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSubscriptionsPager: + Message for response to the listing + of subscriptions. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analyticshub.ListSubscriptionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analyticshub.ListSubscriptionsRequest): + request = analyticshub.ListSubscriptionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_subscriptions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSubscriptionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_shared_resource_subscriptions( + self, + request: Optional[ + Union[analyticshub.ListSharedResourceSubscriptionsRequest, dict] + ] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSharedResourceSubscriptionsPager: + r"""Lists all subscriptions on a given Data Exchange or + Listing. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + def sample_list_shared_resource_subscriptions(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.ListSharedResourceSubscriptionsRequest( + resource="resource_value", + ) + + # Make the request + page_result = client.list_shared_resource_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest, dict]): + The request object. Message for listing subscriptions of + a shared resource. + resource (str): + Required. Resource name of the + requested target. This resource may be + either a Listing or a DataExchange. e.g. + projects/123/locations/US/dataExchanges/456 + OR e.g. 
+ projects/123/locations/US/dataExchanges/456/listings/789 + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSharedResourceSubscriptionsPager: + Message for response to the listing + of shared resource subscriptions. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analyticshub.ListSharedResourceSubscriptionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analyticshub.ListSharedResourceSubscriptionsRequest): + request = analyticshub.ListSharedResourceSubscriptionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_shared_resource_subscriptions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSharedResourceSubscriptionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def revoke_subscription( + self, + request: Optional[Union[analyticshub.RevokeSubscriptionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyticshub.RevokeSubscriptionResponse: + r"""Revokes a given subscription. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + def sample_revoke_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.RevokeSubscriptionRequest( + name="name_value", + ) + + # Make the request + response = client.revoke_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionRequest, dict]): + The request object. Message for revoking a subscription. + name (str): + Required. Resource name of the + subscription to revoke. e.g. + projects/123/locations/US/subscriptions/456 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionResponse: + Message for response when you revoke + a subscription. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analyticshub.RevokeSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analyticshub.RevokeSubscriptionRequest): + request = analyticshub.RevokeSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.revoke_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_subscription( + self, + request: Optional[Union[analyticshub.DeleteSubscriptionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a subscription. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_analyticshub_v1 + + def sample_delete_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.DeleteSubscriptionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_subscription(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_analyticshub_v1.types.DeleteSubscriptionRequest, dict]): + The request object. Message for deleting a subscription. + name (str): + Required. Resource name of the + subscription to delete. e.g. + projects/123/locations/US/subscriptions/456 + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a analyticshub.DeleteSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, analyticshub.DeleteSubscriptionRequest): + request = analyticshub.DeleteSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=analyticshub.OperationMetadata, + ) + + # Done; return the response. 
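+        # Note: `response` here is a `google.api_core.operation.Operation`
+        # future, not the final result. Callers typically block until the
+        # long-running delete completes via `response.result()`, as the
+        # sample above shows.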
+ return response + def get_iam_policy( self, request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/pagers.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/pagers.py index 0e2cde912672..7df415d79935 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/pagers.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/pagers.py @@ -409,3 +409,263 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSubscriptionsPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., analyticshub.ListSubscriptionsResponse], + request: analyticshub.ListSubscriptionsRequest, + response: analyticshub.ListSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest): + The initial request object. + response (google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analyticshub.ListSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analyticshub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyticshub.Subscription]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSubscriptionsAsyncPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. 
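+
+    A minimal consumption sketch (assuming an already-constructed async
+    client ``client`` and request ``request``)::
+
+        async for subscription in await client.list_subscriptions(request=request):
+            print(subscription.name)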
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[analyticshub.ListSubscriptionsResponse]], + request: analyticshub.ListSubscriptionsRequest, + response: analyticshub.ListSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest): + The initial request object. + response (google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analyticshub.ListSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[analyticshub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[analyticshub.Subscription]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSharedResourceSubscriptionsPager: + """A pager for iterating through ``list_shared_resource_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``shared_resource_subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSharedResourceSubscriptions`` requests and continue to iterate + through the ``shared_resource_subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., analyticshub.ListSharedResourceSubscriptionsResponse], + request: analyticshub.ListSharedResourceSubscriptionsRequest, + response: analyticshub.ListSharedResourceSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest): + The initial request object. 
+ response (google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = analyticshub.ListSharedResourceSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[analyticshub.ListSharedResourceSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyticshub.Subscription]: + for page in self.pages: + yield from page.shared_resource_subscriptions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSharedResourceSubscriptionsAsyncPager: + """A pager for iterating through ``list_shared_resource_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``shared_resource_subscriptions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSharedResourceSubscriptions`` requests and continue to iterate + through the ``shared_resource_subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[analyticshub.ListSharedResourceSubscriptionsResponse] + ], + request: analyticshub.ListSharedResourceSubscriptionsRequest, + response: analyticshub.ListSharedResourceSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest): + The initial request object. + response (google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = analyticshub.ListSharedResourceSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[analyticshub.ListSharedResourceSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[analyticshub.Subscription]: + async def async_generator(): + async for page in self.pages: + for response in page.shared_resource_subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/base.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/base.py index 4abc83534625..8a2bf5f5ec70 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/base.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/base.py @@ -18,12 +18,13 @@ import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore @@ -188,6 +189,41 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.subscribe_data_exchange: gapic_v1.method.wrap_method( + self.subscribe_data_exchange, + default_timeout=None, + client_info=client_info, + ), + self.refresh_subscription: gapic_v1.method.wrap_method( + self.refresh_subscription, + default_timeout=None, + client_info=client_info, + ), + self.get_subscription: gapic_v1.method.wrap_method( + self.get_subscription, + default_timeout=None, + client_info=client_info, + ), + self.list_subscriptions: gapic_v1.method.wrap_method( + self.list_subscriptions, + default_timeout=None, + client_info=client_info, + ), + self.list_shared_resource_subscriptions: gapic_v1.method.wrap_method( + self.list_shared_resource_subscriptions, + default_timeout=None, + client_info=client_info, + ), + self.revoke_subscription: gapic_v1.method.wrap_method( + self.revoke_subscription, + default_timeout=None, + client_info=client_info, + ), + self.delete_subscription: gapic_v1.method.wrap_method( + self.delete_subscription, + default_timeout=None, + client_info=client_info, + ), self.get_iam_policy: gapic_v1.method.wrap_method( self.get_iam_policy, default_timeout=None, @@ -214,6 +250,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + 
raise NotImplementedError() + @property def list_data_exchanges( self, @@ -334,6 +375,78 @@ def subscribe_listing( ]: raise NotImplementedError() + @property + def subscribe_data_exchange( + self, + ) -> Callable[ + [analyticshub.SubscribeDataExchangeRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def refresh_subscription( + self, + ) -> Callable[ + [analyticshub.RefreshSubscriptionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_subscription( + self, + ) -> Callable[ + [analyticshub.GetSubscriptionRequest], + Union[analyticshub.Subscription, Awaitable[analyticshub.Subscription]], + ]: + raise NotImplementedError() + + @property + def list_subscriptions( + self, + ) -> Callable[ + [analyticshub.ListSubscriptionsRequest], + Union[ + analyticshub.ListSubscriptionsResponse, + Awaitable[analyticshub.ListSubscriptionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_shared_resource_subscriptions( + self, + ) -> Callable[ + [analyticshub.ListSharedResourceSubscriptionsRequest], + Union[ + analyticshub.ListSharedResourceSubscriptionsResponse, + Awaitable[analyticshub.ListSharedResourceSubscriptionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def revoke_subscription( + self, + ) -> Callable[ + [analyticshub.RevokeSubscriptionRequest], + Union[ + analyticshub.RevokeSubscriptionResponse, + Awaitable[analyticshub.RevokeSubscriptionResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_subscription( + self, + ) -> Callable[ + [analyticshub.DeleteSubscriptionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_iam_policy( self, diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc.py index f7e0ddd92c06..e3bb7dea2f26 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc.py @@ -16,12 +16,13 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore @@ -119,6 +120,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -237,6 +239,20 @@ def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this 
service.""" return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + @property def list_data_exchanges( self, @@ -566,6 +582,208 @@ def subscribe_listing( ) return self._stubs["subscribe_listing"] + @property + def subscribe_data_exchange( + self, + ) -> Callable[ + [analyticshub.SubscribeDataExchangeRequest], operations_pb2.Operation + ]: + r"""Return a callable for the subscribe data exchange method over gRPC. + + Creates a Subscription to a Data Exchange. This is a + long-running operation as it will create one or more + linked datasets. + + Returns: + Callable[[~.SubscribeDataExchangeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "subscribe_data_exchange" not in self._stubs: + self._stubs["subscribe_data_exchange"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/SubscribeDataExchange", + request_serializer=analyticshub.SubscribeDataExchangeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["subscribe_data_exchange"] + + @property + def refresh_subscription( + self, + ) -> Callable[[analyticshub.RefreshSubscriptionRequest], operations_pb2.Operation]: + r"""Return a callable for the refresh subscription method over gRPC. + + Refreshes a Subscription to a Data Exchange. A Data + Exchange can become stale when a publisher adds or + removes data. This is a long-running operation as it may + create many linked datasets. + + Returns: + Callable[[~.RefreshSubscriptionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "refresh_subscription" not in self._stubs: + self._stubs["refresh_subscription"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/RefreshSubscription", + request_serializer=analyticshub.RefreshSubscriptionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["refresh_subscription"] + + @property + def get_subscription( + self, + ) -> Callable[[analyticshub.GetSubscriptionRequest], analyticshub.Subscription]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the details of a Subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_subscription" not in self._stubs: + self._stubs["get_subscription"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetSubscription", + request_serializer=analyticshub.GetSubscriptionRequest.serialize, + response_deserializer=analyticshub.Subscription.deserialize, + ) + return self._stubs["get_subscription"] + + @property + def list_subscriptions( + self, + ) -> Callable[ + [analyticshub.ListSubscriptionsRequest], analyticshub.ListSubscriptionsResponse + ]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists all subscriptions in a given project and + location. + + Returns: + Callable[[~.ListSubscriptionsRequest], + ~.ListSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subscriptions" not in self._stubs: + self._stubs["list_subscriptions"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListSubscriptions", + request_serializer=analyticshub.ListSubscriptionsRequest.serialize, + response_deserializer=analyticshub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs["list_subscriptions"] + + @property + def list_shared_resource_subscriptions( + self, + ) -> Callable[ + [analyticshub.ListSharedResourceSubscriptionsRequest], + analyticshub.ListSharedResourceSubscriptionsResponse, + ]: + r"""Return a callable for the list shared resource + subscriptions method over gRPC. + + Lists all subscriptions on a given Data Exchange or + Listing. + + Returns: + Callable[[~.ListSharedResourceSubscriptionsRequest], + ~.ListSharedResourceSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_shared_resource_subscriptions" not in self._stubs: + self._stubs[ + "list_shared_resource_subscriptions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListSharedResourceSubscriptions", + request_serializer=analyticshub.ListSharedResourceSubscriptionsRequest.serialize, + response_deserializer=analyticshub.ListSharedResourceSubscriptionsResponse.deserialize, + ) + return self._stubs["list_shared_resource_subscriptions"] + + @property + def revoke_subscription( + self, + ) -> Callable[ + [analyticshub.RevokeSubscriptionRequest], + analyticshub.RevokeSubscriptionResponse, + ]: + r"""Return a callable for the revoke subscription method over gRPC. + + Revokes a given subscription. + + Returns: + Callable[[~.RevokeSubscriptionRequest], + ~.RevokeSubscriptionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "revoke_subscription" not in self._stubs: + self._stubs["revoke_subscription"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/RevokeSubscription", + request_serializer=analyticshub.RevokeSubscriptionRequest.serialize, + response_deserializer=analyticshub.RevokeSubscriptionResponse.deserialize, + ) + return self._stubs["revoke_subscription"] + + @property + def delete_subscription( + self, + ) -> Callable[[analyticshub.DeleteSubscriptionRequest], operations_pb2.Operation]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes a subscription. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_subscription" not in self._stubs: + self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/DeleteSubscription", + request_serializer=analyticshub.DeleteSubscriptionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_subscription"] + @property def get_iam_policy( self, diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc_asyncio.py index 0ec089a27ffd..4ef63e87b3c3 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc_asyncio.py @@ -16,11 +16,12 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -165,6 +166,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -240,6 +242,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. 
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_data_exchanges( self, @@ -577,6 +595,215 @@ def subscribe_listing( ) return self._stubs["subscribe_listing"] + @property + def subscribe_data_exchange( + self, + ) -> Callable[ + [analyticshub.SubscribeDataExchangeRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the subscribe data exchange method over gRPC. + + Creates a Subscription to a Data Exchange. This is a + long-running operation as it will create one or more + linked datasets. + + Returns: + Callable[[~.SubscribeDataExchangeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "subscribe_data_exchange" not in self._stubs: + self._stubs["subscribe_data_exchange"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/SubscribeDataExchange", + request_serializer=analyticshub.SubscribeDataExchangeRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["subscribe_data_exchange"] + + @property + def refresh_subscription( + self, + ) -> Callable[ + [analyticshub.RefreshSubscriptionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the refresh subscription method over gRPC. + + Refreshes a Subscription to a Data Exchange. A Data + Exchange can become stale when a publisher adds or + removes data. This is a long-running operation as it may + create many linked datasets. + + Returns: + Callable[[~.RefreshSubscriptionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "refresh_subscription" not in self._stubs: + self._stubs["refresh_subscription"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/RefreshSubscription", + request_serializer=analyticshub.RefreshSubscriptionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["refresh_subscription"] + + @property + def get_subscription( + self, + ) -> Callable[ + [analyticshub.GetSubscriptionRequest], Awaitable[analyticshub.Subscription] + ]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the details of a Subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
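+        # On this transport the callable returns an awaitable; the async
+        # client layer (or the caller) must await it.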
+ if "get_subscription" not in self._stubs: + self._stubs["get_subscription"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetSubscription", + request_serializer=analyticshub.GetSubscriptionRequest.serialize, + response_deserializer=analyticshub.Subscription.deserialize, + ) + return self._stubs["get_subscription"] + + @property + def list_subscriptions( + self, + ) -> Callable[ + [analyticshub.ListSubscriptionsRequest], + Awaitable[analyticshub.ListSubscriptionsResponse], + ]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists all subscriptions in a given project and + location. + + Returns: + Callable[[~.ListSubscriptionsRequest], + Awaitable[~.ListSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subscriptions" not in self._stubs: + self._stubs["list_subscriptions"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListSubscriptions", + request_serializer=analyticshub.ListSubscriptionsRequest.serialize, + response_deserializer=analyticshub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs["list_subscriptions"] + + @property + def list_shared_resource_subscriptions( + self, + ) -> Callable[ + [analyticshub.ListSharedResourceSubscriptionsRequest], + Awaitable[analyticshub.ListSharedResourceSubscriptionsResponse], + ]: + r"""Return a callable for the list shared resource + subscriptions method over gRPC. + + Lists all subscriptions on a given Data Exchange or + Listing. + + Returns: + Callable[[~.ListSharedResourceSubscriptionsRequest], + Awaitable[~.ListSharedResourceSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_shared_resource_subscriptions" not in self._stubs: + self._stubs[ + "list_shared_resource_subscriptions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListSharedResourceSubscriptions", + request_serializer=analyticshub.ListSharedResourceSubscriptionsRequest.serialize, + response_deserializer=analyticshub.ListSharedResourceSubscriptionsResponse.deserialize, + ) + return self._stubs["list_shared_resource_subscriptions"] + + @property + def revoke_subscription( + self, + ) -> Callable[ + [analyticshub.RevokeSubscriptionRequest], + Awaitable[analyticshub.RevokeSubscriptionResponse], + ]: + r"""Return a callable for the revoke subscription method over gRPC. + + Revokes a given subscription. + + Returns: + Callable[[~.RevokeSubscriptionRequest], + Awaitable[~.RevokeSubscriptionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "revoke_subscription" not in self._stubs: + self._stubs["revoke_subscription"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/RevokeSubscription", + request_serializer=analyticshub.RevokeSubscriptionRequest.serialize, + response_deserializer=analyticshub.RevokeSubscriptionResponse.deserialize, + ) + return self._stubs["revoke_subscription"] + + @property + def delete_subscription( + self, + ) -> Callable[ + [analyticshub.DeleteSubscriptionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes a subscription. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_subscription" not in self._stubs: + self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/DeleteSubscription", + request_serializer=analyticshub.DeleteSubscriptionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_subscription"] + @property def get_iam_policy( self, diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/__init__.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/__init__.py index a4d0159f72fc..7bbb7b6285f8 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/__init__.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/__init__.py @@ -20,10 +20,12 @@ DataProvider, DeleteDataExchangeRequest, DeleteListingRequest, + DeleteSubscriptionRequest, DestinationDataset, DestinationDatasetReference, GetDataExchangeRequest, GetListingRequest, + GetSubscriptionRequest, ListDataExchangesRequest, ListDataExchangesResponse, Listing, @@ -31,9 +33,22 @@ ListListingsResponse, ListOrgDataExchangesRequest, ListOrgDataExchangesResponse, + ListSharedResourceSubscriptionsRequest, + ListSharedResourceSubscriptionsResponse, + ListSubscriptionsRequest, + ListSubscriptionsResponse, + OperationMetadata, Publisher, + RefreshSubscriptionRequest, + RefreshSubscriptionResponse, + RevokeSubscriptionRequest, + RevokeSubscriptionResponse, + SharingEnvironmentConfig, + SubscribeDataExchangeRequest, + SubscribeDataExchangeResponse, SubscribeListingRequest, SubscribeListingResponse, + Subscription, UpdateDataExchangeRequest, UpdateListingRequest, ) @@ -45,10 +60,12 @@ "DataProvider", "DeleteDataExchangeRequest", "DeleteListingRequest", + "DeleteSubscriptionRequest", "DestinationDataset", "DestinationDatasetReference", "GetDataExchangeRequest", "GetListingRequest", + "GetSubscriptionRequest", "ListDataExchangesRequest", "ListDataExchangesResponse", "Listing", @@ -56,9 +73,22 @@ "ListListingsResponse", "ListOrgDataExchangesRequest", "ListOrgDataExchangesResponse", + "ListSharedResourceSubscriptionsRequest", + "ListSharedResourceSubscriptionsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "OperationMetadata", "Publisher", + "RefreshSubscriptionRequest", + "RefreshSubscriptionResponse", + "RevokeSubscriptionRequest", + "RevokeSubscriptionResponse", + "SharingEnvironmentConfig", + 
"SubscribeDataExchangeRequest", + "SubscribeDataExchangeResponse", "SubscribeListingRequest", "SubscribeListingResponse", + "Subscription", "UpdateDataExchangeRequest", "UpdateListingRequest", ) diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py index fd6390dcac11..cf4c0582e147 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore import proto # type: ignore @@ -25,11 +26,13 @@ package="google.cloud.bigquery.analyticshub.v1", manifest={ "DataExchange", + "SharingEnvironmentConfig", "DataProvider", "Publisher", "DestinationDatasetReference", "DestinationDataset", "Listing", + "Subscription", "ListDataExchangesRequest", "ListDataExchangesResponse", "ListOrgDataExchangesRequest", @@ -46,6 +49,19 @@ "DeleteListingRequest", "SubscribeListingRequest", "SubscribeListingResponse", + "SubscribeDataExchangeRequest", + "SubscribeDataExchangeResponse", + "RefreshSubscriptionRequest", + "RefreshSubscriptionResponse", + "GetSubscriptionRequest", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "ListSharedResourceSubscriptionsRequest", + "ListSharedResourceSubscriptionsResponse", + "RevokeSubscriptionRequest", + "RevokeSubscriptionResponse", + "DeleteSubscriptionRequest", + "OperationMetadata", }, ) @@ -92,6 +108,9 @@ class DataExchange(proto.Message): of the fields are base64-encoded (which increases the size of the data by 33-36%) when using JSON on the wire. + sharing_environment_config (google.cloud.bigquery_analyticshub_v1.types.SharingEnvironmentConfig): + Optional. Configurable data sharing + environment option for a data exchange. """ name: str = proto.Field( @@ -122,6 +141,62 @@ class DataExchange(proto.Message): proto.BYTES, number=7, ) + sharing_environment_config: "SharingEnvironmentConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="SharingEnvironmentConfig", + ) + + +class SharingEnvironmentConfig(proto.Message): + r"""Sharing environment is a behavior model for sharing data + within a data exchange. This option is configurable for a data + exchange. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + default_exchange_config (google.cloud.bigquery_analyticshub_v1.types.SharingEnvironmentConfig.DefaultExchangeConfig): + Default Analytics Hub data exchange, used for + secured data sharing. + + This field is a member of `oneof`_ ``environment``. + dcr_exchange_config (google.cloud.bigquery_analyticshub_v1.types.SharingEnvironmentConfig.DcrExchangeConfig): + Data Clean Room (DCR), used for privacy-safe + and secured data sharing. + + This field is a member of `oneof`_ ``environment``. 
+ """ + + class DefaultExchangeConfig(proto.Message): + r"""Default Analytics Hub data exchange, used for secured data + sharing. + + """ + + class DcrExchangeConfig(proto.Message): + r"""Data Clean Room (DCR), used for privacy-safe and secured data + sharing. + + """ + + default_exchange_config: DefaultExchangeConfig = proto.Field( + proto.MESSAGE, + number=1, + oneof="environment", + message=DefaultExchangeConfig, + ) + dcr_exchange_config: DcrExchangeConfig = proto.Field( + proto.MESSAGE, + number=2, + oneof="environment", + message=DcrExchangeConfig, + ) class DataProvider(proto.Message): @@ -273,8 +348,8 @@ class Listing(proto.Message): non-characters and C0 and C1 control codes except tabs (HT), new lines (LF), carriage returns (CR), and page breaks (FF). Default - value is an empty string. - Max length: 2000 bytes. + value is an empty string. Max length: 2000 + bytes. primary_contact (str): Optional. Email or URL of the primary point of contact of the listing. Max Length: 1000 @@ -307,6 +382,10 @@ class Listing(proto.Message): of the listing. Subscribers can use this reference to request access. Max Length: 1000 bytes. + restricted_export_config (google.cloud.bigquery_analyticshub_v1.types.Listing.RestrictedExportConfig): + Optional. If set, restricted export + configuration will be propagated and enforced on + the linked dataset. """ class State(proto.Enum): @@ -408,6 +487,35 @@ class BigQueryDatasetSource(proto.Message): number=1, ) + class RestrictedExportConfig(proto.Message): + r"""Restricted export config, used to configure restricted export + on linked dataset. + + Attributes: + enabled (bool): + Optional. If true, enable restricted export. + restrict_direct_table_access (bool): + Output only. If true, restrict direct table + access(read api/tabledata.list) on linked table. + restrict_query_result (bool): + Optional. If true, restrict export of query + result derived from restricted linked dataset + table. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + restrict_direct_table_access: bool = proto.Field( + proto.BOOL, + number=1, + ) + restrict_query_result: bool = proto.Field( + proto.BOOL, + number=2, + ) + bigquery_dataset: BigQueryDatasetSource = proto.Field( proto.MESSAGE, number=6, @@ -462,6 +570,157 @@ class BigQueryDatasetSource(proto.Message): proto.STRING, number=12, ) + restricted_export_config: RestrictedExportConfig = proto.Field( + proto.MESSAGE, + number=13, + message=RestrictedExportConfig, + ) + + +class Subscription(proto.Message): + r"""A subscription represents a subscribers' access to a + particular set of published data. It contains references to + associated listings, data exchanges, and linked datasets. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + listing (str): + Output only. Resource name of the source + Listing. e.g. + projects/123/locations/US/dataExchanges/456/listings/789 + + This field is a member of `oneof`_ ``resource_name``. + data_exchange (str): + Output only. Resource name of the source Data + Exchange. e.g. + projects/123/locations/US/dataExchanges/456 + + This field is a member of `oneof`_ ``resource_name``. + name (str): + Output only. The resource name of the subscription. e.g. 
+ ``projects/myproject/locations/US/subscriptions/123``. + creation_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the subscription + was created. + last_modify_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp when the subscription + was last modified. + organization_id (str): + Output only. Organization of the project this + subscription belongs to. + organization_display_name (str): + Output only. Display name of the project of + this subscription. + state (google.cloud.bigquery_analyticshub_v1.types.Subscription.State): + Output only. Current state of the + subscription. + linked_dataset_map (MutableMapping[str, google.cloud.bigquery_analyticshub_v1.types.Subscription.LinkedResource]): + Output only. Map of listing resource names to associated + linked resource, e.g. + projects/123/locations/US/dataExchanges/456/listings/789 -> + projects/123/datasets/my_dataset + + For listing-level subscriptions, this is a map of size 1. + Only contains values if state == STATE_ACTIVE. + subscriber_contact (str): + Output only. Email of the subscriber. + """ + + class State(proto.Enum): + r"""State of the subscription. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + STATE_ACTIVE (1): + This subscription is active and the data is + accessible. + STATE_STALE (2): + The data referenced by this subscription is + out of date and should be refreshed. This can + happen when a data provider adds or removes + datasets. + STATE_INACTIVE (3): + This subscription has been cancelled or + revoked and the data is no longer accessible. + """ + STATE_UNSPECIFIED = 0 + STATE_ACTIVE = 1 + STATE_STALE = 2 + STATE_INACTIVE = 3 + + class LinkedResource(proto.Message): + r"""Reference to a linked resource tracked by this Subscription. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + linked_dataset (str): + Output only. Name of the linked dataset, e.g. + projects/subscriberproject/datasets/linked_dataset + + This field is a member of `oneof`_ ``reference``. + """ + + linked_dataset: str = proto.Field( + proto.STRING, + number=1, + oneof="reference", + ) + + listing: str = proto.Field( + proto.STRING, + number=5, + oneof="resource_name", + ) + data_exchange: str = proto.Field( + proto.STRING, + number=6, + oneof="resource_name", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + creation_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + last_modify_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + organization_id: str = proto.Field( + proto.STRING, + number=4, + ) + organization_display_name: str = proto.Field( + proto.STRING, + number=10, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + linked_dataset_map: MutableMapping[str, LinkedResource] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=8, + message=LinkedResource, + ) + subscriber_contact: str = proto.Field( + proto.STRING, + number=9, + ) class ListDataExchangesRequest(proto.Message): @@ -834,7 +1093,342 @@ class SubscribeListingRequest(proto.Message): class SubscribeListingResponse(proto.Message): - r"""Message for response when you subscribe to a listing.""" + r"""Message for response when you subscribe to a listing. 
+ + Attributes: + subscription (google.cloud.bigquery_analyticshub_v1.types.Subscription): + Subscription object created from this + subscribe action. + """ + + subscription: "Subscription" = proto.Field( + proto.MESSAGE, + number=1, + message="Subscription", + ) + + +class SubscribeDataExchangeRequest(proto.Message): + r"""Message for subscribing to a Data Exchange. + + Attributes: + name (str): + Required. Resource name of the Data Exchange. e.g. + ``projects/publisherproject/locations/US/dataExchanges/123`` + destination (str): + Required. The parent resource path of the Subscription. e.g. + ``projects/subscriberproject/locations/US`` + subscription (str): + Required. Name of the subscription to create. e.g. + ``subscription1`` + subscriber_contact (str): + Email of the subscriber. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + destination: str = proto.Field( + proto.STRING, + number=2, + ) + subscription: str = proto.Field( + proto.STRING, + number=4, + ) + subscriber_contact: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SubscribeDataExchangeResponse(proto.Message): + r"""Message for response when you subscribe to a Data Exchange. + + Attributes: + subscription (google.cloud.bigquery_analyticshub_v1.types.Subscription): + Subscription object created from this + subscribe action. + """ + + subscription: "Subscription" = proto.Field( + proto.MESSAGE, + number=1, + message="Subscription", + ) + + +class RefreshSubscriptionRequest(proto.Message): + r"""Message for refreshing a subscription. + + Attributes: + name (str): + Required. Resource name of the Subscription to refresh. e.g. + ``projects/subscriberproject/locations/US/subscriptions/123`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RefreshSubscriptionResponse(proto.Message): + r"""Message for response when you refresh a subscription. + + Attributes: + subscription (google.cloud.bigquery_analyticshub_v1.types.Subscription): + The refreshed subscription resource. + """ + + subscription: "Subscription" = proto.Field( + proto.MESSAGE, + number=1, + message="Subscription", + ) + + +class GetSubscriptionRequest(proto.Message): + r"""Message for getting a subscription. + + Attributes: + name (str): + Required. Resource name of the subscription. + e.g. projects/123/locations/US/subscriptions/456 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSubscriptionsRequest(proto.Message): + r"""Message for listing subscriptions. + + Attributes: + parent (str): + Required. The parent resource path of the + subscription. e.g. + projects/myproject/locations/US + filter (str): + The filter expression may be used to filter + by Data Exchange or Listing. + page_size (int): + The maximum number of results to return in a + single response page. + page_token (str): + Page token, returned by a previous call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSubscriptionsResponse(proto.Message): + r"""Message for response to the listing of subscriptions. + + Attributes: + subscriptions (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.Subscription]): + The list of subscriptions. + next_page_token (str): + Next page token. 
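+
+    An illustrative, non-generated sketch of consuming this response
+    through the client's pager, assuming an existing ``client`` and
+    ``request``::
+
+        for subscription in client.list_subscriptions(request=request):
+            print(subscription.name)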
+ """ + + @property + def raw_page(self): + return self + + subscriptions: MutableSequence["Subscription"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Subscription", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListSharedResourceSubscriptionsRequest(proto.Message): + r"""Message for listing subscriptions of a shared resource. + + Attributes: + resource (str): + Required. Resource name of the requested + target. This resource may be either a Listing or + a DataExchange. e.g. + projects/123/locations/US/dataExchanges/456 OR + e.g. + projects/123/locations/US/dataExchanges/456/listings/789 + include_deleted_subscriptions (bool): + If selected, includes deleted subscriptions + in the response (up to 63 days after deletion). + page_size (int): + The maximum number of results to return in a + single response page. + page_token (str): + Page token, returned by a previous call. + """ + + resource: str = proto.Field( + proto.STRING, + number=1, + ) + include_deleted_subscriptions: bool = proto.Field( + proto.BOOL, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSharedResourceSubscriptionsResponse(proto.Message): + r"""Message for response to the listing of shared resource + subscriptions. + + Attributes: + shared_resource_subscriptions (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.Subscription]): + The list of subscriptions. + next_page_token (str): + Next page token. + """ + + @property + def raw_page(self): + return self + + shared_resource_subscriptions: MutableSequence[ + "Subscription" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Subscription", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RevokeSubscriptionRequest(proto.Message): + r"""Message for revoking a subscription. + + Attributes: + name (str): + Required. Resource name of the subscription + to revoke. e.g. + projects/123/locations/US/subscriptions/456 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RevokeSubscriptionResponse(proto.Message): + r"""Message for response when you revoke a subscription.""" + + +class DeleteSubscriptionRequest(proto.Message): + r"""Message for deleting a subscription. + + Attributes: + name (str): + Required. Resource name of the subscription + to delete. e.g. + projects/123/locations/US/subscriptions/456 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of a long-running operation in + Analytics Hub. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. 
API version used to start the + operation. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-analyticshub/noxfile.py b/packages/google-cloud-bigquery-analyticshub/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-bigquery-analyticshub/noxfile.py +++ b/packages/google-cloud-bigquery-analyticshub/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. - if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py new file mode 100644 index 000000000000..0e87e209c02e --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
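+#
+# Note (added for clarity; not generator output): the async client's method
+# call is a coroutine, so the snippet below awaits it first to obtain the
+# operation object and only then polls for completion, which is why it uses
+# the ``(await operation).result()`` form.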
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +async def sample_delete_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.DeleteSubscriptionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_subscription(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py new file mode 100644 index 000000000000..49002f06f02c --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +def sample_delete_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.DeleteSubscriptionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_subscription(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py new file mode 100644 index 000000000000..b94f91986f13 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_GetSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +async def sample_get_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.GetSubscriptionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_subscription(request=request) + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_GetSubscription_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py new file mode 100644 index 000000000000..e4e6c9cd5c12 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_GetSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +def sample_get_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.GetSubscriptionRequest( + name="name_value", + ) + + # Make the request + response = client.get_subscription(request=request) + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_GetSubscription_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py new file mode 100644 index 000000000000..42b93677a81f --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSharedResourceSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +async def sample_list_shared_resource_subscriptions(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.ListSharedResourceSubscriptionsRequest( + resource="resource_value", + ) + + # Make the request + page_result = client.list_shared_resource_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py new file mode 100644 index 000000000000..315c0e04fee2 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSharedResourceSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +def sample_list_shared_resource_subscriptions(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.ListSharedResourceSubscriptionsRequest( + resource="resource_value", + ) + + # Make the request + page_result = client.list_shared_resource_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py new file mode 100644 index 000000000000..b5f4364d0ac6 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +async def sample_list_subscriptions(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.ListSubscriptionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py new file mode 100644 index 000000000000..4bb39cf1b663 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +def sample_list_subscriptions(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.ListSubscriptionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py new file mode 100644 index 000000000000..a97f4c2fa032 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RefreshSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +async def sample_refresh_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.RefreshSubscriptionRequest( + name="name_value", + ) + + # Make the request + operation = client.refresh_subscription(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py new file mode 100644 index 000000000000..86b9650501aa --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RefreshSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +def sample_refresh_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.RefreshSubscriptionRequest( + name="name_value", + ) + + # Make the request + operation = client.refresh_subscription(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py new file mode 100644 index 000000000000..e44962272d63 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RevokeSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +async def sample_revoke_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.RevokeSubscriptionRequest( + name="name_value", + ) + + # Make the request + response = await client.revoke_subscription(request=request) + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py new file mode 100644 index 000000000000..56ba2d0d7ca2 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RevokeSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +def sample_revoke_subscription(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.RevokeSubscriptionRequest( + name="name_value", + ) + + # Make the request + response = client.revoke_subscription(request=request) + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py new file mode 100644 index 000000000000..b2caaf32164b --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubscribeDataExchange +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +async def sample_subscribe_data_exchange(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest( + name="name_value", + destination="destination_value", + subscription="subscription_value", + ) + + # Make the request + operation = client.subscribe_data_exchange(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py new file mode 100644 index 000000000000..603218b24565 --- /dev/null +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SubscribeDataExchange +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-analyticshub + + +# [START analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_analyticshub_v1 + + +def sample_subscribe_data_exchange(): + # Create a client + client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() + + # Initialize request argument(s) + request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest( + name="name_value", + destination="destination_value", + subscription="subscription_value", + ) + + # Make the request + operation = client.subscribe_data_exchange(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json index 15f781d39e00..0e7f7893e9b6 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json @@ -659,6 +659,167 @@ ], "title": "analyticshub_v1_generated_analytics_hub_service_delete_listing_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", + "shortName": "AnalyticsHubServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.delete_subscription", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.DeleteSubscription", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "DeleteSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_subscription" + }, + "description": "Sample for DeleteSubscription", + "file": "analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", + 
"shortName": "AnalyticsHubServiceClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.delete_subscription", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.DeleteSubscription", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "DeleteSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_subscription" + }, + "description": "Sample for DeleteSubscription", + "file": "analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1142,22 +1303,22 @@ "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", "shortName": "AnalyticsHubServiceAsyncClient" }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_data_exchanges", + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.get_subscription", "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListDataExchanges", + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetSubscription", "service": { "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", "shortName": "AnalyticsHubService" }, - "shortName": "ListDataExchanges" + "shortName": "GetSubscription" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest" + "type": "google.cloud.bigquery_analyticshub_v1.types.GetSubscriptionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1173,22 +1334,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListDataExchangesAsyncPager", - "shortName": "list_data_exchanges" + "resultType": "google.cloud.bigquery_analyticshub_v1.types.Subscription", + "shortName": "get_subscription" }, - "description": "Sample for ListDataExchanges", - "file": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py", + "description": "Sample for GetSubscription", + "file": "analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_async", + "regionTag": 
"analyticshub_v1_generated_AnalyticsHubService_GetSubscription_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1208,12 +1369,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py" + "title": "analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py" }, { "canonical": true, @@ -1222,22 +1383,22 @@ "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", "shortName": "AnalyticsHubServiceClient" }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_data_exchanges", + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.get_subscription", "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListDataExchanges", + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetSubscription", "service": { "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", "shortName": "AnalyticsHubService" }, - "shortName": "ListDataExchanges" + "shortName": "GetSubscription" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest" + "type": "google.cloud.bigquery_analyticshub_v1.types.GetSubscriptionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1253,22 +1414,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListDataExchangesPager", - "shortName": "list_data_exchanges" + "resultType": "google.cloud.bigquery_analyticshub_v1.types.Subscription", + "shortName": "get_subscription" }, - "description": "Sample for ListDataExchanges", - "file": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py", + "description": "Sample for GetSubscription", + "file": "analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_sync", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_GetSubscription_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1288,12 +1449,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py" + "title": "analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py" }, { "canonical": true, @@ -1303,19 +1464,19 @@ "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", "shortName": "AnalyticsHubServiceAsyncClient" }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_listings", + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_data_exchanges", "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListListings", + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListDataExchanges", "service": { "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", "shortName": "AnalyticsHubService" }, - "shortName": "ListListings" + "shortName": 
"ListDataExchanges" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest" + "type": "google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest" }, { "name": "parent", @@ -1334,14 +1495,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListListingsAsyncPager", - "shortName": "list_listings" + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListDataExchangesAsyncPager", + "shortName": "list_data_exchanges" }, - "description": "Sample for ListListings", - "file": "analyticshub_v1_generated_analytics_hub_service_list_listings_async.py", + "description": "Sample for ListDataExchanges", + "file": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListListings_async", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_async", "segments": [ { "end": 52, @@ -1374,7 +1535,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_listings_async.py" + "title": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py" }, { "canonical": true, @@ -1383,19 +1544,19 @@ "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", "shortName": "AnalyticsHubServiceClient" }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_listings", + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_data_exchanges", "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListListings", + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListDataExchanges", "service": { "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", "shortName": "AnalyticsHubService" }, - "shortName": "ListListings" + "shortName": "ListDataExchanges" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest" + "type": "google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest" }, { "name": "parent", @@ -1414,14 +1575,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListListingsPager", - "shortName": "list_listings" + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListDataExchangesPager", + "shortName": "list_data_exchanges" }, - "description": "Sample for ListListings", - "file": "analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py", + "description": "Sample for ListDataExchanges", + "file": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListListings_sync", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_sync", "segments": [ { "end": 52, @@ -1454,7 +1615,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py" + "title": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py" }, { "canonical": true, @@ -1464,22 +1625,22 @@ "fullName": 
"google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", "shortName": "AnalyticsHubServiceAsyncClient" }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_org_data_exchanges", + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_listings", "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListOrgDataExchanges", + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListListings", "service": { "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", "shortName": "AnalyticsHubService" }, - "shortName": "ListOrgDataExchanges" + "shortName": "ListListings" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest" + "type": "google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest" }, { - "name": "organization", + "name": "parent", "type": "str" }, { @@ -1495,14 +1656,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListOrgDataExchangesAsyncPager", - "shortName": "list_org_data_exchanges" + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListListingsAsyncPager", + "shortName": "list_listings" }, - "description": "Sample for ListOrgDataExchanges", - "file": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py", + "description": "Sample for ListListings", + "file": "analyticshub_v1_generated_analytics_hub_service_list_listings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_async", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListListings_async", "segments": [ { "end": 52, @@ -1535,7 +1696,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py" + "title": "analyticshub_v1_generated_analytics_hub_service_list_listings_async.py" }, { "canonical": true, @@ -1544,22 +1705,22 @@ "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", "shortName": "AnalyticsHubServiceClient" }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_org_data_exchanges", + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_listings", "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListOrgDataExchanges", + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListListings", "service": { "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", "shortName": "AnalyticsHubService" }, - "shortName": "ListOrgDataExchanges" + "shortName": "ListListings" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest" + "type": "google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest" }, { - "name": "organization", + "name": "parent", "type": "str" }, { @@ -1575,22 +1736,827 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListOrgDataExchangesPager", - "shortName": "list_org_data_exchanges" + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListListingsPager", + "shortName": "list_listings" }, - "description": 
"Sample for ListOrgDataExchanges", - "file": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py", + "description": "Sample for ListListings", + "file": "analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_sync", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListListings_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", + "shortName": "AnalyticsHubServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_org_data_exchanges", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListOrgDataExchanges", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "ListOrgDataExchanges" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest" + }, + { + "name": "organization", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListOrgDataExchangesAsyncPager", + "shortName": "list_org_data_exchanges" + }, + "description": "Sample for ListOrgDataExchanges", + "file": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", + "shortName": "AnalyticsHubServiceClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_org_data_exchanges", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListOrgDataExchanges", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "ListOrgDataExchanges" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest" + }, + { + "name": "organization", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListOrgDataExchangesPager", + "shortName": "list_org_data_exchanges" + }, + "description": "Sample for ListOrgDataExchanges", + "file": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", + "shortName": "AnalyticsHubServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_shared_resource_subscriptions", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListSharedResourceSubscriptions", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "ListSharedResourceSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSharedResourceSubscriptionsAsyncPager", + "shortName": "list_shared_resource_subscriptions" + }, + "description": "Sample for ListSharedResourceSubscriptions", + "file": "analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", + "shortName": "AnalyticsHubServiceClient" + }, + "fullName": 
"google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_shared_resource_subscriptions", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListSharedResourceSubscriptions", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "ListSharedResourceSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSharedResourceSubscriptionsPager", + "shortName": "list_shared_resource_subscriptions" + }, + "description": "Sample for ListSharedResourceSubscriptions", + "file": "analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", + "shortName": "AnalyticsHubServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_subscriptions", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListSubscriptions", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "ListSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSubscriptionsAsyncPager", + "shortName": "list_subscriptions" + }, + "description": "Sample for ListSubscriptions", + "file": "analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + 
{ + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", + "shortName": "AnalyticsHubServiceClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_subscriptions", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListSubscriptions", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "ListSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSubscriptionsPager", + "shortName": "list_subscriptions" + }, + "description": "Sample for ListSubscriptions", + "file": "analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", + "shortName": "AnalyticsHubServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.refresh_subscription", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.RefreshSubscription", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "RefreshSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "refresh_subscription" + }, + "description": "Sample for RefreshSubscription", + "file": "analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", + "shortName": "AnalyticsHubServiceClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.refresh_subscription", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.RefreshSubscription", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "RefreshSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "refresh_subscription" + }, + "description": "Sample for RefreshSubscription", + "file": "analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", + "shortName": "AnalyticsHubServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.revoke_subscription", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.RevokeSubscription", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "RevokeSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionResponse", + "shortName": "revoke_subscription" + }, + "description": "Sample for RevokeSubscription", + "file": "analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_async", "segments": [ { 
- "end": 52, + "end": 51, "start": 27, "type": "FULL" }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, { "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", + "shortName": "AnalyticsHubServiceClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.revoke_subscription", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.RevokeSubscription", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "RevokeSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionResponse", + "shortName": "revoke_subscription" + }, + "description": "Sample for RevokeSubscription", + "file": "analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, "start": 27, "type": "SHORT" }, @@ -1610,12 +2576,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py" + "title": "analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py" }, { "canonical": true, @@ -1770,6 +2736,167 @@ ], "title": "analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", + "shortName": "AnalyticsHubServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.subscribe_data_exchange", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.SubscribeDataExchange", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "SubscribeDataExchange" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "subscribe_data_exchange" + }, + "description": "Sample for SubscribeDataExchange", + "file": 
"analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", + "shortName": "AnalyticsHubServiceClient" + }, + "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.subscribe_data_exchange", + "method": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.SubscribeDataExchange", + "service": { + "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", + "shortName": "AnalyticsHubService" + }, + "shortName": "SubscribeDataExchange" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "subscribe_data_exchange" + }, + "description": "Sample for SubscribeDataExchange", + "file": "analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-bigquery-analyticshub/scripts/fixup_bigquery_analyticshub_v1_keywords.py b/packages/google-cloud-bigquery-analyticshub/scripts/fixup_bigquery_analyticshub_v1_keywords.py index cd8cde39b87f..300e0bdff1c9 100644 --- a/packages/google-cloud-bigquery-analyticshub/scripts/fixup_bigquery_analyticshub_v1_keywords.py +++ b/packages/google-cloud-bigquery-analyticshub/scripts/fixup_bigquery_analyticshub_v1_keywords.py @@ -43,13 +43,20 @@ class bigquery_analyticshubCallTransformer(cst.CSTTransformer): 'create_listing': ('parent', 'listing_id', 'listing', ), 'delete_data_exchange': ('name', ), 'delete_listing': ('name', ), + 'delete_subscription': ('name', ), 'get_data_exchange': ('name', ), 'get_iam_policy': ('resource', 'options', ), 'get_listing': ('name', ), + 'get_subscription': ('name', ), 'list_data_exchanges': ('parent', 'page_size', 'page_token', ), 'list_listings': ('parent', 'page_size', 
'page_token', ), 'list_org_data_exchanges': ('organization', 'page_size', 'page_token', ), + 'list_shared_resource_subscriptions': ('resource', 'include_deleted_subscriptions', 'page_size', 'page_token', ), + 'list_subscriptions': ('parent', 'filter', 'page_size', 'page_token', ), + 'refresh_subscription': ('name', ), + 'revoke_subscription': ('name', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'subscribe_data_exchange': ('name', 'destination', 'subscription', 'subscriber_contact', ), 'subscribe_listing': ('name', 'destination_dataset', ), 'test_iam_permissions': ('resource', 'permissions', ), 'update_data_exchange': ('update_mask', 'data_exchange', ), diff --git a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py index 503d604bc839..94083718e333 100644 --- a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py @@ -24,17 +24,29 @@ import math -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore from google.type import expr_pb2 # type: ignore import grpc @@ -4410,11 +4422,11 @@ async def test_subscribe_listing_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.GetIamPolicyRequest, + analyticshub.SubscribeDataExchangeRequest, dict, ], ) -def test_get_iam_policy(request_type, transport: str = "grpc"): +def test_subscribe_data_exchange(request_type, transport: str = "grpc"): client = AnalyticsHubServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4425,26 +4437,23 @@ def test_get_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.subscribe_data_exchange), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - response = client.get_iam_policy(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.subscribe_data_exchange(request) # Establish that the underlying gRPC stub method was called. 
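+ # SubscribeDataExchange is a long-running operation: the mocked stub
+ # returns a raw operations_pb2.Operation, which the client wraps in a
+ # google.api_core future (asserted below); a real caller would block on
+ # it with response.result().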
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == analyticshub.SubscribeDataExchangeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) -def test_get_iam_policy_empty_call(): +def test_subscribe_data_exchange_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsHubServiceClient( @@ -4453,16 +4462,19 @@ def test_get_iam_policy_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - client.get_iam_policy() + with mock.patch.object( + type(client.transport.subscribe_data_exchange), "__call__" + ) as call: + client.subscribe_data_exchange() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == analyticshub.SubscribeDataExchangeRequest() @pytest.mark.asyncio -async def test_get_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +async def test_subscribe_data_exchange_async( + transport: str = "grpc_asyncio", + request_type=analyticshub.SubscribeDataExchangeRequest, ): client = AnalyticsHubServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4474,47 +4486,46 @@ async def test_get_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.subscribe_data_exchange), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_iam_policy(request) + response = await client.subscribe_data_exchange(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == analyticshub.SubscribeDataExchangeRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) +async def test_subscribe_data_exchange_async_from_dict(): + await test_subscribe_data_exchange_async(request_type=dict) -def test_get_iam_policy_field_headers(): +def test_subscribe_data_exchange_field_headers(): client = AnalyticsHubServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = analyticshub.SubscribeDataExchangeRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
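+ # GAPIC clients turn URI path fields into request metadata: the
+ # request.name value set above is expected to surface in the
+ # "x-goog-request-params" header checked at the end of this test.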
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) + with mock.patch.object( + type(client.transport.subscribe_data_exchange), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.subscribe_data_exchange(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4525,26 +4536,30 @@ def test_get_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): +async def test_subscribe_data_exchange_field_headers_async(): client = AnalyticsHubServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() + request = analyticshub.SubscribeDataExchangeRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) + with mock.patch.object( + type(client.transport.subscribe_data_exchange), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.subscribe_data_exchange(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4555,35 +4570,104 @@ async def test_get_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] -def test_get_iam_policy_from_dict_foreign(): +def test_subscribe_data_exchange_flattened(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.subscribe_data_exchange), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.subscribe_data_exchange( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_subscribe_data_exchange_flattened_error(): client = AnalyticsHubServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.subscribe_data_exchange( + analyticshub.SubscribeDataExchangeRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_subscribe_data_exchange_flattened_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
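+ # Flattened arguments are sugar for the request object: name="name_value"
+ # below should land on SubscribeDataExchangeRequest.name in the proto
+ # handed to the stub, which is what the final assertions verify.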
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.subscribe_data_exchange), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.subscribe_data_exchange( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_subscribe_data_exchange_flattened_error_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.subscribe_data_exchange( + analyticshub.SubscribeDataExchangeRequest(), + name="name_value", ) - call.assert_called() @pytest.mark.parametrize( "request_type", [ - iam_policy_pb2.SetIamPolicyRequest, + analyticshub.RefreshSubscriptionRequest, dict, ], ) -def test_set_iam_policy(request_type, transport: str = "grpc"): +def test_refresh_subscription(request_type, transport: str = "grpc"): client = AnalyticsHubServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4594,26 +4678,23 @@ def test_set_iam_policy(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.refresh_subscription), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - response = client.set_iam_policy(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.refresh_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == analyticshub.RefreshSubscriptionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) -def test_set_iam_policy_empty_call(): +def test_refresh_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AnalyticsHubServiceClient( @@ -4622,16 +4703,19 @@ def test_set_iam_policy_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
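+ # With no request and no flattened fields, the client should fall back to
+ # a default-constructed RefreshSubscriptionRequest(), per the assertion
+ # at the end of this test.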
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - client.set_iam_policy() + with mock.patch.object( + type(client.transport.refresh_subscription), "__call__" + ) as call: + client.refresh_subscription() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == analyticshub.RefreshSubscriptionRequest() @pytest.mark.asyncio -async def test_set_iam_policy_async( - transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +async def test_refresh_subscription_async( + transport: str = "grpc_asyncio", + request_type=analyticshub.RefreshSubscriptionRequest, ): client = AnalyticsHubServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4643,47 +4727,46 @@ async def test_set_iam_policy_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.refresh_subscription), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.set_iam_policy(request) + response = await client.refresh_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == analyticshub.RefreshSubscriptionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b"etag_blob" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) +async def test_refresh_subscription_async_from_dict(): + await test_refresh_subscription_async(request_type=dict) -def test_set_iam_policy_field_headers(): +def test_refresh_subscription_field_headers(): client = AnalyticsHubServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() + request = analyticshub.RefreshSubscriptionRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) + with mock.patch.object( + type(client.transport.refresh_subscription), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.refresh_subscription(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4694,26 +4777,30 @@ def test_set_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): +async def test_refresh_subscription_field_headers_async(): client = AnalyticsHubServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() + request = analyticshub.RefreshSubscriptionRequest() - request.resource = "resource_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) + with mock.patch.object( + type(client.transport.refresh_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.refresh_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4724,59 +4811,2084 @@ async def test_set_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "resource=resource_value", + "name=name_value", ) in kw["metadata"] -def test_set_iam_policy_from_dict_foreign(): +def test_refresh_subscription_flattened(): client = AnalyticsHubServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + with mock.patch.object( + type(client.transport.refresh_subscription), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), - } + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.refresh_subscription( + name="name_value", ) - call.assert_called() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, - ], -) -def test_test_iam_permissions(request_type, transport: str = "grpc"): + +def test_refresh_subscription_flattened_error(): client = AnalyticsHubServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Attempting to call a method with both a request object and flattened + # fields is an error. 
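+ # The request object and the flattened field would both populate the same
+ # proto field, so the client refuses the ambiguous combination outright.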
+ with pytest.raises(ValueError): + client.refresh_subscription( + analyticshub.RefreshSubscriptionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_refresh_subscription_flattened_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" + type(client.transport.refresh_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.refresh_subscription( + name="name_value", ) - response = client.test_iam_permissions(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_refresh_subscription_flattened_error_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.refresh_subscription( + analyticshub.RefreshSubscriptionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analyticshub.GetSubscriptionRequest, + dict, + ], +) +def test_get_subscription(request_type, transport: str = "grpc"): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyticshub.Subscription( + name="name_value", + organization_id="organization_id_value", + organization_display_name="organization_display_name_value", + state=analyticshub.Subscription.State.STATE_ACTIVE, + subscriber_contact="subscriber_contact_value", + listing="listing_value", + ) + response = client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.GetSubscriptionRequest() + + # Establish that the response is the type that we expect. 
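+ # Note: listing is set on the mock return value but never asserted on
+ # below, presumably because it sits inside the Subscription resource_name
+ # oneof in the generated proto.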
+ assert isinstance(response, analyticshub.Subscription) + assert response.name == "name_value" + assert response.organization_id == "organization_id_value" + assert response.organization_display_name == "organization_display_name_value" + assert response.state == analyticshub.Subscription.State.STATE_ACTIVE + assert response.subscriber_contact == "subscriber_contact_value" + + +def test_get_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + client.get_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.GetSubscriptionRequest() + + +@pytest.mark.asyncio +async def test_get_subscription_async( + transport: str = "grpc_asyncio", request_type=analyticshub.GetSubscriptionRequest +): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyticshub.Subscription( + name="name_value", + organization_id="organization_id_value", + organization_display_name="organization_display_name_value", + state=analyticshub.Subscription.State.STATE_ACTIVE, + subscriber_contact="subscriber_contact_value", + ) + ) + response = await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.GetSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyticshub.Subscription) + assert response.name == "name_value" + assert response.organization_id == "organization_id_value" + assert response.organization_display_name == "organization_display_name_value" + assert response.state == analyticshub.Subscription.State.STATE_ACTIVE + assert response.subscriber_contact == "subscriber_contact_value" + + +@pytest.mark.asyncio +async def test_get_subscription_async_from_dict(): + await test_get_subscription_async(request_type=dict) + + +def test_get_subscription_field_headers(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analyticshub.GetSubscriptionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value = analyticshub.Subscription() + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_subscription_field_headers_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analyticshub.GetSubscriptionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyticshub.Subscription() + ) + await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_subscription_flattened(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyticshub.Subscription() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_subscription( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_subscription_flattened_error(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_subscription( + analyticshub.GetSubscriptionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_subscription_flattened_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyticshub.Subscription() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyticshub.Subscription() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_subscription( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
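+ # FakeUnaryUnaryCall makes the mocked stub awaitable, so the flattened
+ # call above is awaited just like a real grpc.aio invocation would be.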
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_subscription_flattened_error_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_subscription( + analyticshub.GetSubscriptionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analyticshub.ListSubscriptionsRequest, + dict, + ], +) +def test_list_subscriptions(request_type, transport: str = "grpc"): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analyticshub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.ListSubscriptionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_subscriptions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + client.list_subscriptions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.ListSubscriptionsRequest() + + +@pytest.mark.asyncio +async def test_list_subscriptions_async( + transport: str = "grpc_asyncio", request_type=analyticshub.ListSubscriptionsRequest +): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyticshub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.ListSubscriptionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSubscriptionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_from_dict(): + await test_list_subscriptions_async(request_type=dict) + + +def test_list_subscriptions_field_headers(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analyticshub.ListSubscriptionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + call.return_value = analyticshub.ListSubscriptionsResponse() + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_subscriptions_field_headers_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analyticshub.ListSubscriptionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyticshub.ListSubscriptionsResponse() + ) + await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_subscriptions_flattened(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analyticshub.ListSubscriptionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_subscriptions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_subscriptions_flattened_error(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_subscriptions(
+            analyticshub.ListSubscriptionsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_subscriptions_flattened_async():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            analyticshub.ListSubscriptionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_subscriptions(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_subscriptions_flattened_error_async():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_subscriptions(
+            analyticshub.ListSubscriptionsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_subscriptions_pager(transport_name: str = "grpc"):
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="abc",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[],
+                next_page_token="def",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="ghi",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_subscriptions(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, analyticshub.Subscription) for i in results)
+
+
+def test_list_subscriptions_pages(transport_name: str = "grpc"):
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
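+        # With an iterable side_effect, each successive stub invocation
+        # returns the next staged response; the trailing RuntimeError fails
+        # the test loudly if the pager requests more pages than were staged.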
+        call.side_effect = (
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="abc",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[],
+                next_page_token="def",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="ghi",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_subscriptions(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_subscriptions_async_pager():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="abc",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[],
+                next_page_token="def",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="ghi",
+            ),
+            analyticshub.ListSubscriptionsResponse(
+                subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_subscriptions(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, analyticshub.Subscription) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_subscriptions_async_pages():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + analyticshub.ListSubscriptionsResponse( + subscriptions=[ + analyticshub.Subscription(), + analyticshub.Subscription(), + analyticshub.Subscription(), + ], + next_page_token="abc", + ), + analyticshub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + analyticshub.ListSubscriptionsResponse( + subscriptions=[ + analyticshub.Subscription(), + ], + next_page_token="ghi", + ), + analyticshub.ListSubscriptionsResponse( + subscriptions=[ + analyticshub.Subscription(), + analyticshub.Subscription(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_subscriptions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analyticshub.ListSharedResourceSubscriptionsRequest, + dict, + ], +) +def test_list_shared_resource_subscriptions(request_type, transport: str = "grpc"): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_shared_resource_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analyticshub.ListSharedResourceSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_shared_resource_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.ListSharedResourceSubscriptionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSharedResourceSubscriptionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_shared_resource_subscriptions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_shared_resource_subscriptions), "__call__" + ) as call: + client.list_shared_resource_subscriptions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.ListSharedResourceSubscriptionsRequest() + + +@pytest.mark.asyncio +async def test_list_shared_resource_subscriptions_async( + transport: str = "grpc_asyncio", + request_type=analyticshub.ListSharedResourceSubscriptionsRequest, +): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
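+    # (A default-constructed request still serializes cleanly: proto3 fields
+    # all carry zero values, so nothing here is "missing" on the wire.)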
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_shared_resource_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyticshub.ListSharedResourceSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_shared_resource_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.ListSharedResourceSubscriptionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSharedResourceSubscriptionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_shared_resource_subscriptions_async_from_dict(): + await test_list_shared_resource_subscriptions_async(request_type=dict) + + +def test_list_shared_resource_subscriptions_field_headers(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analyticshub.ListSharedResourceSubscriptionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_shared_resource_subscriptions), "__call__" + ) as call: + call.return_value = analyticshub.ListSharedResourceSubscriptionsResponse() + client.list_shared_resource_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_shared_resource_subscriptions_field_headers_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analyticshub.ListSharedResourceSubscriptionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_shared_resource_subscriptions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyticshub.ListSharedResourceSubscriptionsResponse() + ) + await client.list_shared_resource_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_list_shared_resource_subscriptions_flattened(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
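+    # Patching __call__ on type(client.transport.<method>) replaces the gRPC
+    # multicallable itself, so no bytes ever reach a channel and the captured
+    # arguments can be asserted on directly.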
+    with mock.patch.object(
+        type(client.transport.list_shared_resource_subscriptions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = analyticshub.ListSharedResourceSubscriptionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_shared_resource_subscriptions(
+            resource="resource_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = "resource_value"
+        assert arg == mock_val
+
+
+def test_list_shared_resource_subscriptions_flattened_error():
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_shared_resource_subscriptions(
+            analyticshub.ListSharedResourceSubscriptionsRequest(),
+            resource="resource_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_shared_resource_subscriptions_flattened_async():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_shared_resource_subscriptions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            analyticshub.ListSharedResourceSubscriptionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_shared_resource_subscriptions(
+            resource="resource_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = "resource_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_shared_resource_subscriptions_flattened_error_async():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_shared_resource_subscriptions(
+            analyticshub.ListSharedResourceSubscriptionsRequest(),
+            resource="resource_value",
+        )
+
+
+def test_list_shared_resource_subscriptions_pager(transport_name: str = "grpc"):
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_shared_resource_subscriptions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="abc",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[],
+                next_page_token="def",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="ghi",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", ""),)),
+        )
+        pager = client.list_shared_resource_subscriptions(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, analyticshub.Subscription) for i in results)
+
+
+def test_list_shared_resource_subscriptions_pages(transport_name: str = "grpc"):
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_shared_resource_subscriptions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="abc",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[],
+                next_page_token="def",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="ghi",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_shared_resource_subscriptions(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_shared_resource_subscriptions_async_pager():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_shared_resource_subscriptions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="abc",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[],
+                next_page_token="def",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="ghi",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_shared_resource_subscriptions(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, analyticshub.Subscription) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_shared_resource_subscriptions_async_pages():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_shared_resource_subscriptions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="abc",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[],
+                next_page_token="def",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                ],
+                next_page_token="ghi",
+            ),
+            analyticshub.ListSharedResourceSubscriptionsResponse(
+                shared_resource_subscriptions=[
+                    analyticshub.Subscription(),
+                    analyticshub.Subscription(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_shared_resource_subscriptions(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        analyticshub.RevokeSubscriptionRequest,
+        dict,
+    ],
+)
+def test_revoke_subscription(request_type, transport: str = "grpc"):
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.revoke_subscription), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = analyticshub.RevokeSubscriptionResponse()
+        response = client.revoke_subscription(request)
+
+        # Establish that the underlying gRPC stub method was called.
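+        # proto-plus messages compare field-by-field, so the equality check
+        # against a freshly constructed request below verifies content, not
+        # object identity.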
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.RevokeSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyticshub.RevokeSubscriptionResponse) + + +def test_revoke_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_subscription), "__call__" + ) as call: + client.revoke_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.RevokeSubscriptionRequest() + + +@pytest.mark.asyncio +async def test_revoke_subscription_async( + transport: str = "grpc_asyncio", request_type=analyticshub.RevokeSubscriptionRequest +): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyticshub.RevokeSubscriptionResponse() + ) + response = await client.revoke_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.RevokeSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyticshub.RevokeSubscriptionResponse) + + +@pytest.mark.asyncio +async def test_revoke_subscription_async_from_dict(): + await test_revoke_subscription_async(request_type=dict) + + +def test_revoke_subscription_field_headers(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analyticshub.RevokeSubscriptionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.revoke_subscription), "__call__" + ) as call: + call.return_value = analyticshub.RevokeSubscriptionResponse() + client.revoke_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_revoke_subscription_field_headers_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = analyticshub.RevokeSubscriptionRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.revoke_subscription), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            analyticshub.RevokeSubscriptionResponse()
+        )
+        await client.revoke_subscription(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_revoke_subscription_flattened():
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.revoke_subscription), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = analyticshub.RevokeSubscriptionResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.revoke_subscription(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_revoke_subscription_flattened_error():
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.revoke_subscription(
+            analyticshub.RevokeSubscriptionRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_revoke_subscription_flattened_async():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.revoke_subscription), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            analyticshub.RevokeSubscriptionResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.revoke_subscription(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_revoke_subscription_flattened_error_async():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.revoke_subscription( + analyticshub.RevokeSubscriptionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + analyticshub.DeleteSubscriptionRequest, + dict, + ], +) +def test_delete_subscription(request_type, transport: str = "grpc"): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.DeleteSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + client.delete_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.DeleteSubscriptionRequest() + + +@pytest.mark.asyncio +async def test_delete_subscription_async( + transport: str = "grpc_asyncio", request_type=analyticshub.DeleteSubscriptionRequest +): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == analyticshub.DeleteSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_subscription_async_from_dict(): + await test_delete_subscription_async(request_type=dict) + + +def test_delete_subscription_field_headers(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
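+    # The GAPIC layer serializes routing fields into the x-goog-request-params
+    # metadata entry (here "name=name_value") so the backend can route the
+    # call without inspecting the request body.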
+ request = analyticshub.DeleteSubscriptionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_subscription_field_headers_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analyticshub.DeleteSubscriptionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_subscription_flattened(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_subscription( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_subscription_flattened_error(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_subscription( + analyticshub.DeleteSubscriptionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_subscription_flattened_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_subscription(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_subscription_flattened_error_async():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_subscription(
+            analyticshub.DeleteSubscriptionRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        iam_policy_pb2.GetIamPolicyRequest,
+        dict,
+    ],
+)
+def test_get_iam_policy(request_type, transport: str = "grpc"):
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.GetIamPolicyRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b"etag_blob"
+
+
+def test_get_iam_policy_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        client.get_iam_policy()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.GetIamPolicyRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(
+    transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest
+):
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
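+        # FakeUnaryUnaryCall makes the mocked stub awaitable, resolving to
+        # the wrapped Policy just as a real grpc.aio unary-unary call would.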
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = AnalyticsHubServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): + client = AnalyticsHubServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
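+        # The IAM mixin methods use the raw protobuf types from google.iam.v1
+        # (iam_policy_pb2, policy_pb2) rather than proto-plus wrappers, so the
+        # fake response is built from those modules directly.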
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=["permissions_value"],
+        )
+        response = client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.TestIamPermissionsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
@@ -5075,6 +7187,13 @@ def test_analytics_hub_service_base_transport():
         "update_listing",
         "delete_listing",
         "subscribe_listing",
+        "subscribe_data_exchange",
+        "refresh_subscription",
+        "get_subscription",
+        "list_subscriptions",
+        "list_shared_resource_subscriptions",
+        "revoke_subscription",
+        "delete_subscription",
         "get_iam_policy",
         "set_iam_policy",
         "test_iam_permissions",
@@ -5086,6 +7205,11 @@ def test_analytics_hub_service_base_transport():
     with pytest.raises(NotImplementedError):
         transport.close()
 
+    # Additionally, the LRO client (a property) should
+    # also raise NotImplementedError
+    with pytest.raises(NotImplementedError):
+        transport.operations_client
+
     # Catch all for all remaining methods and properties
     remainder = [
         "kind",
@@ -5437,6 +7561,40 @@ def test_analytics_hub_service_transport_channel_mtls_with_adc(transport_class):
             assert transport.grpc_channel == mock_grpc_channel
 
 
+def test_analytics_hub_service_grpc_lro_client():
+    client = AnalyticsHubServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have a api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_analytics_hub_service_grpc_lro_async_client():
+    client = AnalyticsHubServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have a api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
 def test_data_exchange_path():
     project = "squid"
     location = "clam"
@@ -5521,8 +7679,38 @@ def test_parse_listing_path():
     assert expected == actual
 
 
+def test_subscription_path():
+    project = "cuttlefish"
+    location = "mussel"
+    subscription = "winkle"
+    expected = (
+        "projects/{project}/locations/{location}/subscriptions/{subscription}".format(
+            project=project,
+            location=location,
+            subscription=subscription,
+        )
+    )
+    actual = AnalyticsHubServiceClient.subscription_path(
+        project, location, subscription
+    )
+    assert expected == actual
+
+
+def test_parse_subscription_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "subscription": "abalone",
+    }
+    path = AnalyticsHubServiceClient.subscription_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = AnalyticsHubServiceClient.parse_subscription_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -5532,7 +7720,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = AnalyticsHubServiceClient.common_billing_account_path(**expected) @@ -5542,7 +7730,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -5552,7 +7740,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = AnalyticsHubServiceClient.common_folder_path(**expected) @@ -5562,7 +7750,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -5572,7 +7760,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } path = AnalyticsHubServiceClient.common_organization_path(**expected) @@ -5582,7 +7770,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -5592,7 +7780,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = AnalyticsHubServiceClient.common_project_path(**expected) @@ -5602,8 +7790,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -5614,8 +7802,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = AnalyticsHubServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-bigquery-connection/.OwlBot.yaml b/packages/google-cloud-bigquery-connection/.OwlBot.yaml new file mode 100644 index 000000000000..f1af10f5eb15 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/.OwlBot.yaml @@ -0,0 +1,27 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +deep-remove-regex: + - /owl-bot-staging + +deep-preserve-regex: + - /owl-bot-staging/google-cloud-bigquery-connection/v1beta1 + +deep-copy-regex: + - source: /google/cloud/bigquery/connection/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-bigquery-connection/$1 + +begin-after-commit-hash: 130ce904e5d546c312943d10f48799590f9c0f66 + diff --git a/packages/google-cloud-bigquery-connection/.coveragerc b/packages/google-cloud-bigquery-connection/.coveragerc new file mode 100644 index 000000000000..6431327e975a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/bigquery_connection/__init__.py + google/cloud/bigquery_connection/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-bigquery-connection/.eggs/README.txt b/packages/google-cloud-bigquery-connection/.eggs/README.txt new file mode 100644 index 000000000000..5d01668824f4 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/.eggs/README.txt @@ -0,0 +1,6 @@ +This directory contains eggs that were downloaded by setuptools to build, test, and run plug-ins. + +This directory caches those eggs to prevent repeated downloads. + +However, it is safe to delete this directory. + diff --git a/packages/google-cloud-bigquery-connection/.flake8 b/packages/google-cloud-bigquery-connection/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-bigquery-connection/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-bigquery-connection/.gitignore b/packages/google-cloud-bigquery-connection/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-bigquery-connection/.repo-metadata.json b/packages/google-cloud-bigquery-connection/.repo-metadata.json new file mode 100644 index 000000000000..885c2d84998b --- /dev/null +++ b/packages/google-cloud-bigquery-connection/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "bigqueryconnection", + "name_pretty": "BigQuery Connection", + "api_description": "Manage BigQuery connections to external data sources.", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/bigqueryconnection", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigqueryconnection/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-bigquery-connection", + "api_id": "bigqueryconnection.googleapis.com", + "requires_billing": true, + "default_version": "v1", + "codeowner_team": "@googleapis/api-bigquery", + "api_shortname": "bigqueryconnection" +} diff --git a/packages/google-cloud-bigquery-connection/CHANGELOG.md b/packages/google-cloud-bigquery-connection/CHANGELOG.md new file mode 100644 index 000000000000..b6d58237028a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/CHANGELOG.md @@ -0,0 +1,340 @@ +# Changelog + +## [1.13.1](https://github.com/googleapis/python-bigquery-connection/compare/v1.13.0...v1.13.1) (2023-08-02) + + +### Documentation + +* Minor formatting ([#339](https://github.com/googleapis/python-bigquery-connection/issues/339)) ([527a046](https://github.com/googleapis/python-bigquery-connection/commit/527a04645650d442a5aab62749a9af6b7281907e)) + +## [1.13.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.12.1...v1.13.0) (2023-07-10) + + +### Features + +* Add cloud spanner connection properties - max_parallelism ([4d0c702](https://github.com/googleapis/python-bigquery-connection/commit/4d0c70242d2afe7be338d259cf5360f3b3de380f)) +* Add cloud spanner connection properties - use_data_boost ([4d0c702](https://github.com/googleapis/python-bigquery-connection/commit/4d0c70242d2afe7be338d259cf5360f3b3de380f)) +* Add support for Salesforce connections, which are usable only by allowlisted partners ([4d0c702](https://github.com/googleapis/python-bigquery-connection/commit/4d0c70242d2afe7be338d259cf5360f3b3de380f)) + +## [1.12.1](https://github.com/googleapis/python-bigquery-connection/compare/v1.12.0...v1.12.1) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#320](https://github.com/googleapis/python-bigquery-connection/issues/320)) ([aa9088a](https://github.com/googleapis/python-bigquery-connection/commit/aa9088ab70a2930849e4fd96685e52d741a3a385)) + +## [1.12.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.11.0...v1.12.0) (2023-03-23) + + +### Features + +* Add spark connection properties type ([#304](https://github.com/googleapis/python-bigquery-connection/issues/304)) ([9f7de41](https://github.com/googleapis/python-bigquery-connection/commit/9f7de41e4379666a788ab04820afb716275b3d31)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#307](https://github.com/googleapis/python-bigquery-connection/issues/307)) ([18839c6](https://github.com/googleapis/python-bigquery-connection/commit/18839c643f176d960f61ba256cf62ca2262a0fde)) + +## 
[1.11.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.10.0...v1.11.0) (2023-03-01) + + +### Features + +* Add cloud spanner connection properties - database role ([181685a](https://github.com/googleapis/python-bigquery-connection/commit/181685a1a82d4d932b4108580328f5ee09718513)) +* Add cloud spanner connection properties - serverless analytics ([181685a](https://github.com/googleapis/python-bigquery-connection/commit/181685a1a82d4d932b4108580328f5ee09718513)) + +## [1.10.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.9.1...v1.10.0) (2023-02-27) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#294](https://github.com/googleapis/python-bigquery-connection/issues/294)) ([e4cbcab](https://github.com/googleapis/python-bigquery-connection/commit/e4cbcabfa70da4e2ab4cc7b11a56831263400940)) + +## [1.9.1](https://github.com/googleapis/python-bigquery-connection/compare/v1.9.0...v1.9.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([a62dada](https://github.com/googleapis/python-bigquery-connection/commit/a62dada3529fe2ef72496e3e35f56c7184cfdef0)) + + +### Documentation + +* Add documentation for enums ([a62dada](https://github.com/googleapis/python-bigquery-connection/commit/a62dada3529fe2ef72496e3e35f56c7184cfdef0)) + +## [1.9.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.8.0...v1.9.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#280](https://github.com/googleapis/python-bigquery-connection/issues/280)) ([ac90427](https://github.com/googleapis/python-bigquery-connection/commit/ac904279a668049eb8f01e69d52d7dd12918c545)) + +## [1.8.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.7.3...v1.8.0) (2022-12-15) + + +### Features + +* Add support for `google.cloud.bigquery_connection.__version__` ([5ae476d](https://github.com/googleapis/python-bigquery-connection/commit/5ae476d45a64fed355836969579ebe94653bf6fa)) +* Add typing to proto.Message based class attributes ([5ae476d](https://github.com/googleapis/python-bigquery-connection/commit/5ae476d45a64fed355836969579ebe94653bf6fa)) + + +### Bug Fixes + +* Add dict typing for client_options ([5ae476d](https://github.com/googleapis/python-bigquery-connection/commit/5ae476d45a64fed355836969579ebe94653bf6fa)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([cbcb79c](https://github.com/googleapis/python-bigquery-connection/commit/cbcb79c785cc1475b71a6f3f1dd11531deef429b)) +* Drop usage of pkg_resources ([cbcb79c](https://github.com/googleapis/python-bigquery-connection/commit/cbcb79c785cc1475b71a6f3f1dd11531deef429b)) +* Fix timeout default values ([cbcb79c](https://github.com/googleapis/python-bigquery-connection/commit/cbcb79c785cc1475b71a6f3f1dd11531deef429b)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([5ae476d](https://github.com/googleapis/python-bigquery-connection/commit/5ae476d45a64fed355836969579ebe94653bf6fa)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([cbcb79c](https://github.com/googleapis/python-bigquery-connection/commit/cbcb79c785cc1475b71a6f3f1dd11531deef429b)) + +## [1.7.3](https://github.com/googleapis/python-bigquery-connection/compare/v1.7.2...v1.7.3) (2022-10-07) + + +### Bug Fixes 
+ +* **deps:** Allow protobuf 3.19.5 ([#265](https://github.com/googleapis/python-bigquery-connection/issues/265)) ([174901e](https://github.com/googleapis/python-bigquery-connection/commit/174901ea69f0c442839f5b40cfb7521748c5b1e7)) + +## [1.7.2](https://github.com/googleapis/python-bigquery-connection/compare/v1.7.1...v1.7.2) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#262](https://github.com/googleapis/python-bigquery-connection/issues/262)) ([5db326d](https://github.com/googleapis/python-bigquery-connection/commit/5db326dd3f949f9e8980bd046885a4ae6a49d856)) + +## [1.7.1](https://github.com/googleapis/python-bigquery-connection/compare/v1.7.0...v1.7.1) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#243](https://github.com/googleapis/python-bigquery-connection/issues/243)) ([5d2e50c](https://github.com/googleapis/python-bigquery-connection/commit/5d2e50c72536f1e8679cea1f02831d93d52e3d9f)) +* **deps:** require proto-plus >= 1.22.0 ([5d2e50c](https://github.com/googleapis/python-bigquery-connection/commit/5d2e50c72536f1e8679cea1f02831d93d52e3d9f)) + +## [1.7.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.6.0...v1.7.0) (2022-08-02) + + +### Features + +* Add service_account_id output field to CloudSQL properties ([#237](https://github.com/googleapis/python-bigquery-connection/issues/237)) ([adc73a6](https://github.com/googleapis/python-bigquery-connection/commit/adc73a6d8ce3f35de56c46a140e940bc63dcd23b)) + + +### Documentation + +* deprecate the AwsCrossAccountRole property ([#240](https://github.com/googleapis/python-bigquery-connection/issues/240)) ([ad17197](https://github.com/googleapis/python-bigquery-connection/commit/ad17197e49d34ef933876d2c1926d2ee4ee206f8)) + +## [1.6.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.5.1...v1.6.0) (2022-07-16) + + +### Features + +* add audience parameter ([0dd5a10](https://github.com/googleapis/python-bigquery-connection/commit/0dd5a10e39a8e52ac4f82d0f602b6a8fac76d607)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#229](https://github.com/googleapis/python-bigquery-connection/issues/229)) ([0dd5a10](https://github.com/googleapis/python-bigquery-connection/commit/0dd5a10e39a8e52ac4f82d0f602b6a8fac76d607)) +* require python 3.7+ ([#231](https://github.com/googleapis/python-bigquery-connection/issues/231)) ([740194d](https://github.com/googleapis/python-bigquery-connection/commit/740194d60af8f598e7cdc942e0ff0c0ed7ca9b1b)) + +## [1.5.1](https://github.com/googleapis/python-bigquery-connection/compare/v1.5.0...v1.5.1) (2022-06-07) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#218](https://github.com/googleapis/python-bigquery-connection/issues/218)) ([f842925](https://github.com/googleapis/python-bigquery-connection/commit/f842925ac025647b2959f60443b1b22326f8f9bc)) + + +### Documentation + +* fix changelog header to consistent size ([#219](https://github.com/googleapis/python-bigquery-connection/issues/219)) ([33c376f](https://github.com/googleapis/python-bigquery-connection/commit/33c376f7f1df8825dfdf1697512e42754b988075)) + +## [1.5.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.4.0...v1.5.0) (2022-05-05) + + +### Features + +* AuditConfig for IAM v1 
([#194](https://github.com/googleapis/python-bigquery-connection/issues/194)) ([d350b94](https://github.com/googleapis/python-bigquery-connection/commit/d350b947b3cfbb1aede8638c518eac2e8ba5495d)) + + +### Bug Fixes + +* **deps:** require grpc-google-iam-v1 >=0.12.4 ([d350b94](https://github.com/googleapis/python-bigquery-connection/commit/d350b947b3cfbb1aede8638c518eac2e8ba5495d)) +* region tags in create_mysql_connection.py ([#205](https://github.com/googleapis/python-bigquery-connection/issues/205)) ([f082fd2](https://github.com/googleapis/python-bigquery-connection/commit/f082fd246495cd6f874e6ac85655d27d594ab786)) + + +### Documentation + +* **samples:** create connection sample for MySQL instance ([#147](https://github.com/googleapis/python-bigquery-connection/issues/147)) ([8e664be](https://github.com/googleapis/python-bigquery-connection/commit/8e664bea488183d1132a61cb1ab7a912dde48b43)) + +## [1.4.0](https://github.com/googleapis/python-bigquery-connection/compare/v1.3.4...v1.4.0) (2022-03-08) + + +### Features + +* Add Cloud_Resource Connection Support ([#181](https://github.com/googleapis/python-bigquery-connection/issues/181)) ([1be012a](https://github.com/googleapis/python-bigquery-connection/commit/1be012a7d7f585365cfd6c1e499188784838965a)) + +## [1.3.4](https://github.com/googleapis/python-bigquery-connection/compare/v1.3.3...v1.3.4) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#176](https://github.com/googleapis/python-bigquery-connection/issues/176)) ([a6cfa8f](https://github.com/googleapis/python-bigquery-connection/commit/a6cfa8f0c27ffa507305618d16a7ae5fb6fb15f9)) +* **deps:** require proto-plus>=1.15.0 ([a6cfa8f](https://github.com/googleapis/python-bigquery-connection/commit/a6cfa8f0c27ffa507305618d16a7ae5fb6fb15f9)) + +## [1.3.3](https://github.com/googleapis/python-bigquery-connection/compare/v1.3.2...v1.3.3) (2022-02-26) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([d1e6767](https://github.com/googleapis/python-bigquery-connection/commit/d1e676705826962072919c51d881f22d540377b5)) + + +### Documentation + +* add generated snippets ([#165](https://github.com/googleapis/python-bigquery-connection/issues/165)) ([53edc14](https://github.com/googleapis/python-bigquery-connection/commit/53edc14b8f976985549856ac0823565b88a1a4ee)) + +## [1.3.2](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.3.1...v1.3.2) (2022-01-08) + + +### Documentation + +* add python quickstart sample ([#141](https://www.github.com/googleapis/python-bigquery-connection/issues/141)) ([8b85fb6](https://www.github.com/googleapis/python-bigquery-connection/commit/8b85fb6784ba9bf51123e9185f276391326dd54a)) + +## [1.3.1](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.3.0...v1.3.1) (2021-11-02) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([826da22](https://www.github.com/googleapis/python-bigquery-connection/commit/826da22f591ab1c16eadf3a53cc8476e04577f40)) +* **deps:** require google-api-core >= 1.28.0 ([826da22](https://www.github.com/googleapis/python-bigquery-connection/commit/826da22f591ab1c16eadf3a53cc8476e04577f40)) + + +### Documentation + +* list oneofs in docstring ([826da22](https://www.github.com/googleapis/python-bigquery-connection/commit/826da22f591ab1c16eadf3a53cc8476e04577f40)) + +## 
[1.3.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.2.3...v1.3.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#125](https://www.github.com/googleapis/python-bigquery-connection/issues/125)) ([bf9cc26](https://www.github.com/googleapis/python-bigquery-connection/commit/bf9cc268363a2a6e115b6af65ab6b50c05bbde28)) + +## [1.2.3](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.2.2...v1.2.3) (2021-10-05) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([87b77ee](https://www.github.com/googleapis/python-bigquery-connection/commit/87b77ee76e3abcce5428230a6884c66843353440)) +* improper types in pagers generation ([47d1b68](https://www.github.com/googleapis/python-bigquery-connection/commit/47d1b68b74dda036b64979e4c7aab589046822ba)) + +## [1.2.2](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.2.1...v1.2.2) (2021-07-27) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#102](https://www.github.com/googleapis/python-bigquery-connection/issues/102)) ([d3d00a5](https://www.github.com/googleapis/python-bigquery-connection/commit/d3d00a5ba2e4521217b09a53c279dc2134d20e48)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#98](https://www.github.com/googleapis/python-bigquery-connection/issues/98)) ([842e239](https://www.github.com/googleapis/python-bigquery-connection/commit/842e239cbde9f041a5d2d9a8785c94682bc9140b)) + + +### Miscellaneous Chores + +* release as 1.2.2 ([#103](https://www.github.com/googleapis/python-bigquery-connection/issues/103)) ([2d6b168](https://www.github.com/googleapis/python-bigquery-connection/commit/2d6b168a7fce539383e72c9ea00d93fffe233607)) + +## [1.2.1](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.2.0...v1.2.1) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#97](https://www.github.com/googleapis/python-bigquery-connection/issues/97)) ([11e1214](https://www.github.com/googleapis/python-bigquery-connection/commit/11e12147753b04f68811ec9144d59c0fc8b15530)) + +## [1.2.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.1.1...v1.2.0) (2021-07-13) + + +### Features + +* add cloud spanner connection support ([#93](https://www.github.com/googleapis/python-bigquery-connection/issues/93)) ([3ae2369](https://www.github.com/googleapis/python-bigquery-connection/commit/3ae236928f0ac923367d5379daa59f366299397b)) + +## [1.1.1](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.1.0...v1.1.1) (2021-06-30) + + +### Bug Fixes + +* disable always_use_jwt_access ([37f28c5](https://www.github.com/googleapis/python-bigquery-connection/commit/37f28c5112d9b8f180a8cf754d474ac74f5f92d9)) +* disable always_use_jwt_access ([#91](https://www.github.com/googleapis/python-bigquery-connection/issues/91)) ([37f28c5](https://www.github.com/googleapis/python-bigquery-connection/commit/37f28c5112d9b8f180a8cf754d474ac74f5f92d9)) + +## [1.1.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.0.1...v1.1.0) (2021-06-23) + + +### Features + +* add always_use_jwt_access ([#88](https://www.github.com/googleapis/python-bigquery-connection/issues/88)) ([821fffc](https://www.github.com/googleapis/python-bigquery-connection/commit/821fffcc3f9ecdb222e4a5a2c94ad9c5d3325681)) +* support self-signed JWT flow for service accounts 
([2f1db84](https://www.github.com/googleapis/python-bigquery-connection/commit/2f1db842b16cf2c3981c61b503482fa7df85bdfe)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([2f1db84](https://www.github.com/googleapis/python-bigquery-connection/commit/2f1db842b16cf2c3981c61b503482fa7df85bdfe)) +* **deps:** add packaging requirement ([#77](https://www.github.com/googleapis/python-bigquery-connection/issues/77)) ([2ab8403](https://www.github.com/googleapis/python-bigquery-connection/commit/2ab84031d3f46b5ccd1acaefe5b744679b43e140)) +* exclude docs and tests from package ([#83](https://www.github.com/googleapis/python-bigquery-connection/issues/83)) ([3ef23e5](https://www.github.com/googleapis/python-bigquery-connection/commit/3ef23e5b9e8f4a0bcef24dbe79773ca92a336ef0)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-bigquery-connection/issues/1127)) ([#85](https://www.github.com/googleapis/python-bigquery-connection/issues/85)) ([715e04b](https://www.github.com/googleapis/python-bigquery-connection/commit/715e04b77dc352b17e508288a7268c6c2ce46e10)), closes [#1126](https://www.github.com/googleapis/python-bigquery-connection/issues/1126) +* Update the README to reflect that this library is GA ([#79](https://www.github.com/googleapis/python-bigquery-connection/issues/79)) ([f737861](https://www.github.com/googleapis/python-bigquery-connection/commit/f7378614002697ed5c7dc9217fbe8b48ba7c7410)) + +## [1.0.1](https://www.github.com/googleapis/python-bigquery-connection/compare/v1.0.0...v1.0.1) (2021-02-03) + + +### Bug Fixes + +* remove gRPC send/recv limits ([#37](https://www.github.com/googleapis/python-bigquery-connection/issues/37)) ([c8d639a](https://www.github.com/googleapis/python-bigquery-connection/commit/c8d639a23d1800c37c2db1cf9f0fc41b51ac07b8)) + + +### Documentation + +* **python:** update intersphinx for grpc and auth ([#32](https://www.github.com/googleapis/python-bigquery-connection/issues/32)) ([f3ce3aa](https://www.github.com/googleapis/python-bigquery-connection/commit/f3ce3aa826173bf61b3b79803d0231c27f89e6fa)) + +## [1.0.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v0.3.0...v1.0.0) (2020-10-29) + + +### ⚠ BREAKING CHANGES + +* update package names to avoid conflict with google-cloud-bigquery (#27) + +### Bug Fixes + +* update package names to avoid conflict with google-cloud-bigquery ([#27](https://www.github.com/googleapis/python-bigquery-connection/issues/27)) ([741121c](https://www.github.com/googleapis/python-bigquery-connection/commit/741121c44786ac78e5750aa5383b6da918c3230c)) + +## [0.3.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v0.2.0...v0.3.0) (2020-10-28) + + +### Features + +* add AWS connection type ([#19](https://www.github.com/googleapis/python-bigquery-connection/issues/19)) ([3d1a41a](https://www.github.com/googleapis/python-bigquery-connection/commit/3d1a41ad208274448604a0a17d072f6fcb36535a)) + +## [0.2.0](https://www.github.com/googleapis/python-bigquery-connection/compare/v0.1.0...v0.2.0) (2020-08-10) + + +### Features + +* add async client ([#12](https://www.github.com/googleapis/python-bigquery-connection/issues/12)) ([58eb861](https://www.github.com/googleapis/python-bigquery-connection/commit/58eb8615e1858b50a9727db7a56cec3610959d4f)) + + +### Documentation + +* **readme:** adds link to BQ external data sources docs ([#5](https://www.github.com/googleapis/python-bigquery-connection/issues/5)) 
([4a740d0](https://www.github.com/googleapis/python-bigquery-connection/commit/4a740d0beba471bd5646a0c69045f69c9b158639)) + +## 0.1.0 (2020-05-19) + + +### Features + +* generate v1 ([73b89dc](https://www.github.com/googleapis/python-bigquery-connection/commit/73b89dcb423026c4b4e537ff728d22be2cb5ff3f)) diff --git a/packages/google-cloud-bigquery-connection/CODE_OF_CONDUCT.md b/packages/google-cloud-bigquery-connection/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. 
Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-bigquery-connection/CONTRIBUTING.rst b/packages/google-cloud-bigquery-connection/CONTRIBUTING.rst new file mode 100644 index 000000000000..1f4cf1ac4585 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. 
+ +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://nox.thea.codes/en/latest/>`__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. _nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.11 -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests.
You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via:: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-bigquery-connection + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-bigquery-connection/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable.
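+
+For illustration only (this example is not part of the generated template): under
+Semantic Versioning, a downstream project can accept minor and patch releases of
+this library while guarding against the next major version. A minimal sketch
+using the ``packaging`` library follows; the version range shown is an assumed
+example, not official guidance::
+
+    # Minimal sketch (assumes the "packaging" library is installed).
+    # The range ">=1.13,<2.0" is illustrative, not an official pin.
+    from packaging.specifiers import SpecifierSet
+    from packaging.version import Version
+
+    spec = SpecifierSet(">=1.13,<2.0")  # allow minor/patch, block the next major
+    installed = Version("1.13.1")       # e.g. from importlib.metadata.version(...)
+    assert installed in spec, "incompatible major version"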
+ +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-bigquery-connection/LICENSE b/packages/google-cloud-bigquery-connection/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-bigquery-connection/MANIFEST.in b/packages/google-cloud-bigquery-connection/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-bigquery-connection/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-bigquery-connection/README.rst b/packages/google-cloud-bigquery-connection/README.rst new file mode 100644 index 000000000000..d12780e9786e --- /dev/null +++ b/packages/google-cloud-bigquery-connection/README.rst @@ -0,0 +1,108 @@ +Python Client for BigQuery Connection +===================================== + +|stable| |pypi| |versions| + +`BigQuery Connection`_: Manage BigQuery connections to external data sources. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-connection.svg + :target: https://pypi.org/project/google-cloud-bigquery-connection/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-connection.svg + :target: https://pypi.org/project/google-cloud-bigquery-connection/ +.. _BigQuery Connection: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection +.. 
_Client Library Documentation: https://cloud.google.com/python/docs/reference/bigqueryconnection/latest +.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the BigQuery Connection.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the BigQuery Connection.: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-bigquery-connection + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-bigquery-connection + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for BigQuery Connection + to see other available methods on the client. +- Read the `BigQuery Connection Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _BigQuery Connection Product documentation: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-bigquery-connection/SECURITY.md b/packages/google-cloud-bigquery-connection/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-bigquery-connection/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+ +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. diff --git a/packages/google-cloud-bigquery-connection/docs/CHANGELOG.md b/packages/google-cloud-bigquery-connection/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-connection/docs/README.rst b/packages/google-cloud-bigquery-connection/docs/README.rst new file mode 100644 index 000000000000..8e5a07469704 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/README.rst @@ -0,0 +1,84 @@ +Python Client for BigQuery Connection +================================================= + +|beta| |pypi| |versions| + +`BigQuery Connection API`_: Manage BigQuery connections to external data sources. + +- `Client Library Documentation`_ +- `Product Documentation`_ +- `Introduction to BigQuery external data sources`_ + +.. |beta| image:: https://img.shields.io/badge/support-beta-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#beta-support +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-connection.svg + :target: https://pypi.org/project/google-cloud-bigquery-connection/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-connection.svg + :target: https://pypi.org/project/google-cloud-bigquery-connection/ +.. _BigQuery Connection API: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection/rest +.. _Client Library Documentation: https://googleapis.dev/python/bigqueryconnection/latest +.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection/rest +.. _Introduction to BigQuery external data sources: https://cloud.google.com/bigquery/external-data-sources + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the BigQuery Connection API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the BigQuery Connection API.: https://console.cloud.google.com/apis/library/bigqueryconnection.googleapis.com +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-bigquery-connection + + +Windows +^^^^^^^ + +.. 
code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-bigquery-connection + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for BigQuery Connection + API to see other available methods on the client. +- Read the `BigQuery Connection API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _BigQuery Connection API Product documentation: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection/rest +.. _repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/packages/google-cloud-bigquery-connection/docs/_static/custom.css b/packages/google-cloud-bigquery-connection/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-bigquery-connection/docs/_templates/layout.html b/packages/google-cloud-bigquery-connection/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+             As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+             Library versions released prior to that date will continue to be available. For more information please
+             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/connection_service.rst b/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/connection_service.rst new file mode 100644 index 000000000000..905920529de7 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/connection_service.rst @@ -0,0 +1,10 @@ +ConnectionService +----------------------------------- + +.. automodule:: google.cloud.bigquery_connection_v1.services.connection_service + :members: + :inherited-members: + +.. automodule:: google.cloud.bigquery_connection_v1.services.connection_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/services.rst b/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/services.rst new file mode 100644 index 000000000000..317815a9deef --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Bigquery Connection v1 API +==================================================== +.. toctree:: + :maxdepth: 2 + + connection_service diff --git a/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/types.rst b/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/types.rst new file mode 100644 index 000000000000..9c90aa6e8782 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/bigquery_connection_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Bigquery Connection v1 API +================================================= + +.. automodule:: google.cloud.bigquery_connection_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-bigquery-connection/docs/conf.py b/packages/google-cloud-bigquery-connection/docs/conf.py new file mode 100644 index 000000000000..b9909902f665 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-bigquery-connection documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. 
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-bigquery-connection" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. 
See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-bigquery-connection", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. 
+# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-bigquery-connection-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-bigquery-connection.tex", + "google-cloud-bigquery-connection Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-bigquery-connection", + "google-cloud-bigquery-connection Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-bigquery-connection", + "google-cloud-bigquery-connection Documentation", + author, + "google-cloud-bigquery-connection", + "google-cloud-bigquery-connection Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-bigquery-connection/docs/index.rst b/packages/google-cloud-bigquery-connection/docs/index.rst new file mode 100644 index 000000000000..ccfb7940ffb8 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + bigquery_connection_v1/services + bigquery_connection_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-bigquery-connection`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-bigquery-connection/docs/multiprocessing.rst b/packages/google-cloud-bigquery-connection/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/__init__.py new file mode 100644 index 000000000000..e1479d5a7e23 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/__init__.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
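The ``multiprocessing.rst`` note above can be made concrete. A hedged sketch of
fork-safe client creation, assuming default application credentials; the helper
name ``count_connections`` and the parent path are illustrative only:

.. code-block:: python

    import multiprocessing

    from google.cloud import bigquery_connection_v1


    def count_connections(parent: str) -> int:
        # Create the client inside the worker, i.e. after the fork.
        client = bigquery_connection_v1.ConnectionServiceClient()
        return sum(1 for _ in client.list_connections(parent=parent))


    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(count_connections, ["projects/my-project/locations/us"]))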
+# +from google.cloud.bigquery_connection import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.bigquery_connection_v1.services.connection_service.async_client import ( + ConnectionServiceAsyncClient, +) +from google.cloud.bigquery_connection_v1.services.connection_service.client import ( + ConnectionServiceClient, +) +from google.cloud.bigquery_connection_v1.types.connection import ( + AwsAccessRole, + AwsCrossAccountRole, + AwsProperties, + AzureProperties, + CloudResourceProperties, + CloudSpannerProperties, + CloudSqlCredential, + CloudSqlProperties, + Connection, + CreateConnectionRequest, + DeleteConnectionRequest, + GetConnectionRequest, + ListConnectionsRequest, + ListConnectionsResponse, + MetastoreServiceConfig, + SalesforceDataCloudProperties, + SparkHistoryServerConfig, + SparkProperties, + UpdateConnectionRequest, +) + +__all__ = ( + "ConnectionServiceClient", + "ConnectionServiceAsyncClient", + "AwsAccessRole", + "AwsCrossAccountRole", + "AwsProperties", + "AzureProperties", + "CloudResourceProperties", + "CloudSpannerProperties", + "CloudSqlCredential", + "CloudSqlProperties", + "Connection", + "CreateConnectionRequest", + "DeleteConnectionRequest", + "GetConnectionRequest", + "ListConnectionsRequest", + "ListConnectionsResponse", + "MetastoreServiceConfig", + "SalesforceDataCloudProperties", + "SparkHistoryServerConfig", + "SparkProperties", + "UpdateConnectionRequest", +) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py new file mode 100644 index 000000000000..b01a85df0fd5 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/py.typed b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/py.typed new file mode 100644 index 000000000000..cf21dff2cc5c --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-connection package uses inline types. diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/__init__.py new file mode 100644 index 000000000000..dc69d9140bec --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/__init__.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
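The top-level ``google.cloud.bigquery_connection`` package simply re-exports the
v1 clients and types listed in its ``__all__`` above, so both import paths name
the same objects; a small sketch:

.. code-block:: python

    from google.cloud import bigquery_connection, bigquery_connection_v1

    # Both names resolve to the same generated message class.
    assert (
        bigquery_connection.CreateConnectionRequest
        is bigquery_connection_v1.CreateConnectionRequest
    )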
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.bigquery_connection_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.connection_service import ( + ConnectionServiceAsyncClient, + ConnectionServiceClient, +) +from .types.connection import ( + AwsAccessRole, + AwsCrossAccountRole, + AwsProperties, + AzureProperties, + CloudResourceProperties, + CloudSpannerProperties, + CloudSqlCredential, + CloudSqlProperties, + Connection, + CreateConnectionRequest, + DeleteConnectionRequest, + GetConnectionRequest, + ListConnectionsRequest, + ListConnectionsResponse, + MetastoreServiceConfig, + SalesforceDataCloudProperties, + SparkHistoryServerConfig, + SparkProperties, + UpdateConnectionRequest, +) + +__all__ = ( + "ConnectionServiceAsyncClient", + "AwsAccessRole", + "AwsCrossAccountRole", + "AwsProperties", + "AzureProperties", + "CloudResourceProperties", + "CloudSpannerProperties", + "CloudSqlCredential", + "CloudSqlProperties", + "Connection", + "ConnectionServiceClient", + "CreateConnectionRequest", + "DeleteConnectionRequest", + "GetConnectionRequest", + "ListConnectionsRequest", + "ListConnectionsResponse", + "MetastoreServiceConfig", + "SalesforceDataCloudProperties", + "SparkHistoryServerConfig", + "SparkProperties", + "UpdateConnectionRequest", +) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_metadata.json b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_metadata.json new file mode 100644 index 000000000000..df674495dc33 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_metadata.json @@ -0,0 +1,148 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery_connection_v1", + "protoPackage": "google.cloud.bigquery.connection.v1", + "schema": "1.0", + "services": { + "ConnectionService": { + "clients": { + "grpc": { + "libraryClient": "ConnectionServiceClient", + "rpcs": { + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConnectionServiceAsyncClient", + "rpcs": { + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { 
+ "methods": [ + "test_iam_permissions" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + }, + "rest": { + "libraryClient": "ConnectionServiceClient", + "rpcs": { + "CreateConnection": { + "methods": [ + "create_connection" + ] + }, + "DeleteConnection": { + "methods": [ + "delete_connection" + ] + }, + "GetConnection": { + "methods": [ + "get_connection" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "ListConnections": { + "methods": [ + "list_connections" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateConnection": { + "methods": [ + "update_connection" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py new file mode 100644 index 000000000000..b01a85df0fd5 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.13.1" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/py.typed b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/py.typed new file mode 100644 index 000000000000..cf21dff2cc5c --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-connection package uses inline types. diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py new file mode 100644 index 000000000000..1729977c36f4 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ConnectionServiceAsyncClient +from .client import ConnectionServiceClient + +__all__ = ( + "ConnectionServiceClient", + "ConnectionServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py new file mode 100644 index 000000000000..30bac8fd3912 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py @@ -0,0 +1,1220 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
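Ahead of the generated async client below, a hedged orientation sketch of how it
is typically driven (default application credentials assumed; the connection
name is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_connection_v1


    async def main() -> None:
        client = bigquery_connection_v1.ConnectionServiceAsyncClient()
        request = bigquery_connection_v1.GetConnectionRequest(
            name="projects/my-project/locations/us/connections/my-conn",
        )
        connection = await client.get_connection(request=request)
        print(connection.friendly_name)


    asyncio.run(main())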
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_connection_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.bigquery_connection_v1.services.connection_service import pagers +from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection +from google.cloud.bigquery_connection_v1.types import connection + +from .client import ConnectionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ConnectionServiceTransport +from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport + + +class ConnectionServiceAsyncClient: + """Manages external data source connections and credentials.""" + + _client: ConnectionServiceClient + + DEFAULT_ENDPOINT = ConnectionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT + + cluster_path = staticmethod(ConnectionServiceClient.cluster_path) + parse_cluster_path = staticmethod(ConnectionServiceClient.parse_cluster_path) + connection_path = staticmethod(ConnectionServiceClient.connection_path) + parse_connection_path = staticmethod(ConnectionServiceClient.parse_connection_path) + service_path = staticmethod(ConnectionServiceClient.service_path) + parse_service_path = staticmethod(ConnectionServiceClient.parse_service_path) + common_billing_account_path = staticmethod( + ConnectionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ConnectionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ConnectionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ConnectionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ConnectionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ConnectionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ConnectionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ConnectionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ConnectionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ConnectionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConnectionServiceAsyncClient: The constructed client. 
+ """ + return ConnectionServiceClient.from_service_account_info.__func__(ConnectionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConnectionServiceAsyncClient: The constructed client. + """ + return ConnectionServiceClient.from_service_account_file.__func__(ConnectionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ConnectionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ConnectionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ConnectionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ConnectionServiceClient).get_transport_class, type(ConnectionServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ConnectionServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the connection service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ConnectionServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ConnectionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_connection( + self, + request: Optional[Union[gcbc_connection.CreateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection: Optional[gcbc_connection.Connection] = None, + connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbc_connection.Connection: + r"""Creates a new connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + async def sample_create_connection(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.CreateConnectionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_connection_v1.types.CreateConnectionRequest, dict]]): + The request object. The request for + [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection]. + parent (:class:`str`): + Required. Parent resource name. Must be in the format + ``projects/{project_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (:class:`google.cloud.bigquery_connection_v1.types.Connection`): + Required. Connection to create. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_id (:class:`str`): + Optional. Connection id that should + be assigned to the created connection. 
+ + This corresponds to the ``connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_connection_v1.types.Connection: + Configuration parameters to establish + connection with an external data source, + except the credential attributes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection, connection_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcbc_connection.CreateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + if connection_id is not None: + request.connection_id = connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_connection( + self, + request: Optional[Union[connection.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> connection.Connection: + r"""Returns specified connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + async def sample_get_connection(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_connection_v1.types.GetConnectionRequest, dict]]): + The request object. The request for + [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection]. + name (:class:`str`): + Required. 
Name of the requested connection, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_connection_v1.types.Connection: + Configuration parameters to establish + connection with an external data source, + except the credential attributes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = connection.GetConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_connection, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_connections( + self, + request: Optional[Union[connection.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsAsyncPager: + r"""Returns a list of connections in the given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + async def sample_list_connections(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.ListConnectionsRequest( + parent="parent_value", + page_size=951, + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_connection_v1.types.ListConnectionsRequest, dict]]): + The request object. The request for + [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. + parent (:class:`str`): + Required. Parent resource name. Must be in the form: + ``projects/{project_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsAsyncPager: + The response for + [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = connection.ListConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_connections, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_connection( + self, + request: Optional[Union[gcbc_connection.UpdateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + connection: Optional[gcbc_connection.Connection] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbc_connection.Connection: + r"""Updates the specified connection. For security + reasons, also resets credential if connection properties + are in the update field mask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + async def sample_update_connection(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.UpdateConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.update_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest, dict]]): + The request object. The request for + [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection]. + name (:class:`str`): + Required. Name of the connection to update, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (:class:`google.cloud.bigquery_connection_v1.types.Connection`): + Required. Connection containing the + updated fields. + + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Update mask for the + connection fields to be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_connection_v1.types.Connection: + Configuration parameters to establish + connection with an external data source, + except the credential attributes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, connection, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = gcbc_connection.UpdateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if connection is not None: + request.connection = connection + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_connection( + self, + request: Optional[Union[connection.DeleteConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes connection and associated credential. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + async def sample_delete_connection(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + await client.delete_connection(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest, dict]]): + The request object. The request for + [ConnectionService.DeleteConnectionRequest][]. + name (:class:`str`): + Required. Name of the deleted connection, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = connection.DeleteConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
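+ # (The default retry below implements exponential backoff: waits
+ # start at 0.1s and grow by a factor of 1.3 (0.1s, 0.13s, ~0.17s,
+ # ...), capped at 60s per wait; only DeadlineExceeded and
+ # ServiceUnavailable errors are retried, up to the 60s deadline.)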
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_connection, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does + not have a policy set. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on the specified resource. + Replaces any existing policy. + + Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and + ``PERMISSION_DENIED`` errors. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). 
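+
+ **Python example (illustrative only; a minimal sketch using the
+ placeholder values above, not part of the generated surface):**
+
+ ::
+
+ from google.iam.v1 import iam_policy_pb2, policy_pb2
+
+ policy = policy_pb2.Policy(
+ bindings=[
+ policy_pb2.Binding(
+ role="roles/resourcemanager.organizationViewer",
+ members=["user:eve@example.com"],
+ )
+ ],
+ )
+ request = iam_policy_pb2.SetIamPolicyRequest(
+ resource="resource_value",
+ policy=policy,
+ )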
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a ``NOT_FOUND`` error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. 
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ permissions (:class:`MutableSequence[str]`):
+ The set of permissions to check for the ``resource``.
+ Permissions with wildcards (such as '*' or 'storage.*')
+ are not allowed. For more information see `IAM
+ Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+ This corresponds to the ``permissions`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([resource, permissions])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+ elif not request:
+ request = iam_policy_pb2.TestIamPermissionsRequest(
+ resource=resource,
+ permissions=permissions,
+ )
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.test_iam_permissions,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def __aenter__(self) -> "ConnectionServiceAsyncClient":
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb):
+ await self.transport.close()
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=package_version.__version__
+)
+
+
+__all__ = ("ConnectionServiceAsyncClient",)
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py
new file mode 100644
index 000000000000..f63a6d3757b4
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py
@@ -0,0 +1,1460 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_connection_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.bigquery_connection_v1.services.connection_service import pagers +from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection +from google.cloud.bigquery_connection_v1.types import connection + +from .transports.base import DEFAULT_CLIENT_INFO, ConnectionServiceTransport +from .transports.grpc import ConnectionServiceGrpcTransport +from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport +from .transports.rest import ConnectionServiceRestTransport + + +class ConnectionServiceClientMeta(type): + """Metaclass for the ConnectionService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ConnectionServiceTransport]] + _transport_registry["grpc"] = ConnectionServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ConnectionServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ConnectionServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ConnectionServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ConnectionServiceClient(metaclass=ConnectionServiceClientMeta): + """Manages external data source connections and credentials.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
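+
+ For example (illustrative):
+ "bigqueryconnection.googleapis.com" is converted to
+ "bigqueryconnection.mtls.googleapis.com".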
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "bigqueryconnection.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConnectionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConnectionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConnectionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ConnectionServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def cluster_path( + project: str, + region: str, + cluster: str, + ) -> str: + """Returns a fully-qualified cluster string.""" + return "projects/{project}/regions/{region}/clusters/{cluster}".format( + project=project, + region=region, + cluster=cluster, + ) + + @staticmethod + def parse_cluster_path(path: str) -> Dict[str, str]: + """Parses a cluster path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/clusters/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def connection_path( + project: str, + location: str, + connection: str, + ) -> str: + """Returns a fully-qualified connection string.""" + return ( + "projects/{project}/locations/{location}/connections/{connection}".format( + project=project, + location=location, + connection=connection, + ) + ) + + @staticmethod + def parse_connection_path(path: str) -> Dict[str, str]: + """Parses a connection path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/connections/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def service_path( + project: str, + location: str, + service: str, + ) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/services/{service}".format( + project=project, + location=location, + service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location 
string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConnectionServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the connection service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ConnectionServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ConnectionServiceTransport): + # transport is a ConnectionServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_connection( + self, + request: Optional[Union[gcbc_connection.CreateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + connection: Optional[gcbc_connection.Connection] = None, + connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbc_connection.Connection: + r"""Creates a new connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + def sample_create_connection(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.CreateConnectionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_connection_v1.types.CreateConnectionRequest, dict]): + The request object. The request for + [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection]. + parent (str): + Required. Parent resource name. Must be in the format + ``projects/{project_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (google.cloud.bigquery_connection_v1.types.Connection): + Required. Connection to create. + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection_id (str): + Optional. Connection id that should + be assigned to the created connection. + + This corresponds to the ``connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_connection_v1.types.Connection: + Configuration parameters to establish + connection with an external data source, + except the credential attributes. + + """ + # Create or coerce a protobuf request object. 
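+ # (Callers may pass either a fully-formed request object or the
+ # flattened fields, but not both; for example, these illustrative
+ # calls are equivalent:
+ #     client.create_connection(request={"parent": "projects/p/locations/us"})
+ #     client.create_connection(parent="projects/p/locations/us")
+ # while mixing `request=` with `parent=` raises the ValueError below.)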
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, connection, connection_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcbc_connection.CreateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcbc_connection.CreateConnectionRequest): + request = gcbc_connection.CreateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if connection is not None: + request.connection = connection + if connection_id is not None: + request.connection_id = connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_connection( + self, + request: Optional[Union[connection.GetConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> connection.Connection: + r"""Returns specified connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + def sample_get_connection(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.GetConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_connection_v1.types.GetConnectionRequest, dict]): + The request object. The request for + [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection]. + name (str): + Required. Name of the requested connection, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.bigquery_connection_v1.types.Connection: + Configuration parameters to establish + connection with an external data source, + except the credential attributes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a connection.GetConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, connection.GetConnectionRequest): + request = connection.GetConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_connections( + self, + request: Optional[Union[connection.ListConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConnectionsPager: + r"""Returns a list of connections in the given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + def sample_list_connections(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.ListConnectionsRequest( + parent="parent_value", + page_size=951, + ) + + # Make the request + page_result = client.list_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_connection_v1.types.ListConnectionsRequest, dict]): + The request object. The request for + [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. + parent (str): + Required. Parent resource name. Must be in the form: + ``projects/{project_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsPager: + The response for + [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a connection.ListConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, connection.ListConnectionsRequest): + request = connection.ListConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_connection( + self, + request: Optional[Union[gcbc_connection.UpdateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + connection: Optional[gcbc_connection.Connection] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbc_connection.Connection: + r"""Updates the specified connection. For security + reasons, also resets credential if connection properties + are in the update field mask. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + def sample_update_connection(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.UpdateConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.update_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest, dict]): + The request object. The request for + [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection]. + name (str): + Required. Name of the connection to update, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + connection (google.cloud.bigquery_connection_v1.types.Connection): + Required. Connection containing the + updated fields. + + This corresponds to the ``connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Update mask for the + connection fields to be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_connection_v1.types.Connection: + Configuration parameters to establish + connection with an external data source, + except the credential attributes. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, connection, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcbc_connection.UpdateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcbc_connection.UpdateConnectionRequest): + request = gcbc_connection.UpdateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if connection is not None: + request.connection = connection + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
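+ # (The routing header assembled above is sent as the
+ # `x-goog-request-params` request metadata key, for example
+ # `name=projects/p/locations/us/connections/c` (illustrative), so
+ # the backend can route the call to the right resource.)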
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_connection( + self, + request: Optional[Union[connection.DeleteConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes connection and associated credential. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + + def sample_delete_connection(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceClient() + + # Initialize request argument(s) + request = bigquery_connection_v1.DeleteConnectionRequest( + name="name_value", + ) + + # Make the request + client.delete_connection(request=request) + + Args: + request (Union[google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest, dict]): + The request object. The request for + [ConnectionService.DeleteConnectionRequest][]. + name (str): + Required. Name of the deleted connection, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a connection.DeleteConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, connection.DeleteConnectionRequest): + request = connection.DeleteConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
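+ # (DeleteConnection returns google.protobuf.Empty, so the call's
+ # result is intentionally discarded and this method returns None.)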
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does + not have a policy set. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
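+
+ **Python example (illustrative; inspects the returned policy):**
+
+ ::
+
+ policy = client.get_iam_policy(request={"resource": "resource_value"})
+ for binding in policy.bindings:
+ print(binding.role, list(binding.members))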
+
+                **JSON example:**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](\ https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        if isinstance(request, dict):
+            # The request isn't a proto-plus wrapped type,
+            # so it must be constructed via keyword expansion.
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+        elif not request:
+            # Null request, just make one.
+            request = iam_policy_pb2.GetIamPolicyRequest()
+        if resource is not None:
+            request.resource = resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def set_iam_policy(
+        self,
+        request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+        *,
+        resource: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the access control policy on the specified resource.
+        Replaces any existing policy.
+
+        Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and
+        ``PERMISSION_DENIED`` errors.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_connection_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            def sample_set_iam_policy():
+                # Create a client
+                client = bigquery_connection_v1.ConnectionServiceClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.SetIamPolicyRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = client.set_iam_policy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]):
+                The request object. Request message for ``SetIamPolicy`` method.
+            resource (str):
+                REQUIRED: The resource for which the
+                policy is being specified. See the
+                operation documentation for the
+                appropriate value for this field.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.iam.v1.policy_pb2.Policy:
+                An Identity and Access Management (IAM) policy, which specifies access
+                controls for Google Cloud resources.
+
+                A Policy is a collection of bindings. A binding binds
+                one or more members, or principals, to a single role.
+                Principals can be user accounts, service accounts,
+                Google groups, and domains (such as G Suite). A role
+                is a named list of permissions; each role can be an
+                IAM predefined role or a user-created custom role.
+
+                For some types of Google Cloud resources, a binding
+                can also specify a condition, which is a logical
+                expression that allows access to a resource only if
+                the expression evaluates to true. A condition can add
+                constraints based on attributes of the request, the
+                resource, or both. To learn which resources support
+                conditions in their IAM policies, see the [IAM
+                documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies).
+
+                **JSON example:**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](\ https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
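+        # Note: ``SetIamPolicy`` replaces the existing policy wholesale, so a
+        # read-modify-write using the policy returned by ``get_iam_policy``
+        # (including its etag) is the usual pattern. A hedged sketch (the
+        # role and member below are hypothetical):
+        #
+        #   policy = client.get_iam_policy(resource=resource_name)
+        #   policy.bindings.add(
+        #       role="roles/bigquery.connectionUser",
+        #       members=["user:alice@example.com"],
+        #   )
+        #   client.set_iam_policy(
+        #       request={"resource": resource_name, "policy": policy}
+        #   )
+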
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a ``NOT_FOUND`` error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_connection_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = bigquery_connection_v1.ConnectionServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + resource (str): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (MutableSequence[str]): + The set of permissions to check for the ``resource``. 
+                Permissions with wildcards (such as '*' or 'storage.*')
+                are not allowed. For more information see `IAM
+                Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+                This corresponds to the ``permissions`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for TestIamPermissions method.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource, permissions])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        if isinstance(request, dict):
+            # The request isn't a proto-plus wrapped type,
+            # so it must be constructed via keyword expansion.
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+        elif not request:
+            # Null request, just make one.
+            request = iam_policy_pb2.TestIamPermissionsRequest()
+        if resource is not None:
+            request.resource = resource
+        if permissions:
+            request.permissions.extend(permissions)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "ConnectionServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+        """
+        self.transport.close()
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+__all__ = ("ConnectionServiceClient",)
diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py
new file mode 100644
index 000000000000..94976d618dbb
--- /dev/null
+++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py
@@ -0,0 +1,155 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.bigquery_connection_v1.types import connection + + +class ListConnectionsPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., connection.ListConnectionsResponse], + request: connection.ListConnectionsRequest, + response: connection.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_connection_v1.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.bigquery_connection_v1.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = connection.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[connection.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[connection.Connection]: + for page in self.pages: + yield from page.connections + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConnectionsAsyncPager: + """A pager for iterating through ``list_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListConnections`` requests and continue to iterate + through the ``connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[connection.ListConnectionsResponse]], + request: connection.ListConnectionsRequest, + response: connection.ListConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_connection_v1.types.ListConnectionsRequest): + The initial request object. + response (google.cloud.bigquery_connection_v1.types.ListConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = connection.ListConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[connection.ListConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[connection.Connection]: + async def async_generator(): + async for page in self.pages: + for response in page.connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py new file mode 100644 index 000000000000..7c00b984654f --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ConnectionServiceTransport +from .grpc import ConnectionServiceGrpcTransport +from .grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport +from .rest import ConnectionServiceRestInterceptor, ConnectionServiceRestTransport + +# Compile a registry of transports. 
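+# The registry maps the ``transport`` argument accepted by the client to a
+# concrete transport class. A hedged usage sketch ("rest" assumes the REST
+# transport registered below is available in the installed version):
+#
+#   from google.cloud import bigquery_connection_v1
+#
+#   client = bigquery_connection_v1.ConnectionServiceClient(transport="rest")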
+_transport_registry = OrderedDict() # type: Dict[str, Type[ConnectionServiceTransport]] +_transport_registry["grpc"] = ConnectionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ConnectionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ConnectionServiceRestTransport + +__all__ = ( + "ConnectionServiceTransport", + "ConnectionServiceGrpcTransport", + "ConnectionServiceGrpcAsyncIOTransport", + "ConnectionServiceRestTransport", + "ConnectionServiceRestInterceptor", +) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py new file mode 100644 index 000000000000..0cab80cf94a9 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py @@ -0,0 +1,296 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery_connection_v1 import gapic_version as package_version +from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection +from google.cloud.bigquery_connection_v1.types import connection + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ConnectionServiceTransport(abc.ABC): + """Abstract transport class for ConnectionService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "bigqueryconnection.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
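+        # Each entry wraps the raw transport method with the default retry
+        # and timeout policies defined below; callers can still override both
+        # per call at the client surface. A hedged sketch of such an override
+        # (the resource name and retry values are illustrative):
+        #
+        #   from google.api_core import retry as retries
+        #
+        #   client.get_connection(
+        #       name="projects/my-project/locations/us/connections/my-conn",
+        #       retry=retries.Retry(initial=0.2, maximum=30.0, multiplier=2.0),
+        #       timeout=30.0,
+        #   )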
+ self._wrapped_methods = { + self.create_connection: gapic_v1.method.wrap_method( + self.create_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_connection: gapic_v1.method.wrap_method( + self.get_connection, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_connections: gapic_v1.method.wrap_method( + self.list_connections, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_connection: gapic_v1.method.wrap_method( + self.update_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_connection: gapic_v1.method.wrap_method( + self.delete_connection, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=60.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_connection( + self, + ) -> Callable[ + [gcbc_connection.CreateConnectionRequest], + Union[gcbc_connection.Connection, Awaitable[gcbc_connection.Connection]], + ]: + raise NotImplementedError() + + @property + def get_connection( + self, + ) -> Callable[ + [connection.GetConnectionRequest], + Union[connection.Connection, Awaitable[connection.Connection]], + ]: + raise NotImplementedError() + + @property + def list_connections( + self, + ) -> Callable[ + [connection.ListConnectionsRequest], + Union[ + connection.ListConnectionsResponse, + Awaitable[connection.ListConnectionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_connection( + self, + ) -> Callable[ + [gcbc_connection.UpdateConnectionRequest], + Union[gcbc_connection.Connection, Awaitable[gcbc_connection.Connection]], + ]: + raise NotImplementedError() + + @property + def delete_connection( + self, + ) -> Callable[ + [connection.DeleteConnectionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ConnectionServiceTransport",) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py new file mode 100644 index 000000000000..2a06cd5ed2eb --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
+from google.cloud.bigquery_connection_v1.types import connection
+
+from .base import DEFAULT_CLIENT_INFO, ConnectionServiceTransport
+
+
+class ConnectionServiceGrpcTransport(ConnectionServiceTransport):
+    """gRPC backend transport for ConnectionService.
+
+    Manages external data source connections and credentials.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "bigqueryconnection.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "bigqueryconnection.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def create_connection(
+        self,
+    ) -> Callable[
+        [gcbc_connection.CreateConnectionRequest], gcbc_connection.Connection
+    ]:
+        r"""Return a callable for the create connection method over gRPC.
+
+        Creates a new connection.
+
+        Returns:
+            Callable[[~.CreateConnectionRequest],
+                    ~.Connection]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_connection" not in self._stubs:
+            self._stubs["create_connection"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.connection.v1.ConnectionService/CreateConnection",
+                request_serializer=gcbc_connection.CreateConnectionRequest.serialize,
+                response_deserializer=gcbc_connection.Connection.deserialize,
+            )
+        return self._stubs["create_connection"]
+
+    @property
+    def get_connection(
+        self,
+    ) -> Callable[[connection.GetConnectionRequest], connection.Connection]:
+        r"""Return a callable for the get connection method over gRPC.
+
+        Returns specified connection.
+
+        Returns:
+            Callable[[~.GetConnectionRequest],
+                    ~.Connection]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_connection" not in self._stubs:
+            self._stubs["get_connection"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.connection.v1.ConnectionService/GetConnection",
+                request_serializer=connection.GetConnectionRequest.serialize,
+                response_deserializer=connection.Connection.deserialize,
+            )
+        return self._stubs["get_connection"]
+
+    @property
+    def list_connections(
+        self,
+    ) -> Callable[
+        [connection.ListConnectionsRequest], connection.ListConnectionsResponse
+    ]:
+        r"""Return a callable for the list connections method over gRPC.
+
+        Returns a list of connections in the given project.
+ + Returns: + Callable[[~.ListConnectionsRequest], + ~.ListConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_connections" not in self._stubs: + self._stubs["list_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/ListConnections", + request_serializer=connection.ListConnectionsRequest.serialize, + response_deserializer=connection.ListConnectionsResponse.deserialize, + ) + return self._stubs["list_connections"] + + @property + def update_connection( + self, + ) -> Callable[ + [gcbc_connection.UpdateConnectionRequest], gcbc_connection.Connection + ]: + r"""Return a callable for the update connection method over gRPC. + + Updates the specified connection. For security + reasons, also resets credential if connection properties + are in the update field mask. + + Returns: + Callable[[~.UpdateConnectionRequest], + ~.Connection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_connection" not in self._stubs: + self._stubs["update_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/UpdateConnection", + request_serializer=gcbc_connection.UpdateConnectionRequest.serialize, + response_deserializer=gcbc_connection.Connection.deserialize, + ) + return self._stubs["update_connection"] + + @property + def delete_connection( + self, + ) -> Callable[[connection.DeleteConnectionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete connection method over gRPC. + + Deletes connection and associated credential. + + Returns: + Callable[[~.DeleteConnectionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_connection" not in self._stubs: + self._stubs["delete_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/DeleteConnection", + request_serializer=connection.DeleteConnectionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_connection"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does + not have a policy set. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
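+        # Unlike the connection RPCs above, the IAM request/response types
+        # come from the raw ``google.iam.v1`` protobuf modules rather than
+        # proto-plus, so this stub uses the plain protobuf
+        # ``SerializeToString``/``FromString`` (de)serializers.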
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on the specified resource. + Replaces any existing policy. + + Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and + ``PERMISSION_DENIED`` errors. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a ``NOT_FOUND`` error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
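+        # As with the other stubs, the callable is created once on first
+        # property access and cached in ``self._stubs``; later calls reuse
+        # the same channel callable.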
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ConnectionServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d320530d2626 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection +from google.cloud.bigquery_connection_v1.types import connection + +from .base import DEFAULT_CLIENT_INFO, ConnectionServiceTransport +from .grpc import ConnectionServiceGrpcTransport + + +class ConnectionServiceGrpcAsyncIOTransport(ConnectionServiceTransport): + """gRPC AsyncIO backend transport for ConnectionService. + + Manages external data source connections and credentials. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "bigqueryconnection.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "bigqueryconnection.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
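+        # The same ``aio.Channel`` instance backs every stub on this
+        # transport. A hedged usage sketch from inside a coroutine (the
+        # resource name below is a hypothetical placeholder):
+        #
+        #   client = bigquery_connection_v1.ConnectionServiceAsyncClient()
+        #   conn = await client.get_connection(
+        #       name="projects/my-project/locations/us/connections/my-conn"
+        #   )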
+ return self._grpc_channel + + @property + def create_connection( + self, + ) -> Callable[ + [gcbc_connection.CreateConnectionRequest], Awaitable[gcbc_connection.Connection] + ]: + r"""Return a callable for the create connection method over gRPC. + + Creates a new connection. + + Returns: + Callable[[~.CreateConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_connection" not in self._stubs: + self._stubs["create_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/CreateConnection", + request_serializer=gcbc_connection.CreateConnectionRequest.serialize, + response_deserializer=gcbc_connection.Connection.deserialize, + ) + return self._stubs["create_connection"] + + @property + def get_connection( + self, + ) -> Callable[[connection.GetConnectionRequest], Awaitable[connection.Connection]]: + r"""Return a callable for the get connection method over gRPC. + + Returns specified connection. + + Returns: + Callable[[~.GetConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_connection" not in self._stubs: + self._stubs["get_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/GetConnection", + request_serializer=connection.GetConnectionRequest.serialize, + response_deserializer=connection.Connection.deserialize, + ) + return self._stubs["get_connection"] + + @property + def list_connections( + self, + ) -> Callable[ + [connection.ListConnectionsRequest], + Awaitable[connection.ListConnectionsResponse], + ]: + r"""Return a callable for the list connections method over gRPC. + + Returns a list of connections in the given project. + + Returns: + Callable[[~.ListConnectionsRequest], + Awaitable[~.ListConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_connections" not in self._stubs: + self._stubs["list_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/ListConnections", + request_serializer=connection.ListConnectionsRequest.serialize, + response_deserializer=connection.ListConnectionsResponse.deserialize, + ) + return self._stubs["list_connections"] + + @property + def update_connection( + self, + ) -> Callable[ + [gcbc_connection.UpdateConnectionRequest], Awaitable[gcbc_connection.Connection] + ]: + r"""Return a callable for the update connection method over gRPC. + + Updates the specified connection. For security + reasons, also resets credential if connection properties + are in the update field mask. + + Returns: + Callable[[~.UpdateConnectionRequest], + Awaitable[~.Connection]]: + A function that, when called, will call the underlying RPC + on the server. 
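+
+                A hedged usage sketch (``transport`` and ``request`` are
+                assumed to exist already; this is illustrative only):
+
+                .. code-block:: python
+
+                    # Awaiting the returned callable issues the RPC.
+                    updated = await transport.update_connection(request)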
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_connection" not in self._stubs: + self._stubs["update_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/UpdateConnection", + request_serializer=gcbc_connection.UpdateConnectionRequest.serialize, + response_deserializer=gcbc_connection.Connection.deserialize, + ) + return self._stubs["update_connection"] + + @property + def delete_connection( + self, + ) -> Callable[[connection.DeleteConnectionRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete connection method over gRPC. + + Deletes connection and associated credential. + + Returns: + Callable[[~.DeleteConnectionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_connection" not in self._stubs: + self._stubs["delete_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/DeleteConnection", + request_serializer=connection.DeleteConnectionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_connection"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does + not have a policy set. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on the specified resource. + Replaces any existing policy. + + Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and + ``PERMISSION_DENIED`` errors. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a ``NOT_FOUND`` error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.connection.v1.ConnectionService/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ConnectionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py new file mode 100644 index 000000000000..2175fb6594a4 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py @@ -0,0 +1,1385 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import dataclasses
+import json  # type: ignore
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.transport.requests import AuthorizedSession  # type: ignore
+from google.protobuf import json_format
+import grpc  # type: ignore
+from requests import __version__ as requests_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
+from google.cloud.bigquery_connection_v1.types import connection
+
+from .base import ConnectionServiceTransport
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=requests_version,
+)
+
+
+class ConnectionServiceRestInterceptor:
+    """Interceptor for ConnectionService.
+
+    Interceptors are used to manipulate requests, request metadata, and responses
+    in arbitrary ways.
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the ConnectionServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomConnectionServiceInterceptor(ConnectionServiceRestInterceptor):
+            def pre_create_connection(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_connection(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_connection(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_connection(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_connection(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_iam_policy(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_iam_policy(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_connections(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_connections(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_set_iam_policy(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_set_iam_policy(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_test_iam_permissions(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_test_iam_permissions(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_connection(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_connection(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = ConnectionServiceRestTransport(interceptor=MyCustomConnectionServiceInterceptor())
+        client = ConnectionServiceClient(transport=transport)
+
+
+    """
+
+    def pre_create_connection(
+        self,
+        request: gcbc_connection.CreateConnectionRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[gcbc_connection.CreateConnectionRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_connection
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the ConnectionService server.
+        """
+        return request, metadata
+
+    def post_create_connection(
+        self, response: gcbc_connection.Connection
+    ) -> gcbc_connection.Connection:
+        """Post-rpc interceptor for create_connection
+
+        Override in a subclass to manipulate the response
+        after it is returned by the ConnectionService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_connection(
+        self,
+        request: connection.DeleteConnectionRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[connection.DeleteConnectionRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_connection
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the ConnectionService server.
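+
+        A minimal override sketch (the logger name is illustrative only):
+
+        .. code-block:: python
+
+            import logging
+
+            class AuditingInterceptor(ConnectionServiceRestInterceptor):
+                def pre_delete_connection(self, request, metadata):
+                    logging.getLogger("audit").info("Deleting %s", request.name)
+                    return request, metadata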
+ """ + return request, metadata + + def pre_get_connection( + self, + request: connection.GetConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[connection.GetConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConnectionService server. + """ + return request, metadata + + def post_get_connection( + self, response: connection.Connection + ) -> connection.Connection: + """Post-rpc interceptor for get_connection + + Override in a subclass to manipulate the response + after it is returned by the ConnectionService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConnectionService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ConnectionService server but before + it is returned to user code. + """ + return response + + def pre_list_connections( + self, + request: connection.ListConnectionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[connection.ListConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConnectionService server. + """ + return request, metadata + + def post_list_connections( + self, response: connection.ListConnectionsResponse + ) -> connection.ListConnectionsResponse: + """Post-rpc interceptor for list_connections + + Override in a subclass to manipulate the response + after it is returned by the ConnectionService server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConnectionService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ConnectionService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConnectionService server. 
+ """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the ConnectionService server but before + it is returned to user code. + """ + return response + + def pre_update_connection( + self, + request: gcbc_connection.UpdateConnectionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcbc_connection.UpdateConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConnectionService server. + """ + return request, metadata + + def post_update_connection( + self, response: gcbc_connection.Connection + ) -> gcbc_connection.Connection: + """Post-rpc interceptor for update_connection + + Override in a subclass to manipulate the response + after it is returned by the ConnectionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ConnectionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ConnectionServiceRestInterceptor + + +class ConnectionServiceRestTransport(ConnectionServiceTransport): + """REST backend transport for ConnectionService. + + Manages external data source connections and credentials. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "bigqueryconnection.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ConnectionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ConnectionServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateConnection(ConnectionServiceRestStub):
+        def __hash__(self):
+            return hash("CreateConnection")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: gcbc_connection.CreateConnectionRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gcbc_connection.Connection:
+            r"""Call the create connection method over HTTP.
+
+            Args:
+                request (~.gcbc_connection.CreateConnectionRequest):
+                    The request object. The request for
+                    [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcbc_connection.Connection:
+                    Configuration parameters to establish
+                    connection with an external data source,
+                    except the credential attributes.
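+
+                    A hedged request sketch (resource names are
+                    placeholders, not values from this change):
+
+                    .. code-block:: python
+
+                        request = gcbc_connection.CreateConnectionRequest(
+                            parent="projects/PROJECT/locations/us",
+                            connection_id="my-connection",
+                        )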
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/connections", + "body": "connection", + }, + ] + request, metadata = self._interceptor.pre_create_connection( + request, metadata + ) + pb_request = gcbc_connection.CreateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcbc_connection.Connection() + pb_resp = gcbc_connection.Connection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_connection(resp) + return resp + + class _DeleteConnection(ConnectionServiceRestStub): + def __hash__(self): + return hash("DeleteConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: connection.DeleteConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete connection method over HTTP. + + Args: + request (~.connection.DeleteConnectionRequest): + The request object. The request for + [ConnectionService.DeleteConnectionRequest][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/connections/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_connection( + request, metadata + ) + pb_request = connection.DeleteConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetConnection(ConnectionServiceRestStub): + def __hash__(self): + return hash("GetConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: connection.GetConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> connection.Connection: + r"""Call the get connection method over HTTP. + + Args: + request (~.connection.GetConnectionRequest): + The request object. The request for + [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.connection.Connection: + Configuration parameters to establish + connection with an external data source, + except the credential attributes. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/connections/*}", + }, + ] + request, metadata = self._interceptor.pre_get_connection(request, metadata) + pb_request = connection.GetConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = connection.Connection() + pb_resp = connection.Connection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_connection(resp) + return resp + + class _GetIamPolicy(ConnectionServiceRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+
+                    **JSON example:**
+
+                    ::
+
+                        {
+                          "bindings": [
+                            {
+                              "role": "roles/resourcemanager.organizationAdmin",
+                              "members": [
+                                "user:mike@example.com",
+                                "group:admins@example.com",
+                                "domain:google.com",
+                                "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                              ]
+                            },
+                            {
+                              "role": "roles/resourcemanager.organizationViewer",
+                              "members": [
+                                "user:eve@example.com"
+                              ],
+                              "condition": {
+                                "title": "expirable access",
+                                "description": "Does not grant access after Sep 2020",
+                                "expression": "request.time <
+                                timestamp('2020-10-01T00:00:00.000Z')",
+                              }
+                            }
+                          ],
+                          "etag": "BwWWja0YfJA=",
+                          "version": 3
+                        }
+
+                    **YAML example:**
+
+                    ::
+
+                        bindings:
+                        - members:
+                          - user:mike@example.com
+                          - group:admins@example.com
+                          - domain:google.com
+                          - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                          role: roles/resourcemanager.organizationAdmin
+                        - members:
+                          - user:eve@example.com
+                          role: roles/resourcemanager.organizationViewer
+                          condition:
+                            title: expirable access
+                            description: Does not grant access after Sep 2020
+                            expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                        etag: BwWWja0YfJA=
+                        version: 3
+
+                    For a description of IAM and its features, see the `IAM
+                    documentation <https://cloud.google.com/iam/docs/>`__.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{resource=projects/*/locations/*/connections/*}:getIamPolicy",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_get_iam_policy(request, metadata)
+            pb_request = request
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = policy_pb2.Policy()
+            pb_resp = resp
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_get_iam_policy(resp)
+            return resp
+
+    class _ListConnections(ConnectionServiceRestStub):
+        def __hash__(self):
+            return hash("ListConnections")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "pageSize": 0,
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: connection.ListConnectionsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> connection.ListConnectionsResponse:
+            r"""Call the list connections method over HTTP.
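+
+            Each call fetches one page; the client layer normally drives
+            pagination. A hedged sketch of manual paging (assumes a
+            ``transport`` instance):
+
+            .. code-block:: python
+
+                page = transport.list_connections(request)
+                while True:
+                    for conn in page.connections:
+                        print(conn.name)
+                    if not page.next_page_token:
+                        break
+                    request.page_token = page.next_page_token
+                    page = transport.list_connections(request)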
+ + Args: + request (~.connection.ListConnectionsRequest): + The request object. The request for + [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.connection.ListConnectionsResponse: + The response for + [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/connections", + }, + ] + request, metadata = self._interceptor.pre_list_connections( + request, metadata + ) + pb_request = connection.ListConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = connection.ListConnectionsResponse() + pb_resp = connection.ListConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_connections(resp) + return resp + + class _SetIamPolicy(ConnectionServiceRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. 
+                    Principals can be user
+                    accounts, service accounts, Google groups, and domains
+                    (such as G Suite). A ``role`` is a named list of
+                    permissions; each ``role`` can be an IAM predefined role
+                    or a user-created custom role.
+
+                    For some types of Google Cloud resources, a ``binding``
+                    can also specify a ``condition``, which is a logical
+                    expression that allows access to a resource only if the
+                    expression evaluates to ``true``. A condition can add
+                    constraints based on attributes of the request, the
+                    resource, or both. To learn which resources support
+                    conditions in their IAM policies, see the `IAM
+                    documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+
+                    **JSON example:**
+
+                    ::
+
+                        {
+                          "bindings": [
+                            {
+                              "role": "roles/resourcemanager.organizationAdmin",
+                              "members": [
+                                "user:mike@example.com",
+                                "group:admins@example.com",
+                                "domain:google.com",
+                                "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                              ]
+                            },
+                            {
+                              "role": "roles/resourcemanager.organizationViewer",
+                              "members": [
+                                "user:eve@example.com"
+                              ],
+                              "condition": {
+                                "title": "expirable access",
+                                "description": "Does not grant access after Sep 2020",
+                                "expression": "request.time <
+                                timestamp('2020-10-01T00:00:00.000Z')",
+                              }
+                            }
+                          ],
+                          "etag": "BwWWja0YfJA=",
+                          "version": 3
+                        }
+
+                    **YAML example:**
+
+                    ::
+
+                        bindings:
+                        - members:
+                          - user:mike@example.com
+                          - group:admins@example.com
+                          - domain:google.com
+                          - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                          role: roles/resourcemanager.organizationAdmin
+                        - members:
+                          - user:eve@example.com
+                          role: roles/resourcemanager.organizationViewer
+                          condition:
+                            title: expirable access
+                            description: Does not grant access after Sep 2020
+                            expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                        etag: BwWWja0YfJA=
+                        version: 3
+
+                    For a description of IAM and its features, see the `IAM
+                    documentation <https://cloud.google.com/iam/docs/>`__.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{resource=projects/*/locations/*/connections/*}:setIamPolicy",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_set_iam_policy(request, metadata)
+            pb_request = request
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(ConnectionServiceRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/connections/*}:testIamPermissions", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
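+            # For example, a 404 response surfaces as core_exceptions.NotFound;
+            # the mapping is keyed on the HTTP status code of the response.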
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _UpdateConnection(ConnectionServiceRestStub): + def __hash__(self): + return hash("UpdateConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcbc_connection.UpdateConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbc_connection.Connection: + r"""Call the update connection method over HTTP. + + Args: + request (~.gcbc_connection.UpdateConnectionRequest): + The request object. The request for + [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcbc_connection.Connection: + Configuration parameters to establish + connection with an external data source, + except the credential attributes. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{name=projects/*/locations/*/connections/*}", + "body": "connection", + }, + ] + request, metadata = self._interceptor.pre_update_connection( + request, metadata + ) + pb_request = gcbc_connection.UpdateConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcbc_connection.Connection() + pb_resp = gcbc_connection.Connection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_connection(resp) + return resp + + @property + def create_connection( + self, + ) -> Callable[ + [gcbc_connection.CreateConnectionRequest], gcbc_connection.Connection + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_connection( + self, + ) -> Callable[[connection.DeleteConnectionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_connection( + self, + ) -> Callable[[connection.GetConnectionRequest], connection.Connection]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_connections( + self, + ) -> Callable[ + [connection.ListConnectionsRequest], connection.ListConnectionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_connection( + self, + ) -> Callable[ + [gcbc_connection.UpdateConnectionRequest], gcbc_connection.Connection + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ConnectionServiceRestTransport",) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/types/__init__.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/types/__init__.py new file mode 100644 index 000000000000..40ba91727493 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/types/__init__.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .connection import ( + AwsAccessRole, + AwsCrossAccountRole, + AwsProperties, + AzureProperties, + CloudResourceProperties, + CloudSpannerProperties, + CloudSqlCredential, + CloudSqlProperties, + Connection, + CreateConnectionRequest, + DeleteConnectionRequest, + GetConnectionRequest, + ListConnectionsRequest, + ListConnectionsResponse, + MetastoreServiceConfig, + SalesforceDataCloudProperties, + SparkHistoryServerConfig, + SparkProperties, + UpdateConnectionRequest, +) + +__all__ = ( + "AwsAccessRole", + "AwsCrossAccountRole", + "AwsProperties", + "AzureProperties", + "CloudResourceProperties", + "CloudSpannerProperties", + "CloudSqlCredential", + "CloudSqlProperties", + "Connection", + "CreateConnectionRequest", + "DeleteConnectionRequest", + "GetConnectionRequest", + "ListConnectionsRequest", + "ListConnectionsResponse", + "MetastoreServiceConfig", + "SalesforceDataCloudProperties", + "SparkHistoryServerConfig", + "SparkProperties", + "UpdateConnectionRequest", +) diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/types/connection.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/types/connection.py new file mode 100644 index 000000000000..3c8033618dca --- /dev/null +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/types/connection.py @@ -0,0 +1,778 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.connection.v1", + manifest={ + "CreateConnectionRequest", + "GetConnectionRequest", + "ListConnectionsRequest", + "ListConnectionsResponse", + "UpdateConnectionRequest", + "DeleteConnectionRequest", + "Connection", + "CloudSqlProperties", + "CloudSqlCredential", + "CloudSpannerProperties", + "AwsProperties", + "AwsCrossAccountRole", + "AwsAccessRole", + "AzureProperties", + "CloudResourceProperties", + "MetastoreServiceConfig", + "SparkHistoryServerConfig", + "SparkProperties", + "SalesforceDataCloudProperties", + }, +) + + +class CreateConnectionRequest(proto.Message): + r"""The request for + [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection]. + + Attributes: + parent (str): + Required. Parent resource name. Must be in the format + ``projects/{project_id}/locations/{location_id}`` + connection_id (str): + Optional. Connection id that should be + assigned to the created connection. + connection (google.cloud.bigquery_connection_v1.types.Connection): + Required. Connection to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + connection_id: str = proto.Field( + proto.STRING, + number=2, + ) + connection: "Connection" = proto.Field( + proto.MESSAGE, + number=3, + message="Connection", + ) + + +class GetConnectionRequest(proto.Message): + r"""The request for + [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection]. + + Attributes: + name (str): + Required. Name of the requested connection, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListConnectionsRequest(proto.Message): + r"""The request for + [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. + + Attributes: + parent (str): + Required. Parent resource name. Must be in the form: + ``projects/{project_id}/locations/{location_id}`` + page_size (int): + Required. Page size. + page_token (str): + Page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListConnectionsResponse(proto.Message): + r"""The response for + [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. + + Attributes: + next_page_token (str): + Next page token. + connections (MutableSequence[google.cloud.bigquery_connection_v1.types.Connection]): + List of connections. + """ + + @property + def raw_page(self): + return self + + next_page_token: str = proto.Field( + proto.STRING, + number=1, + ) + connections: MutableSequence["Connection"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Connection", + ) + + +class UpdateConnectionRequest(proto.Message): + r"""The request for + [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection]. + + Attributes: + name (str): + Required. 
Name of the connection to update, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + connection (google.cloud.bigquery_connection_v1.types.Connection): + Required. Connection containing the updated + fields. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Update mask for the connection + fields to be updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + connection: "Connection" = proto.Field( + proto.MESSAGE, + number=2, + message="Connection", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteConnectionRequest(proto.Message): + r"""The request for [ConnectionService.DeleteConnectionRequest][]. + + Attributes: + name (str): + Required. Name of the deleted connection, for example: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Connection(proto.Message): + r"""Configuration parameters to establish connection with an + external data source, except the credential attributes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The resource name of the connection in the form of: + ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` + friendly_name (str): + User provided display name for the + connection. + description (str): + User provided description. + cloud_sql (google.cloud.bigquery_connection_v1.types.CloudSqlProperties): + Cloud SQL properties. + + This field is a member of `oneof`_ ``properties``. + aws (google.cloud.bigquery_connection_v1.types.AwsProperties): + Amazon Web Services (AWS) properties. + + This field is a member of `oneof`_ ``properties``. + azure (google.cloud.bigquery_connection_v1.types.AzureProperties): + Azure properties. + + This field is a member of `oneof`_ ``properties``. + cloud_spanner (google.cloud.bigquery_connection_v1.types.CloudSpannerProperties): + Cloud Spanner properties. + + This field is a member of `oneof`_ ``properties``. + cloud_resource (google.cloud.bigquery_connection_v1.types.CloudResourceProperties): + Cloud Resource properties. + + This field is a member of `oneof`_ ``properties``. + spark (google.cloud.bigquery_connection_v1.types.SparkProperties): + Spark properties. + + This field is a member of `oneof`_ ``properties``. + salesforce_data_cloud (google.cloud.bigquery_connection_v1.types.SalesforceDataCloudProperties): + Optional. Salesforce DataCloud properties. + This field is intended for use only by + Salesforce partner projects. This field contains + properties for your Salesforce DataCloud + connection. + + This field is a member of `oneof`_ ``properties``. + creation_time (int): + Output only. The creation timestamp of the + connection. + last_modified_time (int): + Output only. The last update timestamp of the + connection. + has_credential (bool): + Output only. True, if credential is + configured for this connection. 
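+
+    Example (illustrative; resource names and values are placeholders):
+
+    .. code-block:: python
+
+        from google.cloud.bigquery_connection_v1 import types
+
+        # Assigning ``cloud_sql`` selects it within the ``properties``
+        # oneof; setting ``aws`` afterwards would clear ``cloud_sql``.
+        connection = types.Connection(
+            friendly_name="my-connection",
+            cloud_sql=types.CloudSqlProperties(
+                instance_id="my-project:us-central1:my-instance",
+                database="mydb",
+                type_=types.CloudSqlProperties.DatabaseType.POSTGRES,
+            ),
+        )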
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + friendly_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + cloud_sql: "CloudSqlProperties" = proto.Field( + proto.MESSAGE, + number=4, + oneof="properties", + message="CloudSqlProperties", + ) + aws: "AwsProperties" = proto.Field( + proto.MESSAGE, + number=8, + oneof="properties", + message="AwsProperties", + ) + azure: "AzureProperties" = proto.Field( + proto.MESSAGE, + number=11, + oneof="properties", + message="AzureProperties", + ) + cloud_spanner: "CloudSpannerProperties" = proto.Field( + proto.MESSAGE, + number=21, + oneof="properties", + message="CloudSpannerProperties", + ) + cloud_resource: "CloudResourceProperties" = proto.Field( + proto.MESSAGE, + number=22, + oneof="properties", + message="CloudResourceProperties", + ) + spark: "SparkProperties" = proto.Field( + proto.MESSAGE, + number=23, + oneof="properties", + message="SparkProperties", + ) + salesforce_data_cloud: "SalesforceDataCloudProperties" = proto.Field( + proto.MESSAGE, + number=24, + oneof="properties", + message="SalesforceDataCloudProperties", + ) + creation_time: int = proto.Field( + proto.INT64, + number=5, + ) + last_modified_time: int = proto.Field( + proto.INT64, + number=6, + ) + has_credential: bool = proto.Field( + proto.BOOL, + number=7, + ) + + +class CloudSqlProperties(proto.Message): + r"""Connection properties specific to the Cloud SQL. + + Attributes: + instance_id (str): + Cloud SQL instance ID in the form + ``project:location:instance``. + database (str): + Database name. + type_ (google.cloud.bigquery_connection_v1.types.CloudSqlProperties.DatabaseType): + Type of the Cloud SQL database. + credential (google.cloud.bigquery_connection_v1.types.CloudSqlCredential): + Input only. Cloud SQL credential. + service_account_id (str): + Output only. The account ID of the service + used for the purpose of this connection. + + When the connection is used in the context of an + operation in BigQuery, this service account will + serve as the identity being used for connecting + to the CloudSQL instance specified in this + connection. + """ + + class DatabaseType(proto.Enum): + r"""Supported Cloud SQL database types. + + Values: + DATABASE_TYPE_UNSPECIFIED (0): + Unspecified database type. + POSTGRES (1): + Cloud SQL for PostgreSQL. + MYSQL (2): + Cloud SQL for MySQL. + """ + DATABASE_TYPE_UNSPECIFIED = 0 + POSTGRES = 1 + MYSQL = 2 + + instance_id: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + type_: DatabaseType = proto.Field( + proto.ENUM, + number=3, + enum=DatabaseType, + ) + credential: "CloudSqlCredential" = proto.Field( + proto.MESSAGE, + number=4, + message="CloudSqlCredential", + ) + service_account_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class CloudSqlCredential(proto.Message): + r"""Credential info for the Cloud SQL. + + Attributes: + username (str): + The username for the credential. + password (str): + The password for the credential. + """ + + username: str = proto.Field( + proto.STRING, + number=1, + ) + password: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CloudSpannerProperties(proto.Message): + r"""Connection properties specific to Cloud Spanner. 
+ + Attributes: + database (str): + Cloud Spanner database in the form + \`project/instance/database' + use_parallelism (bool): + If parallelism should be used when reading + from Cloud Spanner + max_parallelism (int): + Allows setting max parallelism per query when executing on + Spanner independent compute resources. If unspecified, + default values of parallelism are chosen that are dependent + on the Cloud Spanner instance configuration. + + REQUIRES: ``use_parallelism`` must be set. REQUIRES: Either + ``use_data_boost`` or ``use_serverless_analytics`` must be + set. + use_serverless_analytics (bool): + If the serverless analytics service should be used to read + data from Cloud Spanner. Note: ``use_parallelism`` must be + set when using serverless analytics. + use_data_boost (bool): + If set, the request will be executed via Spanner independent + compute resources. REQUIRES: ``use_parallelism`` must be + set. + + NOTE: ``use_serverless_analytics`` will be deprecated. + Prefer ``use_data_boost`` over ``use_serverless_analytics``. + database_role (str): + Optional. Cloud Spanner database role for fine-grained + access control. The Cloud Spanner admin should have + provisioned the database role with appropriate permissions, + such as ``SELECT`` and ``INSERT``. Other users should only + use roles provided by their Cloud Spanner admins. + + For more details, see [About fine-grained access control] + (https://cloud.google.com/spanner/docs/fgac-about). + + REQUIRES: The database role name must start with a letter, + and can only contain letters, numbers, and underscores. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + use_parallelism: bool = proto.Field( + proto.BOOL, + number=2, + ) + max_parallelism: int = proto.Field( + proto.INT32, + number=5, + ) + use_serverless_analytics: bool = proto.Field( + proto.BOOL, + number=3, + ) + use_data_boost: bool = proto.Field( + proto.BOOL, + number=6, + ) + database_role: str = proto.Field( + proto.STRING, + number=4, + ) + + +class AwsProperties(proto.Message): + r"""Connection properties specific to Amazon Web Services (AWS). + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cross_account_role (google.cloud.bigquery_connection_v1.types.AwsCrossAccountRole): + Authentication using Google owned AWS IAM + user's access key to assume into customer's AWS + IAM Role. Deprecated, do not use. + + This field is a member of `oneof`_ ``authentication_method``. + access_role (google.cloud.bigquery_connection_v1.types.AwsAccessRole): + Authentication using Google owned service + account to assume into customer's AWS IAM Role. + + This field is a member of `oneof`_ ``authentication_method``. + """ + + cross_account_role: "AwsCrossAccountRole" = proto.Field( + proto.MESSAGE, + number=2, + oneof="authentication_method", + message="AwsCrossAccountRole", + ) + access_role: "AwsAccessRole" = proto.Field( + proto.MESSAGE, + number=3, + oneof="authentication_method", + message="AwsAccessRole", + ) + + +class AwsCrossAccountRole(proto.Message): + r"""Authentication method for Amazon Web Services (AWS) that uses + Google owned AWS IAM user's access key to assume into customer's + AWS IAM Role. 
+ + Attributes: + iam_role_id (str): + The user’s AWS IAM Role that trusts the + Google-owned AWS IAM user Connection. + iam_user_id (str): + Output only. Google-owned AWS IAM User for a + Connection. + external_id (str): + Output only. A Google-generated id for representing + Connection’s identity in AWS. External Id is also used for + preventing the Confused Deputy Problem. See + https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html + """ + + iam_role_id: str = proto.Field( + proto.STRING, + number=1, + ) + iam_user_id: str = proto.Field( + proto.STRING, + number=2, + ) + external_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AwsAccessRole(proto.Message): + r"""Authentication method for Amazon Web Services (AWS) that uses + Google owned Google service account to assume into customer's + AWS IAM Role. + + Attributes: + iam_role_id (str): + The user’s AWS IAM Role that trusts the + Google-owned AWS IAM user Connection. + identity (str): + A unique Google-owned and Google-generated + identity for the Connection. This identity will + be used to access the user's AWS IAM Role. + """ + + iam_role_id: str = proto.Field( + proto.STRING, + number=1, + ) + identity: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AzureProperties(proto.Message): + r"""Container for connection properties specific to Azure. + + Attributes: + application (str): + Output only. The name of the Azure Active + Directory Application. + client_id (str): + Output only. The client id of the Azure + Active Directory Application. + object_id (str): + Output only. The object id of the Azure + Active Directory Application. + customer_tenant_id (str): + The id of customer's directory that host the + data. + redirect_uri (str): + The URL user will be redirected to after + granting consent during connection setup. + federated_application_client_id (str): + The client ID of the user's Azure Active + Directory Application used for a federated + connection. + identity (str): + Output only. A unique Google-owned and + Google-generated identity for the Connection. + This identity will be used to access the user's + Azure Active Directory Application. + """ + + application: str = proto.Field( + proto.STRING, + number=1, + ) + client_id: str = proto.Field( + proto.STRING, + number=2, + ) + object_id: str = proto.Field( + proto.STRING, + number=3, + ) + customer_tenant_id: str = proto.Field( + proto.STRING, + number=4, + ) + redirect_uri: str = proto.Field( + proto.STRING, + number=5, + ) + federated_application_client_id: str = proto.Field( + proto.STRING, + number=6, + ) + identity: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CloudResourceProperties(proto.Message): + r"""Container for connection properties for delegation of access + to GCP resources. + + Attributes: + service_account_id (str): + Output only. The account ID of the service + created for the purpose of this connection. + + The service account does not have any + permissions associated with it when it is + created. After creation, customers delegate + permissions to the service account. When the + connection is used in the context of an + operation in BigQuery, the service account will + be used to connect to the desired resources in + GCP. 
+ + The account ID is in the form of: + + @gcp-sa-bigquery-cloudresource.iam.gserviceaccount.com + """ + + service_account_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetastoreServiceConfig(proto.Message): + r"""Configuration of the Dataproc Metastore Service. + + Attributes: + metastore_service (str): + Optional. Resource name of an existing Dataproc Metastore + service. + + Example: + + - ``projects/[project_id]/locations/[region]/services/[service_id]`` + """ + + metastore_service: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SparkHistoryServerConfig(proto.Message): + r"""Configuration of the Spark History Server. + + Attributes: + dataproc_cluster (str): + Optional. Resource name of an existing Dataproc Cluster to + act as a Spark History Server for the connection. + + Example: + + - ``projects/[project_id]/regions/[region]/clusters/[cluster_name]`` + """ + + dataproc_cluster: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SparkProperties(proto.Message): + r"""Container for connection properties to execute stored + procedures for Apache Spark. + + Attributes: + service_account_id (str): + Output only. The account ID of the service + created for the purpose of this connection. + + The service account does not have any + permissions associated with it when it is + created. After creation, customers delegate + permissions to the service account. When the + connection is used in the context of a stored + procedure for Apache Spark in BigQuery, the + service account is used to connect to the + desired resources in Google Cloud. + + The account ID is in the form of: + + bqcx--@gcp-sa-bigquery-consp.iam.gserviceaccount.com + metastore_service_config (google.cloud.bigquery_connection_v1.types.MetastoreServiceConfig): + Optional. Dataproc Metastore Service + configuration for the connection. + spark_history_server_config (google.cloud.bigquery_connection_v1.types.SparkHistoryServerConfig): + Optional. Spark History Server configuration + for the connection. + """ + + service_account_id: str = proto.Field( + proto.STRING, + number=1, + ) + metastore_service_config: "MetastoreServiceConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="MetastoreServiceConfig", + ) + spark_history_server_config: "SparkHistoryServerConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="SparkHistoryServerConfig", + ) + + +class SalesforceDataCloudProperties(proto.Message): + r"""Connection properties specific to Salesforce DataCloud. This + is intended for use only by Salesforce partner projects. + + Attributes: + instance_uri (str): + The URL to the user's Salesforce DataCloud + instance. + identity (str): + Output only. A unique Google-owned and + Google-generated service account identity for + the connection. + tenant_id (str): + The ID of the user's Salesforce tenant. 
+ """ + + instance_uri: str = proto.Field( + proto.STRING, + number=1, + ) + identity: str = proto.Field( + proto.STRING, + number=2, + ) + tenant_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-connection/mypy.ini b/packages/google-cloud-bigquery-connection/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-bigquery-connection/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-bigquery-connection/noxfile.py b/packages/google-cloud-bigquery-connection/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-bigquery-connection/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. 
+ """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. 
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-bigquery-connection/renovate.json b/packages/google-cloud-bigquery-connection/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-bigquery-connection/samples/AUTHORING_GUIDE.md b/packages/google-cloud-bigquery-connection/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..ab870f391311 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md diff --git a/packages/google-cloud-bigquery-connection/samples/CONTRIBUTING.md b/packages/google-cloud-bigquery-connection/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-connection/scripts/decrypt-secrets.sh b/packages/google-cloud-bigquery-connection/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-bigquery-connection/scripts/fixup_bigquery_connection_v1_keywords.py b/packages/google-cloud-bigquery-connection/scripts/fixup_bigquery_connection_v1_keywords.py new file mode 100644 index 000000000000..0076fbd5cc09 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/fixup_bigquery_connection_v1_keywords.py @@ -0,0 +1,183 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
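+#
+# Example invocation (directory names here are hypothetical):
+#
+#   python fixup_bigquery_connection_v1_keywords.py \
+#       --input-directory src/ --output-directory src-fixed/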
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class bigquery_connectionCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_connection': ('parent', 'connection', 'connection_id', ), + 'delete_connection': ('name', ), + 'get_connection': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'list_connections': ('parent', 'page_size', 'page_token', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_connection': ('name', 'connection', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=bigquery_connectionCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
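+        # libcst operates on a concrete syntax tree, so comments and
+        # formatting are preserved; only the rewritten call sites change.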
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the bigquery_connection client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-connection/scripts/fixup_keywords.py b/packages/google-cloud-bigquery-connection/scripts/fixup_keywords.py new file mode 100644 index 000000000000..5c15e5bb6ea7 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/fixup_keywords.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class connectionCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_connection': ('parent', 'connection', 'connection_id', ), + 'delete_connection': ('name', ), + 'get_connection': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'list_connections': ('parent', 'page_size', 'page_token', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_connection': ('name', 'connection', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=connectionCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the connection client library. 
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-connection/scripts/readme-gen/readme_gen.py b/packages/google-cloud-bigquery-connection/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. 
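+    # ``get_help`` shells out to ``python <sample> --help``, so sample
+    # paths in the YAML config resolve relative to ``root``.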
+ os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-bigquery-connection/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. 
+ +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-bigquery-connection/setup.cfg b/packages/google-cloud-bigquery-connection/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-bigquery-connection/setup.py b/packages/google-cloud-bigquery-connection/setup.py new file mode 100644 index 000000000000..5aa5771bff94 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/setup.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
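+#
+# Note: the development-status classifier below is derived from the
+# package version read out of ``gapic_version.py``: 0.x releases are
+# classified as Beta, all others as Production/Stable.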
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-bigquery-connection" + + +description = "Google Cloud Bigquery Connection API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/bigquery_connection/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-bigquery-connection/testing/.gitignore b/packages/google-cloud-bigquery-connection/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-bigquery-connection/testing/constraints-3.10.txt b/packages/google-cloud-bigquery-connection/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-bigquery-connection/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
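+# Dependencies are intentionally unpinned here; exact lower bounds are
+# checked separately via constraints-3.7.txt.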
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-connection/testing/constraints-3.11.txt b/packages/google-cloud-bigquery-connection/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-bigquery-connection/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-connection/testing/constraints-3.12.txt b/packages/google-cloud-bigquery-connection/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-bigquery-connection/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-connection/testing/constraints-3.7.txt b/packages/google-cloud-bigquery-connection/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/packages/google-cloud-bigquery-connection/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/google-cloud-bigquery-connection/testing/constraints-3.8.txt b/packages/google-cloud-bigquery-connection/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-bigquery-connection/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-connection/testing/constraints-3.9.txt b/packages/google-cloud-bigquery-connection/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-bigquery-connection/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-connection/testing/test-env.tmpl.sh b/packages/google-cloud-bigquery-connection/testing/test-env.tmpl.sh new file mode 100644 index 000000000000..67962ef63c58 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/testing/test-env.tmpl.sh @@ -0,0 +1,110 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Environment variables for system tests.
+export GOOGLE_CLOUD_PROJECT=your-project-id
+export GCP_PROJECT=$GOOGLE_CLOUD_PROJECT
+export FIRESTORE_PROJECT=
+
+export CLOUD_STORAGE_BUCKET=$GOOGLE_CLOUD_PROJECT
+export REQUESTER_PAYS_TEST_BUCKET="${CLOUD_STORAGE_BUCKET}-requester-pays-test"
+export API_KEY=
+export BIGTABLE_CLUSTER=bigtable-test
+export BIGTABLE_ZONE=us-central1-c
+export BIGTABLE_INSTANCE=
+export SPANNER_INSTANCE=
+export COMPOSER_LOCATION=us-central1
+export COMPOSER_ENVIRONMENT=
+export COMPOSER2_ENVIRONMENT=
+# Webserver for COMPOSER2_ENVIRONMENT
+export COMPOSER2_WEB_SERVER_URL=
+export CLOUD_KMS_KEY=
+
+export MYSQL_INSTANCE=
+export MYSQL_INSTANCE_ID=
+export MYSQL_INSTANCE_LOCATION=
+export MYSQL_USER=
+export MYSQL_PASSWORD=
+export MYSQL_DATABASE=
+export MYSQL_HOST=localhost:3306
+export POSTGRES_INSTANCE=
+export POSTGRES_USER=
+export POSTGRES_PASSWORD=
+export POSTGRES_DATABASE=
+export POSTGRES_HOST=localhost:5432
+export SQLSERVER_INSTANCE=
+export SQLSERVER_USER=
+export SQLSERVER_PASSWORD=
+export SQLSERVER_DATABASE=
+export SQLSERVER_HOST=127.0.0.1:1433
+export DB_SOCKET_DIR=
+
+export KG_API_KEY=
+export SLACK_TEST_SIGNATURE=
+export SLACK_SECRET=
+export FUNCTIONS_TOPIC=
+
+# Service account for HMAC samples
+export HMAC_KEY_TEST_SERVICE_ACCOUNT=
+
+# Environment variables for App Engine Flexible system tests.
+export GA_TRACKING_ID=
+export SQLALCHEMY_DATABASE_URI=sqlite://
+export PUBSUB_TOPIC=gae-mvm-pubsub-topic
+export PUBSUB_VERIFICATION_TOKEN=1234abc
+
+# Secret Manager Test Vars
+export GCLOUD_SECRETS_SERVICE_ACCOUNT=
+
+# AutoML
+# A centralized project is used to remove duplicate work across all 7 languages
+# and reduce the management of these resources.
+# https://docs.google.com/document/d/1-E7zTNqBm9ex7XIOhzMHCupwKWieyMKgAVwrRK5JTVY
+export AUTOML_PROJECT_ID=
+
+export ENTITY_EXTRACTION_DATASET_ID=
+export ENTITY_EXTRACTION_MODEL_ID=
+
+export SENTIMENT_ANALYSIS_DATASET_ID=
+export SENTIMENT_ANALYSIS_MODEL_ID=
+
+export TEXT_CLASSIFICATION_DATASET_ID=
+export TEXT_CLASSIFICATION_MODEL_ID=
+
+export TRANSLATION_DATASET_ID=
+export TRANSLATION_MODEL_ID=
+
+export VISION_CLASSIFICATION_DATASET_ID=
+export VISION_CLASSIFICATION_MODEL_ID=
+
+export OBJECT_DETECTION_DATASET_ID=
+# An AutoML model takes 8-24 hours to create; having predefined
+# and centralized models removes duplicate work across all languages.
+export OBJECT_DETECTION_MODEL_ID=
+
+# For git operations in the test driver (testing/run_tests.sh).
+# These are optional, but they help avoid flakes in Kokoro builds.
+export GITHUB_ACCESS_TOKEN=
+export GITHUB_USERNAME=
+
+# Cloud Run
+# For run/idp example, a Firebase IDP token
+export IDP_KEY=
+# For run/filesystem
+export IP_ADDRESS=
+export CONNECTOR=
+
+# Dialogflow examples.
+export SMART_REPLY_MODEL= +export SMART_REPLY_ALLOWLIST= \ No newline at end of file diff --git a/packages/google-cloud-bigquery-connection/tests/__init__.py b/packages/google-cloud-bigquery-connection/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-connection/tests/system/__init__.py b/packages/google-cloud-bigquery-connection/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-bigquery-connection/tests/system/smoke_test.py b/packages/google-cloud-bigquery-connection/tests/system/smoke_test.py new file mode 100644 index 000000000000..a0b9cd264d86 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/tests/system/smoke_test.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import bigquery_connection_v1 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_connections(project_id: str, transport: str): + client = bigquery_connection_v1.ConnectionServiceClient(transport=transport) + + parent = client.common_location_path(project_id, location="us-central1") + client.list_connections(parent=parent) + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. 
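    # (A typical local invocation, with a hypothetical project id, would be
    # `PROJECT_ID=my-project pytest tests/system/smoke_test.py`; the project
    # must have the BigQuery Connection API enabled.)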
+ assert True diff --git a/packages/google-cloud-bigquery-connection/tests/unit/__init__.py b/packages/google-cloud-bigquery-connection/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/__init__.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/__init__.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py new file mode 100644 index 000000000000..02640b712ad1 --- /dev/null +++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py @@ -0,0 +1,6411 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.type import expr_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.bigquery_connection_v1.services.connection_service import ( + ConnectionServiceAsyncClient, + ConnectionServiceClient, + pagers, + transports, +) +from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection +from google.cloud.bigquery_connection_v1.types import connection + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
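# For reference, the endpoint-to-mTLS-endpoint mapping that the tests below
# expect from the client's _get_default_mtls_endpoint helper is:
#     example.googleapis.com         -> example.mtls.googleapis.com
#     example.sandbox.googleapis.com -> example.mtls.sandbox.googleapis.com
#     api.example.com (non-Google)   -> unchanged
#     None                           -> None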
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ConnectionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConnectionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConnectionServiceClient, "grpc"), + (ConnectionServiceAsyncClient, "grpc_asyncio"), + (ConnectionServiceClient, "rest"), + ], +) +def test_connection_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "bigqueryconnection.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigqueryconnection.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ConnectionServiceGrpcTransport, "grpc"), + (transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ConnectionServiceRestTransport, "rest"), + ], +) +def test_connection_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConnectionServiceClient, "grpc"), + (ConnectionServiceAsyncClient, "grpc_asyncio"), + (ConnectionServiceClient, "rest"), + ], +) +def test_connection_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "bigqueryconnection.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigqueryconnection.googleapis.com" + ) + + +def test_connection_service_client_get_transport_class(): + transport = ConnectionServiceClient.get_transport_class() + available_transports = [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceRestTransport, + ] + assert transport in available_transports + + transport = ConnectionServiceClient.get_transport_class("grpc") + assert transport == transports.ConnectionServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ConnectionServiceClient, transports.ConnectionServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ConnectionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceClient), +) +@mock.patch.object( + ConnectionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceAsyncClient), +) +def test_connection_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ConnectionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ConnectionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
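    # (GOOGLE_API_USE_MTLS_ENDPOINT accepts "never", "always", or "auto";
    # "auto" switches to the mTLS endpoint only when a client certificate is
    # available, and an unsupported value raises MutualTLSChannelError, as the
    # remaining cases in this test exercise.)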
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ConnectionServiceClient, + transports.ConnectionServiceGrpcTransport, + "grpc", + "true", + ), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ConnectionServiceClient, + transports.ConnectionServiceGrpcTransport, + "grpc", + "false", + ), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ConnectionServiceClient, + transports.ConnectionServiceRestTransport, + "rest", + "true", + ), + ( + ConnectionServiceClient, + transports.ConnectionServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ConnectionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceClient), +) +@mock.patch.object( + ConnectionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_connection_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch 
behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
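    # (ADC = Application Default Credentials; an "ADC client cert" is one
    # discovered through google.auth.transport.mtls.default_client_cert_source.)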
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ConnectionServiceClient, ConnectionServiceAsyncClient] +) +@mock.patch.object( + ConnectionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceClient), +) +@mock.patch.object( + ConnectionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConnectionServiceAsyncClient), +) +def test_connection_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ConnectionServiceClient, transports.ConnectionServiceRestTransport, "rest"), + ], +) +def test_connection_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConnectionServiceClient, + transports.ConnectionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + ConnectionServiceClient, + transports.ConnectionServiceRestTransport, + "rest", + None, + ), + ], +) +def test_connection_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
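    # (credentials_file names a credentials JSON on disk; it is loaded through
    # google.auth.load_credentials_from_file, which the create-channel test
    # further below patches and verifies.)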
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_connection_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ConnectionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConnectionServiceClient, + transports.ConnectionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_connection_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigqueryconnection.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=None, + default_host="bigqueryconnection.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcbc_connection.CreateConnectionRequest, + dict, + ], +) +def test_create_connection(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbc_connection.Connection( + name="name_value", + friendly_name="friendly_name_value", + description="description_value", + creation_time=1379, + last_modified_time=1890, + has_credential=True, + ) + response = client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcbc_connection.CreateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbc_connection.Connection) + assert response.name == "name_value" + assert response.friendly_name == "friendly_name_value" + assert response.description == "description_value" + assert response.creation_time == 1379 + assert response.last_modified_time == 1890 + assert response.has_credential is True + + +def test_create_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
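    # (This is the pattern used throughout the rest of this file: patch the
    # transport's bound RPC method via type(client.transport.<rpc_name>) so no
    # network traffic occurs, then assert on call.mock_calls to inspect the
    # outgoing request object and metadata.)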
+ with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + client.create_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcbc_connection.CreateConnectionRequest() + + +@pytest.mark.asyncio +async def test_create_connection_async( + transport: str = "grpc_asyncio", + request_type=gcbc_connection.CreateConnectionRequest, +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbc_connection.Connection( + name="name_value", + friendly_name="friendly_name_value", + description="description_value", + creation_time=1379, + last_modified_time=1890, + has_credential=True, + ) + ) + response = await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcbc_connection.CreateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbc_connection.Connection) + assert response.name == "name_value" + assert response.friendly_name == "friendly_name_value" + assert response.description == "description_value" + assert response.creation_time == 1379 + assert response.last_modified_time == 1890 + assert response.has_credential is True + + +@pytest.mark.asyncio +async def test_create_connection_async_from_dict(): + await test_create_connection_async(request_type=dict) + + +def test_create_connection_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcbc_connection.CreateConnectionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + call.return_value = gcbc_connection.Connection() + client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_connection_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcbc_connection.CreateConnectionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbc_connection.Connection() + ) + await client.create_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_connection_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbc_connection.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_connection( + parent="parent_value", + connection=gcbc_connection.Connection(name="name_value"), + connection_id="connection_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].connection + mock_val = gcbc_connection.Connection(name="name_value") + assert arg == mock_val + arg = args[0].connection_id + mock_val = "connection_id_value" + assert arg == mock_val + + +def test_create_connection_flattened_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_connection( + gcbc_connection.CreateConnectionRequest(), + parent="parent_value", + connection=gcbc_connection.Connection(name="name_value"), + connection_id="connection_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_connection_flattened_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbc_connection.Connection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbc_connection.Connection() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_connection( + parent="parent_value", + connection=gcbc_connection.Connection(name="name_value"), + connection_id="connection_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].connection + mock_val = gcbc_connection.Connection(name="name_value") + assert arg == mock_val + arg = args[0].connection_id + mock_val = "connection_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_connection_flattened_error_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_connection( + gcbc_connection.CreateConnectionRequest(), + parent="parent_value", + connection=gcbc_connection.Connection(name="name_value"), + connection_id="connection_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + connection.GetConnectionRequest, + dict, + ], +) +def test_get_connection(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = connection.Connection( + name="name_value", + friendly_name="friendly_name_value", + description="description_value", + creation_time=1379, + last_modified_time=1890, + has_credential=True, + ) + response = client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == connection.GetConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, connection.Connection) + assert response.name == "name_value" + assert response.friendly_name == "friendly_name_value" + assert response.description == "description_value" + assert response.creation_time == 1379 + assert response.last_modified_time == 1890 + assert response.has_credential is True + + +def test_get_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + client.get_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == connection.GetConnectionRequest() + + +@pytest.mark.asyncio +async def test_get_connection_async( + transport: str = "grpc_asyncio", request_type=connection.GetConnectionRequest +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + connection.Connection( + name="name_value", + friendly_name="friendly_name_value", + description="description_value", + creation_time=1379, + last_modified_time=1890, + has_credential=True, + ) + ) + response = await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == connection.GetConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, connection.Connection) + assert response.name == "name_value" + assert response.friendly_name == "friendly_name_value" + assert response.description == "description_value" + assert response.creation_time == 1379 + assert response.last_modified_time == 1890 + assert response.has_credential is True + + +@pytest.mark.asyncio +async def test_get_connection_async_from_dict(): + await test_get_connection_async(request_type=dict) + + +def test_get_connection_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = connection.GetConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + call.return_value = connection.Connection() + client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_connection_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = connection.GetConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + connection.Connection() + ) + await client.get_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_connection_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = connection.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_connection_flattened_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_connection( + connection.GetConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_connection_flattened_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_connection), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = connection.Connection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + connection.Connection() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_connection_flattened_error_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_connection( + connection.GetConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + connection.ListConnectionsRequest, + dict, + ], +) +def test_list_connections(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = connection.ListConnectionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == connection.ListConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + client.list_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == connection.ListConnectionsRequest() + + +@pytest.mark.asyncio +async def test_list_connections_async( + transport: str = "grpc_asyncio", request_type=connection.ListConnectionsRequest +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + connection.ListConnectionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == connection.ListConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConnectionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_connections_async_from_dict(): + await test_list_connections_async(request_type=dict) + + +def test_list_connections_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = connection.ListConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + call.return_value = connection.ListConnectionsResponse() + client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_connections_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = connection.ListConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + connection.ListConnectionsResponse() + ) + await client.list_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_connections_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = connection.ListConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_connections_flattened_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_connections( + connection.ListConnectionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_connections_flattened_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = connection.ListConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + connection.ListConnectionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_connections_flattened_error_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_connections( + connection.ListConnectionsRequest(), + parent="parent_value", + ) + + +def test_list_connections_pager(transport_name: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Set the response to a series of pages. 
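+ # With an iterable side_effect, each successive stub call returns the
+ # next item; the trailing RuntimeError is raised if the pager fetches
+ # more pages than the fixture supplies.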
+ call.side_effect = ( + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + connection.Connection(), + connection.Connection(), + ], + next_page_token="abc", + ), + connection.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + ], + next_page_token="ghi", + ), + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + connection.Connection(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_connections(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, connection.Connection) for i in results) + + +def test_list_connections_pages(transport_name: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_connections), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + connection.Connection(), + connection.Connection(), + ], + next_page_token="abc", + ), + connection.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + ], + next_page_token="ghi", + ), + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + connection.Connection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_connections(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_connections_async_pager(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_connections), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + connection.Connection(), + connection.Connection(), + ], + next_page_token="abc", + ), + connection.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + ], + next_page_token="ghi", + ), + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + connection.Connection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_connections( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, connection.Connection) for i in responses) + + +@pytest.mark.asyncio +async def test_list_connections_async_pages(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
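+ # new_callable=mock.AsyncMock makes the patched __call__ awaitable, so
+ # the async pager can await each page fetch.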
+ with mock.patch.object( + type(client.transport.list_connections), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + connection.Connection(), + connection.Connection(), + ], + next_page_token="abc", + ), + connection.ListConnectionsResponse( + connections=[], + next_page_token="def", + ), + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + ], + next_page_token="ghi", + ), + connection.ListConnectionsResponse( + connections=[ + connection.Connection(), + connection.Connection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gcbc_connection.UpdateConnectionRequest, + dict, + ], +) +def test_update_connection(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbc_connection.Connection( + name="name_value", + friendly_name="friendly_name_value", + description="description_value", + creation_time=1379, + last_modified_time=1890, + has_credential=True, + ) + response = client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcbc_connection.UpdateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbc_connection.Connection) + assert response.name == "name_value" + assert response.friendly_name == "friendly_name_value" + assert response.description == "description_value" + assert response.creation_time == 1379 + assert response.last_modified_time == 1890 + assert response.has_credential is True + + +def test_update_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + client.update_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcbc_connection.UpdateConnectionRequest() + + +@pytest.mark.asyncio +async def test_update_connection_async( + transport: str = "grpc_asyncio", + request_type=gcbc_connection.UpdateConnectionRequest, +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbc_connection.Connection( + name="name_value", + friendly_name="friendly_name_value", + description="description_value", + creation_time=1379, + last_modified_time=1890, + has_credential=True, + ) + ) + response = await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcbc_connection.UpdateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbc_connection.Connection) + assert response.name == "name_value" + assert response.friendly_name == "friendly_name_value" + assert response.description == "description_value" + assert response.creation_time == 1379 + assert response.last_modified_time == 1890 + assert response.has_credential is True + + +@pytest.mark.asyncio +async def test_update_connection_async_from_dict(): + await test_update_connection_async(request_type=dict) + + +def test_update_connection_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcbc_connection.UpdateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + call.return_value = gcbc_connection.Connection() + client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_connection_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcbc_connection.UpdateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbc_connection.Connection() + ) + await client.update_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_connection_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbc_connection.Connection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_connection( + name="name_value", + connection=gcbc_connection.Connection(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].connection + mock_val = gcbc_connection.Connection(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_connection_flattened_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_connection( + gcbc_connection.UpdateConnectionRequest(), + name="name_value", + connection=gcbc_connection.Connection(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_connection_flattened_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbc_connection.Connection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbc_connection.Connection() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_connection( + name="name_value", + connection=gcbc_connection.Connection(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
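+ # Each flattened keyword argument should have been copied onto the
+ # corresponding field of the request proto passed to the stub.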
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].connection + mock_val = gcbc_connection.Connection(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_connection_flattened_error_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_connection( + gcbc_connection.UpdateConnectionRequest(), + name="name_value", + connection=gcbc_connection.Connection(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + connection.DeleteConnectionRequest, + dict, + ], +) +def test_delete_connection(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == connection.DeleteConnectionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + client.delete_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == connection.DeleteConnectionRequest() + + +@pytest.mark.asyncio +async def test_delete_connection_async( + transport: str = "grpc_asyncio", request_type=connection.DeleteConnectionRequest +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == connection.DeleteConnectionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_connection_async_from_dict(): + await test_delete_connection_async(request_type=dict) + + +def test_delete_connection_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = connection.DeleteConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + call.return_value = None + client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_connection_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = connection.DeleteConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_connection_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_connection_flattened_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
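+ # GAPIC clients treat the request object and flattened keyword
+ # arguments as mutually exclusive, so ValueError is expected.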
+ with pytest.raises(ValueError): + client.delete_connection( + connection.DeleteConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_connection_flattened_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_connection_flattened_error_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_connection( + connection.DeleteConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
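+ # kw holds the keyword arguments of the stub call; the routing header
+ # travels as an ("x-goog-request-params", ...) entry in its metadata.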
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_get_iam_policy_flattened_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_set_iam_policy_flattened_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource="resource_value", + permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].permissions + mock_val = ["permissions_value"] + assert arg == mock_val + + +def test_test_iam_permissions_flattened_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.test_iam_permissions( + resource="resource_value", + permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].permissions + mock_val = ["permissions_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcbc_connection.CreateConnectionRequest, + dict, + ], +) +def test_create_connection_rest(request_type): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["connection"] = { + "name": "name_value", + "friendly_name": "friendly_name_value", + "description": "description_value", + "cloud_sql": { + "instance_id": "instance_id_value", + "database": "database_value", + "type_": 1, + "credential": {"username": "username_value", "password": "password_value"}, + "service_account_id": "service_account_id_value", + }, + "aws": { + "cross_account_role": { + "iam_role_id": "iam_role_id_value", + "iam_user_id": "iam_user_id_value", + "external_id": "external_id_value", + }, + "access_role": { + "iam_role_id": "iam_role_id_value", + "identity": "identity_value", + }, + }, + "azure": { + "application": "application_value", + "client_id": "client_id_value", + "object_id": "object_id_value", + "customer_tenant_id": "customer_tenant_id_value", + "redirect_uri": "redirect_uri_value", + "federated_application_client_id": "federated_application_client_id_value", + "identity": "identity_value", + }, + "cloud_spanner": { + "database": "database_value", + "use_parallelism": True, + "max_parallelism": 1595, + "use_serverless_analytics": True, + "use_data_boost": True, + "database_role": "database_role_value", + }, + "cloud_resource": {"service_account_id": "service_account_id_value"}, + "spark": { + "service_account_id": "service_account_id_value", + "metastore_service_config": { + "metastore_service": "metastore_service_value" + }, + "spark_history_server_config": { + "dataproc_cluster": "dataproc_cluster_value" + }, + }, + "salesforce_data_cloud": { + "instance_uri": "instance_uri_value", + "identity": "identity_value", + "tenant_id": "tenant_id_value", + }, + "creation_time": 1379, + "last_modified_time": 1890, + "has_credential": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
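+ # REST tests mock the underlying requests session instead of a gRPC
+ # stub; the proto below is serialized to JSON to mimic the wire format
+ # the transport parses.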
+ return_value = gcbc_connection.Connection( + name="name_value", + friendly_name="friendly_name_value", + description="description_value", + creation_time=1379, + last_modified_time=1890, + has_credential=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcbc_connection.Connection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_connection(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbc_connection.Connection) + assert response.name == "name_value" + assert response.friendly_name == "friendly_name_value" + assert response.description == "description_value" + assert response.creation_time == 1379 + assert response.last_modified_time == 1890 + assert response.has_credential is True + + +def test_create_connection_rest_required_fields( + request_type=gcbc_connection.CreateConnectionRequest, +): + transport_class = transports.ConnectionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_connection._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("connection_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcbc_connection.Connection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
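+ # The transcode result below is a hand-built stand-in for the
+ # (uri, method, body, query_params) mapping normally derived from the
+ # method's http_options.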
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gcbc_connection.Connection.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_connection(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_connection_rest_unset_required_fields():
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_connection._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("connectionId",))
+        & set(
+            (
+                "parent",
+                "connection",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_connection_rest_interceptors(null_interceptor):
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConnectionServiceRestInterceptor(),
+    )
+    client = ConnectionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "post_create_connection"
+    ) as post, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "pre_create_connection"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = gcbc_connection.CreateConnectionRequest.pb(
+            gcbc_connection.CreateConnectionRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gcbc_connection.Connection.to_json(
+            gcbc_connection.Connection()
+        )
+
+        request = gcbc_connection.CreateConnectionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gcbc_connection.Connection()
+
+        client.create_connection(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_connection_rest_bad_request(
+    transport: str = "rest", request_type=gcbc_connection.CreateConnectionRequest
+):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["connection"] = {
+        "name": "name_value",
+        "friendly_name": "friendly_name_value",
+        "description": "description_value",
+        "cloud_sql": {
+            "instance_id": "instance_id_value",
+            "database": "database_value",
+            "type_": 1,
+            "credential": {"username": "username_value", "password": "password_value"},
+            "service_account_id": "service_account_id_value",
+        },
+        "aws": {
+            "cross_account_role": {
+                "iam_role_id": "iam_role_id_value",
+                "iam_user_id": "iam_user_id_value",
+                "external_id": "external_id_value",
+            },
+            "access_role": {
+                "iam_role_id": "iam_role_id_value",
+                "identity": "identity_value",
+            },
+        },
+        "azure": {
+            "application": "application_value",
+            "client_id": "client_id_value",
+            "object_id": "object_id_value",
+            "customer_tenant_id": "customer_tenant_id_value",
+            "redirect_uri": "redirect_uri_value",
+            "federated_application_client_id": "federated_application_client_id_value",
+            "identity": "identity_value",
+        },
+        "cloud_spanner": {
+            "database": "database_value",
+            "use_parallelism": True,
+            "max_parallelism": 1595,
+            "use_serverless_analytics": True,
+            "use_data_boost": True,
+            "database_role": "database_role_value",
+        },
+        "cloud_resource": {"service_account_id": "service_account_id_value"},
+        "spark": {
+            "service_account_id": "service_account_id_value",
+            "metastore_service_config": {
+                "metastore_service": "metastore_service_value"
+            },
+            "spark_history_server_config": {
+                "dataproc_cluster": "dataproc_cluster_value"
+            },
+        },
+        "salesforce_data_cloud": {
+            "instance_uri": "instance_uri_value",
+            "identity": "identity_value",
+            "tenant_id": "tenant_id_value",
+        },
+        "creation_time": 1379,
+        "last_modified_time": 1890,
+        "has_credential": True,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_connection(request)
+
+
+def test_create_connection_rest_flattened():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcbc_connection.Connection()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            connection=gcbc_connection.Connection(name="name_value"),
+            connection_id="connection_id_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gcbc_connection.Connection.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_connection(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*}/connections"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_connection_rest_flattened_error(transport: str = "rest"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_connection(
+            gcbc_connection.CreateConnectionRequest(),
+            parent="parent_value",
+            connection=gcbc_connection.Connection(name="name_value"),
+            connection_id="connection_id_value",
+        )
+
+
+def test_create_connection_rest_error():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        connection.GetConnectionRequest,
+        dict,
+    ],
+)
+def test_get_connection_rest(request_type):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = connection.Connection(
+            name="name_value",
+            friendly_name="friendly_name_value",
+            description="description_value",
+            creation_time=1379,
+            last_modified_time=1890,
+            has_credential=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = connection.Connection.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_connection(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, connection.Connection)
+    assert response.name == "name_value"
+    assert response.friendly_name == "friendly_name_value"
+    assert response.description == "description_value"
+    assert response.creation_time == 1379
+    assert response.last_modified_time == 1890
+    assert response.has_credential is True
+
+
+def test_get_connection_rest_required_fields(
+    request_type=connection.GetConnectionRequest,
+):
+    transport_class = transports.ConnectionServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_connection._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_connection._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = connection.Connection()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = connection.Connection.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_connection(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_connection_rest_unset_required_fields():
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_connection._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_connection_rest_interceptors(null_interceptor):
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConnectionServiceRestInterceptor(),
+    )
+    client = ConnectionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "post_get_connection"
+    ) as post, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "pre_get_connection"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = connection.GetConnectionRequest.pb(
+            connection.GetConnectionRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = connection.Connection.to_json(
+            connection.Connection()
+        )
+
+        request = connection.GetConnectionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = connection.Connection()
+
+        client.get_connection(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_connection_rest_bad_request(
+    transport: str = "rest", request_type=connection.GetConnectionRequest
+):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
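+    # A 400 status on the mocked response is all the transport needs to
+    # raise core_exceptions.BadRequest.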
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_connection(request)
+
+
+def test_get_connection_rest_flattened():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = connection.Connection()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/connections/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = connection.Connection.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.get_connection(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{name=projects/*/locations/*/connections/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_get_connection_rest_flattened_error(transport: str = "rest"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_connection(
+            connection.GetConnectionRequest(),
+            name="name_value",
+        )
+
+
+def test_get_connection_rest_error():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        connection.ListConnectionsRequest,
+        dict,
+    ],
+)
+def test_list_connections_rest(request_type):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = connection.ListConnectionsResponse(
+            next_page_token="next_page_token_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = connection.ListConnectionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_connections(request)
+
+    # Establish that the response is the type that we expect.
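+    # list_connections returns a pager that wraps ListConnectionsResponse,
+    # so the type check below is against the pager, not the raw proto.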
+    assert isinstance(response, pagers.ListConnectionsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_connections_rest_required_fields(
+    request_type=connection.ListConnectionsRequest,
+):
+    transport_class = transports.ConnectionServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request_init["page_size"] = 0
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+    assert "pageSize" not in jsonified_request
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_connections._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+    assert "pageSize" in jsonified_request
+    assert jsonified_request["pageSize"] == request_init["page_size"]
+
+    jsonified_request["parent"] = "parent_value"
+    jsonified_request["pageSize"] = 951
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_connections._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(
+        (
+            "page_size",
+            "page_token",
+        )
+    )
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == "parent_value"
+    assert "pageSize" in jsonified_request
+    assert jsonified_request["pageSize"] == 951
+
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = connection.ListConnectionsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
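+            # page_size is required for this method, so its default (0) is
+            # re-injected and should surface as ("pageSize", "0") in the
+            # expected_params check below.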
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = connection.ListConnectionsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_connections(request)
+
+            expected_params = [
+                (
+                    "pageSize",
+                    str(0),
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_connections_rest_unset_required_fields():
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_connections._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(
+            (
+                "parent",
+                "pageSize",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_connections_rest_interceptors(null_interceptor):
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConnectionServiceRestInterceptor(),
+    )
+    client = ConnectionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "post_list_connections"
+    ) as post, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "pre_list_connections"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = connection.ListConnectionsRequest.pb(
+            connection.ListConnectionsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = connection.ListConnectionsResponse.to_json(
+            connection.ListConnectionsResponse()
+        )
+
+        request = connection.ListConnectionsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = connection.ListConnectionsResponse()
+
+        client.list_connections(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_connections_rest_bad_request(
+    transport: str = "rest", request_type=connection.ListConnectionsRequest
+):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_connections(request)
+
+
+def test_list_connections_rest_flattened():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = connection.ListConnectionsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = connection.ListConnectionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.list_connections(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*}/connections"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_connections_rest_flattened_error(transport: str = "rest"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_connections(
+            connection.ListConnectionsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_connections_rest_pager(transport: str = "rest"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+        # with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+                next_page_token="abc",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[],
+                next_page_token="def",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                ],
+                next_page_token="ghi",
+            ),
+            connection.ListConnectionsResponse(
+                connections=[
+                    connection.Connection(),
+                    connection.Connection(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(
+            connection.ListConnectionsResponse.to_json(x) for x in response
+        )
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode("UTF-8")
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        pager = client.list_connections(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, connection.Connection) for i in results)
+
+        pages = list(client.list_connections(request=sample_request).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        gcbc_connection.UpdateConnectionRequest,
+        dict,
+    ],
+)
+def test_update_connection_rest(request_type):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"}
+    request_init["connection"] = {
+        "name": "name_value",
+        "friendly_name": "friendly_name_value",
+        "description": "description_value",
+        "cloud_sql": {
+            "instance_id": "instance_id_value",
+            "database": "database_value",
+            "type_": 1,
+            "credential": {"username": "username_value", "password": "password_value"},
+            "service_account_id": "service_account_id_value",
+        },
+        "aws": {
+            "cross_account_role": {
+                "iam_role_id": "iam_role_id_value",
+                "iam_user_id": "iam_user_id_value",
+                "external_id": "external_id_value",
+            },
+            "access_role": {
+                "iam_role_id": "iam_role_id_value",
+                "identity": "identity_value",
+            },
+        },
+        "azure": {
+            "application": "application_value",
+            "client_id": "client_id_value",
+            "object_id": "object_id_value",
+            "customer_tenant_id": "customer_tenant_id_value",
+            "redirect_uri": "redirect_uri_value",
+            "federated_application_client_id": "federated_application_client_id_value",
+            "identity": "identity_value",
+        },
+        "cloud_spanner": {
+            "database": "database_value",
+            "use_parallelism": True,
+            "max_parallelism": 1595,
+            "use_serverless_analytics": True,
+            "use_data_boost": True,
+            "database_role": "database_role_value",
+        },
+        "cloud_resource": {"service_account_id": "service_account_id_value"},
+        "spark": {
+            "service_account_id": "service_account_id_value",
+            "metastore_service_config": {
+                "metastore_service": "metastore_service_value"
+            },
+            "spark_history_server_config": {
+                "dataproc_cluster": "dataproc_cluster_value"
+            },
+        },
+        "salesforce_data_cloud": {
+            "instance_uri": "instance_uri_value",
+            "identity": "identity_value",
+            "tenant_id": "tenant_id_value",
+        },
+        "creation_time": 1379,
+        "last_modified_time": 1890,
+        "has_credential": True,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcbc_connection.Connection(
+            name="name_value",
+            friendly_name="friendly_name_value",
+            description="description_value",
+            creation_time=1379,
+            last_modified_time=1890,
+            has_credential=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gcbc_connection.Connection.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.update_connection(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gcbc_connection.Connection)
+    assert response.name == "name_value"
+    assert response.friendly_name == "friendly_name_value"
+    assert response.description == "description_value"
+    assert response.creation_time == 1379
+    assert response.last_modified_time == 1890
+    assert response.has_credential is True
+
+
+def test_update_connection_rest_required_fields(
+    request_type=gcbc_connection.UpdateConnectionRequest,
+):
+    transport_class = transports.ConnectionServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).update_connection._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).update_connection._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("update_mask",))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = gcbc_connection.Connection()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gcbc_connection.Connection.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_connection(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_connection_rest_unset_required_fields():
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.update_connection._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("updateMask",))
+        & set(
+            (
+                "name",
+                "connection",
+                "updateMask",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_connection_rest_interceptors(null_interceptor):
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConnectionServiceRestInterceptor(),
+    )
+    client = ConnectionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "post_update_connection"
+    ) as post, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "pre_update_connection"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = gcbc_connection.UpdateConnectionRequest.pb(
+            gcbc_connection.UpdateConnectionRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gcbc_connection.Connection.to_json(
+            gcbc_connection.Connection()
+        )
+
+        request = gcbc_connection.UpdateConnectionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gcbc_connection.Connection()
+
+        client.update_connection(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_connection_rest_bad_request(
+    transport: str = "rest", request_type=gcbc_connection.UpdateConnectionRequest
+):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"}
+    request_init["connection"] = {
+        "name": "name_value",
+        "friendly_name": "friendly_name_value",
+        "description": "description_value",
+        "cloud_sql": {
+            "instance_id": "instance_id_value",
+            "database": "database_value",
+            "type_": 1,
+            "credential": {"username": "username_value", "password": "password_value"},
+            "service_account_id": "service_account_id_value",
+        },
+        "aws": {
+            "cross_account_role": {
+                "iam_role_id": "iam_role_id_value",
+                "iam_user_id": "iam_user_id_value",
+                "external_id": "external_id_value",
+            },
+            "access_role": {
+                "iam_role_id": "iam_role_id_value",
+                "identity": "identity_value",
+            },
+        },
+        "azure": {
+            "application": "application_value",
+            "client_id": "client_id_value",
+            "object_id": "object_id_value",
+            "customer_tenant_id": "customer_tenant_id_value",
+            "redirect_uri": "redirect_uri_value",
+            "federated_application_client_id": "federated_application_client_id_value",
+            "identity": "identity_value",
+        },
+        "cloud_spanner": {
+            "database": "database_value",
+            "use_parallelism": True,
+            "max_parallelism": 1595,
+            "use_serverless_analytics": True,
+            "use_data_boost": True,
+            "database_role": "database_role_value",
+        },
+        "cloud_resource": {"service_account_id": "service_account_id_value"},
+        "spark": {
+            "service_account_id": "service_account_id_value",
+            "metastore_service_config": {
+                "metastore_service": "metastore_service_value"
+            },
+            "spark_history_server_config": {
+                "dataproc_cluster": "dataproc_cluster_value"
+            },
+        },
+        "salesforce_data_cloud": {
+            "instance_uri": "instance_uri_value",
+            "identity": "identity_value",
+            "tenant_id": "tenant_id_value",
+        },
+        "creation_time": 1379,
+        "last_modified_time": 1890,
+        "has_credential": True,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_connection(request)
+
+
+def test_update_connection_rest_flattened():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gcbc_connection.Connection()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/connections/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+            connection=gcbc_connection.Connection(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gcbc_connection.Connection.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.update_connection(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{name=projects/*/locations/*/connections/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_update_connection_rest_flattened_error(transport: str = "rest"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_connection(
+            gcbc_connection.UpdateConnectionRequest(),
+            name="name_value",
+            connection=gcbc_connection.Connection(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+def test_update_connection_rest_error():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        connection.DeleteConnectionRequest,
+        dict,
+    ],
+)
+def test_delete_connection_rest(request_type):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ""
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.delete_connection(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_connection_rest_required_fields(
+    request_type=connection.DeleteConnectionRequest,
+):
+    transport_class = transports.ConnectionServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).delete_connection._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).delete_connection._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
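+            # DELETE carries no request body, so the fake transcode result
+            # below deliberately omits a "body" entry.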
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ""
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_connection(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_connection_rest_unset_required_fields():
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_connection._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_connection_rest_interceptors(null_interceptor):
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConnectionServiceRestInterceptor(),
+    )
+    client = ConnectionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "pre_delete_connection"
+    ) as pre:
+        pre.assert_not_called()
+        pb_message = connection.DeleteConnectionRequest.pb(
+            connection.DeleteConnectionRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = connection.DeleteConnectionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_connection(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+
+
+def test_delete_connection_rest_bad_request(
+    transport: str = "rest", request_type=connection.DeleteConnectionRequest
+):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/connections/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_connection(request)
+
+
+def test_delete_connection_rest_flattened():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/connections/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ""
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.delete_connection(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{name=projects/*/locations/*/connections/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_delete_connection_rest_flattened_error(transport: str = "rest"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_connection(
+            connection.DeleteConnectionRequest(),
+            name="name_value",
+        )
+
+
+def test_delete_connection_rest_error():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        iam_policy_pb2.GetIamPolicyRequest,
+        dict,
+    ],
+)
+def test_get_iam_policy_rest(request_type):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "resource": "projects/sample1/locations/sample2/connections/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = return_value
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_iam_policy(request)
+
+    # Establish that the response is the type that we expect.
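+    # IAM methods use the raw protobuf stubs (policy_pb2), so the response
+    # needs no proto-plus conversion before these checks.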
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b"etag_blob"
+
+
+def test_get_iam_policy_rest_required_fields(
+    request_type=iam_policy_pb2.GetIamPolicyRequest,
+):
+    transport_class = transports.ConnectionServiceRestTransport
+
+    request_init = {}
+    request_init["resource"] = ""
+    request = request_type(**request_init)
+    pb_request = request
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_iam_policy._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["resource"] = "resource_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_iam_policy._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "resource" in jsonified_request
+    assert jsonified_request["resource"] == "resource_value"
+
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = policy_pb2.Policy()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
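+            # iam_policy_pb2 requests are already plain protobuf messages,
+            # so no request_type.pb() conversion is needed here.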
+            pb_request = request
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = return_value
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_iam_policy(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_iam_policy_rest_unset_required_fields():
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_iam_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("resource",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_iam_policy_rest_interceptors(null_interceptor):
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConnectionServiceRestInterceptor(),
+    )
+    client = ConnectionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "post_get_iam_policy"
+    ) as post, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "pre_get_iam_policy"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = iam_policy_pb2.GetIamPolicyRequest()
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(policy_pb2.Policy())
+
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = policy_pb2.Policy()
+
+        client.get_iam_policy(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_iam_policy_rest_bad_request(
+    transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest
+):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "resource": "projects/sample1/locations/sample2/connections/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_iam_policy(request)
+
+
+def test_get_iam_policy_rest_flattened():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "resource": "projects/sample1/locations/sample2/connections/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            resource="resource_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = return_value
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.get_iam_policy(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{resource=projects/*/locations/*/connections/*}:getIamPolicy"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_get_iam_policy_rest_flattened_error(transport: str = "rest"):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource="resource_value",
+        )
+
+
+def test_get_iam_policy_rest_error():
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        iam_policy_pb2.SetIamPolicyRequest,
+        dict,
+    ],
+)
+def test_set_iam_policy_rest(request_type):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "resource": "projects/sample1/locations/sample2/connections/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = return_value
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.set_iam_policy(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_rest_required_fields(
+    request_type=iam_policy_pb2.SetIamPolicyRequest,
+):
+    transport_class = transports.ConnectionServiceRestTransport
+
+    request_init = {}
+    request_init["resource"] = ""
+    request = request_type(**request_init)
+    pb_request = request
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).set_iam_policy._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["resource"] = "resource_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).set_iam_policy._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "resource" in jsonified_request
+    assert jsonified_request["resource"] == "resource_value"
+
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = policy_pb2.Policy()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = return_value
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.set_iam_policy(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_set_iam_policy_rest_unset_required_fields():
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.set_iam_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "resource",
+                "policy",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_iam_policy_rest_interceptors(null_interceptor):
+    transport = transports.ConnectionServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ConnectionServiceRestInterceptor(),
+    )
+    client = ConnectionServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "post_set_iam_policy"
+    ) as post, mock.patch.object(
+        transports.ConnectionServiceRestInterceptor, "pre_set_iam_policy"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = iam_policy_pb2.SetIamPolicyRequest()
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(policy_pb2.Policy())
+
+        request = iam_policy_pb2.SetIamPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_set_iam_policy_rest_bad_request(
+    transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest
+):
+    client = ConnectionServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "resource": "projects/sample1/locations/sample2/connections/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/connections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{resource=projects/*/locations/*/connections/*}:setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_set_iam_policy_rest_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "resource": "projects/sample1/locations/sample2/connections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
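+ # The REST transport rebuilds the proto from the JSON set on
+ # response_value._content above, so these assertions exercise the
+ # decode path; response is a freshly parsed message, not return_value
+ # itself.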
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_rest_required_fields( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + transport_class = transports.ConnectionServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + jsonified_request["permissions"] = "permissions_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == "permissions_value" + + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.ConnectionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "permissions", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.ConnectionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ConnectionServiceRestInterceptor(), + ) + client = ConnectionServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ConnectionServiceRestInterceptor, "post_test_iam_permissions" + ) as post, mock.patch.object( + transports.ConnectionServiceRestInterceptor, "pre_test_iam_permissions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + request = iam_policy_pb2.TestIamPermissionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "resource": "projects/sample1/locations/sample2/connections/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/connections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + permissions=["permissions_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{resource=projects/*/locations/*/connections/*}:testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +def test_test_iam_permissions_rest_error(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConnectionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
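+ # (A transport instance already carries its own credentials, so
+ # combining one with any other credential source, whether credentials,
+ # credentials_file, api_key, or scopes, raises ValueError.)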
+ transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConnectionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConnectionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConnectionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConnectionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConnectionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConnectionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + transports.ConnectionServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ConnectionServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConnectionServiceGrpcTransport, + ) + + +def test_connection_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ConnectionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_connection_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ConnectionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
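+ # ConnectionServiceTransport is the abstract base class; the concrete
+ # gRPC and REST transports override each of the stubs probed below.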
+ methods = ( + "create_connection", + "get_connection", + "list_connections", + "update_connection", + "delete_connection", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_connection_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConnectionServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_connection_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConnectionServiceTransport() + adc.assert_called_once() + + +def test_connection_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConnectionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
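+ # ADC stands for Application Default Credentials. Outside of tests the
+ # lookup is simply (minimal sketch, assuming a configured environment):
+ #
+ #   import google.auth
+ #   credentials, project = google.auth.default(scopes=["1", "2"])
+ #
+ # Here google.auth.default is mocked, so no real environment is needed.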
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + transports.ConnectionServiceRestTransport, + ], +) +def test_connection_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConnectionServiceGrpcTransport, grpc_helpers), + (transports.ConnectionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_connection_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "bigqueryconnection.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=["1", "2"], + default_host="bigqueryconnection.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
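+ # client_cert_source_for_mtls is a callable returning PEM-encoded
+ # (certificate_chain, private_key) bytes; the fixture callback used
+ # below behaves roughly like this sketch:
+ #
+ #   def client_cert_source_callback():
+ #       return b"cert bytes", b"key bytes"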
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_connection_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ConnectionServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_connection_service_host_no_port(transport_name): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigqueryconnection.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "bigqueryconnection.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigqueryconnection.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_connection_service_host_with_port(transport_name): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigqueryconnection.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "bigqueryconnection.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigqueryconnection.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_connection_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ConnectionServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ConnectionServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_connection._session + session2 = client2.transport.create_connection._session + assert session1 != session2 + session1 = client1.transport.get_connection._session + session2 = client2.transport.get_connection._session + assert session1 != session2 + session1 = client1.transport.list_connections._session + session2 = client2.transport.list_connections._session + assert session1 != session2 + session1 = client1.transport.update_connection._session + session2 = client2.transport.update_connection._session + assert session1 != session2 + session1 = client1.transport.delete_connection._session + session2 = client2.transport.delete_connection._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = 
client2.transport.test_iam_permissions._session + assert session1 != session2 + + +def test_connection_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ConnectionServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_connection_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ConnectionServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
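+ # Unlike the test above, this one passes client_cert_source=None, so the
+ # mTLS channel credentials must instead come from ADC via
+ # google.auth.transport.grpc.SslCredentials.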
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ConnectionServiceGrpcTransport, + transports.ConnectionServiceGrpcAsyncIOTransport, + ], +) +def test_connection_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cluster_path(): + project = "squid" + region = "clam" + cluster = "whelk" + expected = "projects/{project}/regions/{region}/clusters/{cluster}".format( + project=project, + region=region, + cluster=cluster, + ) + actual = ConnectionServiceClient.cluster_path(project, region, cluster) + assert expected == actual + + +def test_parse_cluster_path(): + expected = { + "project": "octopus", + "region": "oyster", + "cluster": "nudibranch", + } + path = ConnectionServiceClient.cluster_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_cluster_path(path) + assert expected == actual + + +def test_connection_path(): + project = "cuttlefish" + location = "mussel" + connection = "winkle" + expected = ( + "projects/{project}/locations/{location}/connections/{connection}".format( + project=project, + location=location, + connection=connection, + ) + ) + actual = ConnectionServiceClient.connection_path(project, location, connection) + assert expected == actual + + +def test_parse_connection_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "connection": "abalone", + } + path = ConnectionServiceClient.connection_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_connection_path(path) + assert expected == actual + + +def test_service_path(): + project = "squid" + location = "clam" + service = "whelk" + expected = "projects/{project}/locations/{location}/services/{service}".format( + project=project, + location=location, + service=service, + ) + actual = ConnectionServiceClient.service_path(project, location, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "octopus", + "location": "oyster", + "service": "nudibranch", + } + path = ConnectionServiceClient.service_path(**expected) + + # Check that the path construction is reversible. 
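+ # i.e. parse_service_path("projects/octopus/locations/oyster/services/nudibranch")
+ # should return the expected dict above.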
+ actual = ConnectionServiceClient.parse_service_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ConnectionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = ConnectionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ConnectionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = ConnectionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ConnectionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = ConnectionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = ConnectionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = ConnectionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ConnectionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ConnectionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = ConnectionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConnectionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ConnectionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ConnectionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ConnectionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConnectionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ConnectionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport), + ( + ConnectionServiceAsyncClient, + transports.ConnectionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-bigquery-datatransfer/.OwlBot.yaml b/packages/google-cloud-bigquery-datatransfer/.OwlBot.yaml new file mode 100644 index 000000000000..16b79a4da229 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/bigquery/datatransfer/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-bigquery-datatransfer/$1 + +begin-after-commit-hash: ad5147b2c3694044935301527f68b951c2e1f419 + diff --git a/packages/google-cloud-bigquery-datatransfer/.coveragerc b/packages/google-cloud-bigquery-datatransfer/.coveragerc new file mode 100644 index 000000000000..691a23ba5e55 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/bigquery_datatransfer/__init__.py + google/cloud/bigquery_datatransfer/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-bigquery-datatransfer/.flake8 b/packages/google-cloud-bigquery-datatransfer/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-bigquery-datatransfer/.gitignore b/packages/google-cloud-bigquery-datatransfer/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-bigquery-datatransfer/.repo-metadata.json b/packages/google-cloud-bigquery-datatransfer/.repo-metadata.json new file mode 100644 index 000000000000..632535b72c8b --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "bigquerydatatransfer", + "name_pretty": "BigQuery Data Transfer", + "product_documentation": "https://cloud.google.com/bigquery/transfer/", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerydatatransfer/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-bigquery-datatransfer", + "api_id": "bigquerydatatransfer.googleapis.com", + "requires_billing": true, + "default_version": "v1", + "codeowner_team": "@googleapis/api-bigquery", + "api_shortname": "bigquerydatatransfer", + "api_description": "allows users to transfer data from partner SaaS applications to Google BigQuery on a scheduled, managed basis." +} diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md new file mode 100644 index 000000000000..cdaa564994eb --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -0,0 +1,504 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history + +## [3.12.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.11.2...v3.12.0) (2023-08-01) + + +### Features + +* Add EncryptionConfiguration to TransferConfig ([f57343c](https://github.com/googleapis/python-bigquery-datatransfer/commit/f57343c31acd8ab44b4cc427e3c148ba93bb5368)) +* Add List type to Data source parameter. 
([f57343c](https://github.com/googleapis/python-bigquery-datatransfer/commit/f57343c31acd8ab44b4cc427e3c148ba93bb5368)) + +## [3.11.2](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.11.1...v3.11.2) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#470](https://github.com/googleapis/python-bigquery-datatransfer/issues/470)) ([551adeb](https://github.com/googleapis/python-bigquery-datatransfer/commit/551adeb66c823df68daed10bd55ad7a9a075a986)) + +## [3.11.1](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.11.0...v3.11.1) (2023-03-27) + + +### Documentation + +* Fix formatting of request arg in docstring ([#455](https://github.com/googleapis/python-bigquery-datatransfer/issues/455)) ([40a85d6](https://github.com/googleapis/python-bigquery-datatransfer/commit/40a85d6eb5e40ff77c054c76bbdfe8a5b821e1ef)) +* Removes deprecated sample ([#451](https://github.com/googleapis/python-bigquery-datatransfer/issues/451)) ([250acb4](https://github.com/googleapis/python-bigquery-datatransfer/commit/250acb4ec89a33c54174659d33fd7088f22f93db)) +* Replace deprecated snippet ([e2f04db](https://github.com/googleapis/python-bigquery-datatransfer/commit/e2f04db7553721610903500133553e67fe717642)) + +## [3.11.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.10.1...v3.11.0) (2023-02-28) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#439](https://github.com/googleapis/python-bigquery-datatransfer/issues/439)) ([b77f671](https://github.com/googleapis/python-bigquery-datatransfer/commit/b77f6716a47ba5bda4e50919e37b41fbc7cb3e20)) + + +### Documentation + +* Minor comment update ([#445](https://github.com/googleapis/python-bigquery-datatransfer/issues/445)) ([27b3271](https://github.com/googleapis/python-bigquery-datatransfer/commit/27b3271682fae002d29c65e057190c01f4f24ed4)) + +## [3.10.1](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.10.0...v3.10.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([a10a470](https://github.com/googleapis/python-bigquery-datatransfer/commit/a10a470dc53947e35202c0b296fe74e519c0abe2)) + + +### Documentation + +* Add documentation for enums ([a10a470](https://github.com/googleapis/python-bigquery-datatransfer/commit/a10a470dc53947e35202c0b296fe74e519c0abe2)) + +## [3.10.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.9.0...v3.10.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#421](https://github.com/googleapis/python-bigquery-datatransfer/issues/421)) ([d81fe69](https://github.com/googleapis/python-bigquery-datatransfer/commit/d81fe694862b9663e8ffd407c9bad7af6bc4fc01)) + +## [3.9.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.8.0...v3.9.0) (2023-01-04) + + +### Features + +* Add location methods ([#416](https://github.com/googleapis/python-bigquery-datatransfer/issues/416)) ([6538b29](https://github.com/googleapis/python-bigquery-datatransfer/commit/6538b29601e91d16e6a2095a256b4b18920d73b9)) + +## [3.8.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.7.3...v3.8.0) (2022-12-15) + + +### Features + +* Add support for `google.cloud.bigquery_datatransfer.__version__` 
([c333464](https://github.com/googleapis/python-bigquery-datatransfer/commit/c3334647fd103e2f10466f1daecde51a89aae727)) +* Add typing to proto.Message based class attributes ([c333464](https://github.com/googleapis/python-bigquery-datatransfer/commit/c3334647fd103e2f10466f1daecde51a89aae727)) + + +### Bug Fixes + +* Add dict typing for client_options ([c333464](https://github.com/googleapis/python-bigquery-datatransfer/commit/c3334647fd103e2f10466f1daecde51a89aae727)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([20be9ee](https://github.com/googleapis/python-bigquery-datatransfer/commit/20be9ee41126f3c38ef9a6070e7a6fad5a7712d1)) +* Drop usage of pkg_resources ([20be9ee](https://github.com/googleapis/python-bigquery-datatransfer/commit/20be9ee41126f3c38ef9a6070e7a6fad5a7712d1)) +* Fix timeout default values ([20be9ee](https://github.com/googleapis/python-bigquery-datatransfer/commit/20be9ee41126f3c38ef9a6070e7a6fad5a7712d1)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([c333464](https://github.com/googleapis/python-bigquery-datatransfer/commit/c3334647fd103e2f10466f1daecde51a89aae727)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([20be9ee](https://github.com/googleapis/python-bigquery-datatransfer/commit/20be9ee41126f3c38ef9a6070e7a6fad5a7712d1)) + +## [3.7.3](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.7.2...v3.7.3) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#400](https://github.com/googleapis/python-bigquery-datatransfer/issues/400)) ([5e71c7c](https://github.com/googleapis/python-bigquery-datatransfer/commit/5e71c7c38ee937a4b61dc6a0f1ba79431a7d97b1)) + +## [3.7.2](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.7.1...v3.7.2) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#397](https://github.com/googleapis/python-bigquery-datatransfer/issues/397)) ([4b5288d](https://github.com/googleapis/python-bigquery-datatransfer/commit/4b5288df912cfcd7db36012f0a34be9c150a5419)) + +## [3.7.1](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.7.0...v3.7.1) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#375](https://github.com/googleapis/python-bigquery-datatransfer/issues/375)) ([4bbf741](https://github.com/googleapis/python-bigquery-datatransfer/commit/4bbf741b77c3210e7065daeb0eb56625ae60a488)) +* **deps:** require proto-plus >= 1.22.0 ([4bbf741](https://github.com/googleapis/python-bigquery-datatransfer/commit/4bbf741b77c3210e7065daeb0eb56625ae60a488)) + +## [3.7.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.6.2...v3.7.0) (2022-07-16) + + +### Features + +* add audience parameter ([f756c16](https://github.com/googleapis/python-bigquery-datatransfer/commit/f756c16d8bff3246e77d74f03ffc7e95ce9d1e77)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#362](https://github.com/googleapis/python-bigquery-datatransfer/issues/362)) ([f756c16](https://github.com/googleapis/python-bigquery-datatransfer/commit/f756c16d8bff3246e77d74f03ffc7e95ce9d1e77)) +* require python 3.7+ ([#364](https://github.com/googleapis/python-bigquery-datatransfer/issues/364)) 
([106aa92](https://github.com/googleapis/python-bigquery-datatransfer/commit/106aa926b22f4a5dd64c9e0ecf17317727ab8735)) + +## [3.6.2](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.6.1...v3.6.2) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#349](https://github.com/googleapis/python-bigquery-datatransfer/issues/349)) ([6b03c4b](https://github.com/googleapis/python-bigquery-datatransfer/commit/6b03c4b44b391f4cb9c47cebe40716db66a91ca1)) + + +### Documentation + +* fix changelog header to consistent size ([#350](https://github.com/googleapis/python-bigquery-datatransfer/issues/350)) ([c18147a](https://github.com/googleapis/python-bigquery-datatransfer/commit/c18147ab9d3713736ccdf7f327f1913c9be1e994)) + +## [3.6.1](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.6.0...v3.6.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#295](https://github.com/googleapis/python-bigquery-datatransfer/issues/295)) ([61d89b1](https://github.com/googleapis/python-bigquery-datatransfer/commit/61d89b10466b6e4792d87b5274c7e8ca8a03f8ab)) + +## [3.6.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.5.0...v3.6.0) (2022-02-14) + + +### Features + +* add api key support ([6bd57bd](https://github.com/googleapis/python-bigquery-datatransfer/commit/6bd57bd2fb0a8fbf8d4507045a1dad5abebb7cbb)) +* add owner email to TransferConfig message ([#279](https://github.com/googleapis/python-bigquery-datatransfer/issues/279)) ([6bd57bd](https://github.com/googleapis/python-bigquery-datatransfer/commit/6bd57bd2fb0a8fbf8d4507045a1dad5abebb7cbb)) +* allow customer to enroll a datasource programmatically ([6bd57bd](https://github.com/googleapis/python-bigquery-datatransfer/commit/6bd57bd2fb0a8fbf8d4507045a1dad5abebb7cbb)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([6bd57bd](https://github.com/googleapis/python-bigquery-datatransfer/commit/6bd57bd2fb0a8fbf8d4507045a1dad5abebb7cbb)) + + +### Documentation + +* add generated code samples ([#283](https://github.com/googleapis/python-bigquery-datatransfer/issues/283)) ([939d4c3](https://github.com/googleapis/python-bigquery-datatransfer/commit/939d4c3d507a0c38daf6f8568c64f7c44a7dd4f3)) +* add generated snippets ([939d4c3](https://github.com/googleapis/python-bigquery-datatransfer/commit/939d4c3d507a0c38daf6f8568c64f7c44a7dd4f3)) +* improvements to various message and field descriptions ([6bd57bd](https://github.com/googleapis/python-bigquery-datatransfer/commit/6bd57bd2fb0a8fbf8d4507045a1dad5abebb7cbb)) +* preserve hyperlinks with hyphens ([6bd57bd](https://github.com/googleapis/python-bigquery-datatransfer/commit/6bd57bd2fb0a8fbf8d4507045a1dad5abebb7cbb)) + +## [3.5.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.4.1...v3.5.0) (2022-01-14) + + +### Features + +* add support for Python 3.9 / 3.10 ([#253](https://github.com/googleapis/python-bigquery-datatransfer/issues/253)) ([5e02bd1](https://github.com/googleapis/python-bigquery-datatransfer/commit/5e02bd166149b4ef7ee0f295fdcb6a3570ec2f58)) + +## [3.4.1](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.4.0...v3.4.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency 
([2a7e0db](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/2a7e0dba3714d1664d9c67518040ccf6b51eda83)) +* **deps:** require google-api-core >= 1.28.0 ([2a7e0db](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/2a7e0dba3714d1664d9c67518040ccf6b51eda83)) + + +### Documentation + +* list oneofs in docstring ([2a7e0db](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/2a7e0dba3714d1664d9c67518040ccf6b51eda83)) + +## [3.4.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.3.4...v3.4.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#234](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/234)) ([cba3dad](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/cba3dad55db8a00f95503b9c94d4ca21ed462a4f)) + +## [3.3.4](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.3.3...v3.3.4) (2021-10-04) + + +### Bug Fixes + +* improper types in pagers generation ([702a848](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/702a84864e5395cf7a7b8f2aeed93d2109414f7c)) + +## [3.3.3](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.3.2...v3.3.3) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([fe024db](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/fe024dbcc3075fd779358d434f37ec3ee3a0adf9)) + +## [3.3.2](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.3.1...v3.3.2) (2021-08-27) + + +### Documentation + +* Improvements to various message and field descriptions ([#202](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/202)) ([9684723](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/9684723aed8c743d665ccac2b338770dd09fc94a)) + +## [3.3.1](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.3.0...v3.3.1) (2021-07-26) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#180](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/180)) ([b0e9533](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/b0e953347b43dd2252cf907bfaa8b6fbb16379b6)) +* enable self signed jwt for grpc ([#185](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/185)) ([3dd3bdd](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/3dd3bdd9074297803eced776e0ab12491f91da3b)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#181](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/181)) ([5bc909c](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/5bc909c606fdf7cc037e1f014a997272f6284c65)) + + +### Miscellaneous Chores + +* release as 3.3.1 ([#186](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/186)) ([7c1a7b9](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/7c1a7b98fcb08dbe081c8c645d079a6586fa7b7b)) + +## [3.3.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.2.0...v3.3.0) (2021-07-12) + + +### Features + +* add always_use_jwt_access ([#171](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/171)) ([cd4494f](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/cd4494f0dc7304469e7d4a0ed6e13d716b3cacbf)) + + +### Documentation + +* add sample to include run notification 
([#173](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/173)) ([ea018c9](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/ea018c9f4a1f9c360dbe9f08650250ea8c505f29)) + +## [3.2.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.1.1...v3.2.0) (2021-06-22) + + +### Features + +* support self-signed JWT flow for service accounts ([046c368](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/046c368cf5a75a210b0ecc7e6e1eee9bcd907669)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([046c368](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/046c368cf5a75a210b0ecc7e6e1eee9bcd907669)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/1127)) ([#164](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/164)) ([2741e4f](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/2741e4fb1d9074494872fafcec96d870b14b671d)), closes [#1126](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/1126) + +## [3.1.1](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.1.0...v3.1.1) (2021-04-07) + + +### Bug Fixes + +* require proto-plus>=1.15.0 ([91910f1](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/91910f1ea01c5324fa63a7d85a034d08aeaae3f9)) +* use correct retry deadline ([#121](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/121)) ([91910f1](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/91910f1ea01c5324fa63a7d85a034d08aeaae3f9)) + +## [3.1.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.0.1...v3.1.0) (2021-03-22) + + +### Features + +* add `client_cert_source_for_mtls` parameter to grpc transport ([#114](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/114)) ([9c7ed28](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/9c7ed285203cd44917911e78c2c313d3ad50afda)) + +## [3.0.1](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v3.0.0...v3.0.1) (2021-03-03) + + +### Bug Fixes + +* remove recv msg limit, add enums to `types` ([#84](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/84)) ([3e2bbef](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/3e2bbef292ddda6a736be397be4e5a0fb213eeff)) + + +### Documentation + +* add sample for dataset copy ([#76](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/76)) ([f6d2c5b](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/f6d2c5b8f3c75426881dfce90ab713535416950e)) +* add scheduled query samples ([#83](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/83)) ([cd51970](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/cd519709228cda3bbcf2fd978d37ccd04ef27c82)) +* ensure minimum width for 'Parameters' / 'Returns' column ([#95](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/95)) ([5c8d7c1](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/5c8d7c1e860d1c50d892bfabc7ec936aaa40e714)) +* **python:** document adding Python 3.9 support, dropping 3.5 support ([#89](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/89)) ([dd84592](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/dd8459291a3ac0f98606b61ae566cb264ce96825)), closes 
[#787](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/787) +* remove out-of-date sample from README ([#80](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/80)) ([af0406e](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/af0406eedac1dc8c663b5c8f67f56255caeea2fa)) +* remove redundant samples ([#86](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/86)) ([093e407](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/093e407c60b117a00d2cdf8d225f22d61bc221c4)) +* update contributing guide to Python 3.8 ([#105](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/105)) ([678c335](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/678c3355e1b2e8525005ad337048d60a51400fc0)) + +## [3.0.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v2.1.0...v3.0.0) (2020-12-09) + + +### ⚠ BREAKING CHANGES + +* type is renamed to type_ to avoid conflict with built-in functions (introduced in googleapis/gapic-generator-python#595) + +### Features + +* add common resource path helpers ([#69](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/69)) ([e0bcedb](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/e0bcedb58109e38a58584d5b3087f03e1fa10835)) + + +### Bug Fixes + +* avoid collision with built-in functions by renaming type property to type_ ([#53](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/53)) ([b954411](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/b95441140f7c86dd3e833aef0532badd6280ef48)), closes [/github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py#L27-L32](https://www.github.com/googleapis//github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py/issues/L27-L32) + + +### Documentation + +* update intersphinx links ([#78](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/78)) ([a78ba39](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/a78ba39bf1507cbc9e2a51fe4553d602da7f7601)) + +## 2.1.0 + +09-29-2020 09:34 PDT + + +### Implementation Changes + +Adjust gapic namespace to `google.cloud.bigquery_datatransfer` ([#61](https://github.com/googleapis/python-bigquery-datatransfer/pull/61)) + +## 2.0.0 + +09-21-2020 08:36 PDT + + +### Implementation Changes + +- Transition the library to microgenerator. ([#56](https://github.com/googleapis/python-bigquery-datatransfer/pull/56)) + This is a **breaking change** that introduces several **method signature changes** and **drops support + for Python 2.7 and 3.5**. See [migration guide](https://googleapis.dev/python/bigquerydatatransfer/latest/UPGRADING.html) + for more info. + +## 1.1.1 + +09-16-2020 11:12 PDT + + +### Implementation Changes + +- Change default retry policies timeouts (via synth). ([#48](https://github.com/googleapis/python-bigquery-datatransfer/pull/48)) + + +### Documentation + +- Add sample for updating transfer config. ([#46](https://github.com/googleapis/python-bigquery-datatransfer/pull/46)) +- Add dataset ID in function call in samples. ([#44](https://github.com/googleapis/python-bigquery-datatransfer/pull/44)) +- Move code samples from the common samples repo to this library. 
([#38](https://github.com/googleapis/python-bigquery-datatransfer/pull/38)) + + +### Internal / Testing Changes + +- Update CODEOWNERS for samples and library code. ([#47](https://github.com/googleapis/python-bigquery-datatransfer/pull/47)) + +## [1.1.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v1.0.0...v1.1.0) (2020-06-20) + + +### Features + +* add first party oauth ([#22](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/22)) ([a806b8b](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/a806b8b3d0e3213f1488563f25504a27af9a9cda)) + +## [1.0.0](https://www.github.com/googleapis/python-bigquery-datatransfer/compare/v0.4.1...v1.0.0) (2020-03-04) + + +### Features + +* **bigquerydatatransfer:** add `service_account_name` option to transfer configs ([#10013](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/10013)) ([9ca090a](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/9ca090af431092bc4286fa4443dd0dc0141f6de6)) +* **bigquerydatatransfer:** undeprecate resource name helper methods; add py2 deprecation warning; bump copyright year to 2020 (via synth) ([#10226](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/10226)) ([c0f9cc3](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/c0f9cc398e5558002c79a875809bb6cd1a98a8a4)) +* set release_status to production/stable ([#15](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/15)) ([a9c1160](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/a9c1160475dbc327e8cc5da3b5aee3ceaa618bd3)) + + +### Bug Fixes + +* **bigquery_datatransfer:** deprecate resource name helper methods (via synth) ([#9829](https://www.github.com/googleapis/python-bigquery-datatransfer/issues/9829)) ([fc06995](https://www.github.com/googleapis/python-bigquery-datatransfer/commit/fc0699549479cc3e34e217f9e588f5128107ba89)) + +## 0.4.1 + +07-31-2019 17:50 PDT + + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Fix links to BigQuery Datatransfer documentation. ([#8859](https://github.com/googleapis/google-cloud-python/pull/8859)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + +### Internal / Testing Changes +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + +## 0.4.0 + +07-16-2019 17:11 PDT + +### Implementation Changes + +- Retry DEADLINE_EXCEEDED (via synth). ([#7920](https://github.com/googleapis/google-cloud-python/pull/7920)) +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) + +### New Features + +- Add `DatasourceServiceClient` (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) +- Add `start_manual_transfer_runs` method (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) +- Add `client_info`/`version_info` support (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) +- Allow passing kwargs to `create_channel` (via synth). ([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) +- Add path helpers (via synth). 
([#8630](https://github.com/googleapis/google-cloud-python/pull/8630)) +- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) + +### Documentation + +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) +- Adjust indentation on scheduled query sample. ([#8493](https://github.com/googleapis/google-cloud-python/pull/8493)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Add sample to schedule query with BQ DTS. ([#7703](https://github.com/googleapis/google-cloud-python/pull/7703)) +- Add nox session `docs` (via synth). ([#7765](https://github.com/googleapis/google-cloud-python/pull/7765)) +- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) +- Pick up stub docstring fix in GAPIC generator. ([#6965](https://github.com/googleapis/google-cloud-python/pull/6965)) + +### Internal / Testing Changes + +- Blacken noxfile.py, setup.py (via synth). ([#8116](https://github.com/googleapis/google-cloud-python/pull/8116)) +- Add empty lines (via synth). ([#8050](https://github.com/googleapis/google-cloud-python/pull/8050)) +- Remove unused message exports (via synth). ([#7263](https://github.com/googleapis/google-cloud-python/pull/7263)) +- Protoc-generated serialization update. ([#7075](https://github.com/googleapis/google-cloud-python/pull/7075)) + +## 0.3.0 + +12-17-2018 17:59 PST + + +### Implementation Changes +- Pick up enum fixes in the GAPIC generator. ([#6608](https://github.com/googleapis/google-cloud-python/pull/6608)) +- Pick up fixes in GAPIC generator. ([#6491](https://github.com/googleapis/google-cloud-python/pull/6491)) +- Fix `client_info` bug, update docstrings. ([#6405](https://github.com/googleapis/google-cloud-python/pull/6405)) +- Re-generate library using bigquery_datatransfer/synth.py ([#5973](https://github.com/googleapis/google-cloud-python/pull/5973)) +- Fix stray, lint-breaking blank lines from autosynth. ([#5960](https://github.com/googleapis/google-cloud-python/pull/5960)) +- Re-generate library using `bigquery_datatransfer/synth.py`. ([#5947](https://github.com/googleapis/google-cloud-python/pull/5947)) + +### Dependencies +- Bump minimum api_core version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Fix GAX fossils ([#6264](https://github.com/googleapis/google-cloud-python/pull/6264)) +- Normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) +- Harmonize / DRY 'README.rst' / 'docs/index.rst'. ([#6013](https://github.com/googleapis/google-cloud-python/pull/6013)) + +### Internal / Testing Changes +- Update noxfile. +- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Unblack bigquery gapic and protos. 
+- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) +- Add synth metadata. ([#6562](https://github.com/googleapis/google-cloud-python/pull/6562)) +- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) + +## 0.2.0 + +### Implementation Changes +- Regenerate bigquery-datatransfer (#5793) + +### Internal / Testing Changes +- Avoid overwriting '__module__' of messages from shared modules. (#5364) +- Modify system tests to use prerelease versions of grpcio (#5304) +- Add Test runs for Python 3.7 and remove 3.4 (#5295) +- Fix bad trove classifier +- Rename releases to changelog and include from CHANGELOG.md (#5191) + +## 0.1.1 + +### Dependencies + +- Update dependency range for api-core to include v1.0.0 releases (#4944) + +### Documentation + +- Fix package name in readme (#4670) +- BigQueryDataTransfer: update 404 link for API documentation (#4672) +- Replacing references to `stable/` docs with `latest/`. (#4638) + +### Testing and internal changes + +- Re-enable lint for tests, remove usage of pylint (#4921) +- Normalize all setup.py files (#4909) +- Update index.rst (#4816) +- nox unittest updates (#4646) + +## 0.1.0 + +[![release level](https://img.shields.io/badge/release%20level-alpha-orange.svg?style=flat)](https://cloud.google.com/terms/launch-stages) + +The BigQuery Data Transfer Service automates data movement from SaaS +applications to Google BigQuery on a scheduled, managed basis. Your analytics +team can lay the foundation for a data warehouse without writing a single line +of code. BigQuery Data Transfer Service initially supports Google application +sources like Adwords, DoubleClick Campaign Manager, DoubleClick for Publishers +and YouTube. + +PyPI: https://pypi.org/project/google-cloud-bigquery-datatransfer/0.1.0/ diff --git a/packages/google-cloud-bigquery-datatransfer/CODE_OF_CONDUCT.md b/packages/google-cloud-bigquery-datatransfer/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/CONTRIBUTING.rst b/packages/google-cloud-bigquery-datatransfer/CONTRIBUTING.rst new file mode 100644 index 000000000000..f4a52ae00443 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. 
_repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.11 -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``.
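Editor's note: for readers new to ``nox``, here is a minimal, hypothetical sketch of the kind of session that the ``nox -s unit`` and ``nox -s cover`` commands above drive. The generated ``noxfile.py`` in each package is the source of truth and defines more sessions with pinned dependencies; the session name, Python versions, and pytest arguments below are illustrative only.

```python
# A minimal, hypothetical nox session, assuming pytest-based unit tests.
# The generated noxfile.py in each package is the real configuration.
import nox


@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"])
def unit(session):
    # Install the package under test plus test-only dependencies.
    session.install("-e", ".")
    session.install("pytest", "pytest-cov")
    # Run the unit test suite; arguments given after `--` on the nox
    # command line (e.g. -k <name of test>) arrive via session.posargs.
    session.run("pytest", "--cov=google.cloud", "tests/unit", *session.posargs)
```

With a session like this, ``nox -s unit-3.11`` selects the Python 3.11 variant, which is why the single-test invocations above name a version-qualified session.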
+ +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-bigquery-datatransfer + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-bigquery-datatransfer/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-bigquery-datatransfer/LICENSE b/packages/google-cloud-bigquery-datatransfer/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-bigquery-datatransfer/MANIFEST.in b/packages/google-cloud-bigquery-datatransfer/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-bigquery-datatransfer/README.rst b/packages/google-cloud-bigquery-datatransfer/README.rst new file mode 100644 index 000000000000..81df75c28d6c --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/README.rst @@ -0,0 +1,108 @@ +Python Client for BigQuery Data Transfer +======================================== + +|stable| |pypi| |versions| + +`BigQuery Data Transfer`_: allows users to transfer data from partner SaaS applications to Google BigQuery on a scheduled, managed basis. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-datatransfer.svg + :target: https://pypi.org/project/google-cloud-bigquery-datatransfer/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-datatransfer.svg + :target: https://pypi.org/project/google-cloud-bigquery-datatransfer/ +.. _BigQuery Data Transfer: https://cloud.google.com/bigquery/transfer/ +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigquerydatatransfer/latest +.. _Product Documentation: https://cloud.google.com/bigquery/transfer/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the BigQuery Data Transfer.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the BigQuery Data Transfer.: https://cloud.google.com/bigquery/transfer/ +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-bigquery-datatransfer + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-bigquery-datatransfer + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for BigQuery Data Transfer + to see other available methods on the client. +- Read the `BigQuery Data Transfer Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _BigQuery Data Transfer Product documentation: https://cloud.google.com/bigquery/transfer/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-bigquery-datatransfer/SECURITY.md b/packages/google-cloud-bigquery-datatransfer/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
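Editor's note: as a quick smoke test after the installation steps in the README above, the following small example (not part of the packaged README) lists the data sources visible to a project. It assumes Application Default Credentials are configured and uses a placeholder project id.

```python
# Hypothetical smoke test for the installed package; "your-project-id"
# is a placeholder and must be replaced with a real project.
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()
parent = client.common_project_path("your-project-id")  # "projects/your-project-id"

# list_data_sources returns a pager that lazily fetches further pages.
for data_source in client.list_data_sources(parent=parent):
    print(data_source.data_source_id, "-", data_source.display_name)
```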
diff --git a/packages/google-cloud-bigquery-datatransfer/docs/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/docs/README.rst b/packages/google-cloud-bigquery-datatransfer/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/docs/_static/custom.css b/packages/google-cloud-bigquery-datatransfer/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-bigquery-datatransfer/docs/_templates/layout.html b/packages/google-cloud-bigquery-datatransfer/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+        {%- block relbar_top %}
+        {%- if theme_show_relbar_top|tobool %}
+        <div class="related top">
+          &#160;
+          {{- rellink_markup () }}
+        </div>
+        {%- endif %}
+        {% endblock %}
+
+        <div class="body" role="main">
+          <div class="admonition" id="python2-eol">
+          As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+          Library versions released prior to that date will continue to be available. For more information please
+          visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+          </div>
+          {% block body %} {% endblock %}
+        </div>
+
+        {%- block relbar_bottom %}
+        {%- if theme_show_relbar_bottom|tobool %}
+        <div class="related bottom">
+          &#160;
+          {{- rellink_markup () }}
+        </div>
+        {%- endif %}
+        {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/data_transfer_service.rst b/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/data_transfer_service.rst new file mode 100644 index 000000000000..480f43ed3d30 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/data_transfer_service.rst @@ -0,0 +1,10 @@ +DataTransferService +------------------------------------- + +.. automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service + :members: + :inherited-members: + +.. automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/services.rst b/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/services.rst new file mode 100644 index 000000000000..37a71a43a4ea --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Bigquery Datatransfer v1 API +====================================================== +.. toctree:: + :maxdepth: 2 + + data_transfer_service diff --git a/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/types.rst b/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/types.rst new file mode 100644 index 000000000000..ccda83a5d7b2 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/bigquery_datatransfer_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Bigquery Datatransfer v1 API +=================================================== + +.. automodule:: google.cloud.bigquery_datatransfer_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-bigquery-datatransfer/docs/conf.py b/packages/google-cloud-bigquery-datatransfer/docs/conf.py new file mode 100644 index 000000000000..f6c1697c58d7 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-bigquery-datatransfer documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. 
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-bigquery-datatransfer" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. 
See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-bigquery-datatransfer", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-bigquery-datatransfer-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-bigquery-datatransfer.tex", + "google-cloud-bigquery-datatransfer Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-bigquery-datatransfer", + "google-cloud-bigquery-datatransfer Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-bigquery-datatransfer", + "google-cloud-bigquery-datatransfer Documentation", + author, + "google-cloud-bigquery-datatransfer", + "google-cloud-bigquery-datatransfer Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = {
+    "python": ("https://python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+    "google.api_core": (
+        "https://googleapis.dev/python/google-api-core/latest/",
+        None,
+    ),
+    "grpc": ("https://grpc.github.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/packages/google-cloud-bigquery-datatransfer/docs/index.rst b/packages/google-cloud-bigquery-datatransfer/docs/index.rst
new file mode 100644
index 000000000000..f9b6a70c2db9
--- /dev/null
+++ b/packages/google-cloud-bigquery-datatransfer/docs/index.rst
@@ -0,0 +1,23 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    bigquery_datatransfer_v1/services
+    bigquery_datatransfer_v1/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-bigquery-datatransfer`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG
diff --git a/packages/google-cloud-bigquery-datatransfer/docs/multiprocessing.rst b/packages/google-cloud-bigquery-datatransfer/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-cloud-bigquery-datatransfer/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py
new file mode 100644
index 000000000000..709e170fd697
--- /dev/null
+++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/__init__.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
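The multiprocessing note above recommends creating client instances only after `os.fork`. A minimal, hypothetical sketch of that pattern (the project path and worker count are illustrative, not part of this change):

```python
# Hypothetical illustration of the multiprocessing.rst guidance: each
# worker process builds its own client *after* the fork, so no gRPC
# channel state is shared across process boundaries.
import multiprocessing

from google.cloud import bigquery_datatransfer_v1


def list_sources(parent: str) -> None:
    # Created after os.fork() (multiprocessing.Process forks on Linux).
    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    for source in client.list_data_sources(parent=parent):
        print(source.data_source_id)


if __name__ == "__main__":
    parent = "projects/my-project"  # hypothetical project path
    workers = [
        multiprocessing.Process(target=list_sources, args=(parent,))
        for _ in range(2)
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
```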
+# +from google.cloud.bigquery_datatransfer import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.async_client import ( + DataTransferServiceAsyncClient, +) +from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.client import ( + DataTransferServiceClient, +) +from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ( + CheckValidCredsRequest, + CheckValidCredsResponse, + CreateTransferConfigRequest, + DataSource, + DataSourceParameter, + DeleteTransferConfigRequest, + DeleteTransferRunRequest, + EnrollDataSourcesRequest, + GetDataSourceRequest, + GetTransferConfigRequest, + GetTransferRunRequest, + ListDataSourcesRequest, + ListDataSourcesResponse, + ListTransferConfigsRequest, + ListTransferConfigsResponse, + ListTransferLogsRequest, + ListTransferLogsResponse, + ListTransferRunsRequest, + ListTransferRunsResponse, + ScheduleTransferRunsRequest, + ScheduleTransferRunsResponse, + StartManualTransferRunsRequest, + StartManualTransferRunsResponse, + UpdateTransferConfigRequest, +) +from google.cloud.bigquery_datatransfer_v1.types.transfer import ( + EmailPreferences, + EncryptionConfiguration, + ScheduleOptions, + TransferConfig, + TransferMessage, + TransferRun, + TransferState, + TransferType, + UserInfo, +) + +__all__ = ( + "DataTransferServiceClient", + "DataTransferServiceAsyncClient", + "CheckValidCredsRequest", + "CheckValidCredsResponse", + "CreateTransferConfigRequest", + "DataSource", + "DataSourceParameter", + "DeleteTransferConfigRequest", + "DeleteTransferRunRequest", + "EnrollDataSourcesRequest", + "GetDataSourceRequest", + "GetTransferConfigRequest", + "GetTransferRunRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "ListTransferConfigsRequest", + "ListTransferConfigsResponse", + "ListTransferLogsRequest", + "ListTransferLogsResponse", + "ListTransferRunsRequest", + "ListTransferRunsResponse", + "ScheduleTransferRunsRequest", + "ScheduleTransferRunsResponse", + "StartManualTransferRunsRequest", + "StartManualTransferRunsResponse", + "UpdateTransferConfigRequest", + "EmailPreferences", + "EncryptionConfiguration", + "ScheduleOptions", + "TransferConfig", + "TransferMessage", + "TransferRun", + "UserInfo", + "TransferState", + "TransferType", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py new file mode 100644 index 000000000000..b5a6e376680a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
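The package-level `__init__.py` above re-exports the versioned clients and types, so user code can import from the versionless namespace. A minimal sketch of what that implies, assuming only the re-exports shown in this diff (the `parent` value is hypothetical):

```python
# Hypothetical sketch: the versionless package re-exports the v1 surface,
# so both import paths resolve to the same class objects.
from google.cloud import bigquery_datatransfer, bigquery_datatransfer_v1

assert (
    bigquery_datatransfer.DataTransferServiceClient
    is bigquery_datatransfer_v1.DataTransferServiceClient
)

# Re-exported types behave identically to their v1 counterparts.
request = bigquery_datatransfer.ListTransferConfigsRequest(
    parent="projects/my-project",  # hypothetical resource name
)
print(type(request).__name__)  # ListTransferConfigsRequest
```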
+# +__version__ = "3.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/py.typed b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/py.typed new file mode 100644 index 000000000000..1bd9d383cee6 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-datatransfer package uses inline types. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py new file mode 100644 index 000000000000..79c9ec369344 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.data_transfer_service import ( + DataTransferServiceAsyncClient, + DataTransferServiceClient, +) +from .types.datatransfer import ( + CheckValidCredsRequest, + CheckValidCredsResponse, + CreateTransferConfigRequest, + DataSource, + DataSourceParameter, + DeleteTransferConfigRequest, + DeleteTransferRunRequest, + EnrollDataSourcesRequest, + GetDataSourceRequest, + GetTransferConfigRequest, + GetTransferRunRequest, + ListDataSourcesRequest, + ListDataSourcesResponse, + ListTransferConfigsRequest, + ListTransferConfigsResponse, + ListTransferLogsRequest, + ListTransferLogsResponse, + ListTransferRunsRequest, + ListTransferRunsResponse, + ScheduleTransferRunsRequest, + ScheduleTransferRunsResponse, + StartManualTransferRunsRequest, + StartManualTransferRunsResponse, + UpdateTransferConfigRequest, +) +from .types.transfer import ( + EmailPreferences, + EncryptionConfiguration, + ScheduleOptions, + TransferConfig, + TransferMessage, + TransferRun, + TransferState, + TransferType, + UserInfo, +) + +__all__ = ( + "DataTransferServiceAsyncClient", + "CheckValidCredsRequest", + "CheckValidCredsResponse", + "CreateTransferConfigRequest", + "DataSource", + "DataSourceParameter", + "DataTransferServiceClient", + "DeleteTransferConfigRequest", + "DeleteTransferRunRequest", + "EmailPreferences", + "EncryptionConfiguration", + "EnrollDataSourcesRequest", + "GetDataSourceRequest", + "GetTransferConfigRequest", + "GetTransferRunRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "ListTransferConfigsRequest", + "ListTransferConfigsResponse", + "ListTransferLogsRequest", + "ListTransferLogsResponse", + "ListTransferRunsRequest", + "ListTransferRunsResponse", + "ScheduleOptions", + "ScheduleTransferRunsRequest", + "ScheduleTransferRunsResponse", + "StartManualTransferRunsRequest", + "StartManualTransferRunsResponse", + "TransferConfig", + 
"TransferMessage", + "TransferRun", + "TransferState", + "TransferType", + "UpdateTransferConfigRequest", + "UserInfo", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json new file mode 100644 index 000000000000..c349f4cf3adf --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json @@ -0,0 +1,253 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery_datatransfer_v1", + "protoPackage": "google.cloud.bigquery.datatransfer.v1", + "schema": "1.0", + "services": { + "DataTransferService": { + "clients": { + "grpc": { + "libraryClient": "DataTransferServiceClient", + "rpcs": { + "CheckValidCreds": { + "methods": [ + "check_valid_creds" + ] + }, + "CreateTransferConfig": { + "methods": [ + "create_transfer_config" + ] + }, + "DeleteTransferConfig": { + "methods": [ + "delete_transfer_config" + ] + }, + "DeleteTransferRun": { + "methods": [ + "delete_transfer_run" + ] + }, + "EnrollDataSources": { + "methods": [ + "enroll_data_sources" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "GetTransferConfig": { + "methods": [ + "get_transfer_config" + ] + }, + "GetTransferRun": { + "methods": [ + "get_transfer_run" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "ListTransferConfigs": { + "methods": [ + "list_transfer_configs" + ] + }, + "ListTransferLogs": { + "methods": [ + "list_transfer_logs" + ] + }, + "ListTransferRuns": { + "methods": [ + "list_transfer_runs" + ] + }, + "ScheduleTransferRuns": { + "methods": [ + "schedule_transfer_runs" + ] + }, + "StartManualTransferRuns": { + "methods": [ + "start_manual_transfer_runs" + ] + }, + "UpdateTransferConfig": { + "methods": [ + "update_transfer_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataTransferServiceAsyncClient", + "rpcs": { + "CheckValidCreds": { + "methods": [ + "check_valid_creds" + ] + }, + "CreateTransferConfig": { + "methods": [ + "create_transfer_config" + ] + }, + "DeleteTransferConfig": { + "methods": [ + "delete_transfer_config" + ] + }, + "DeleteTransferRun": { + "methods": [ + "delete_transfer_run" + ] + }, + "EnrollDataSources": { + "methods": [ + "enroll_data_sources" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "GetTransferConfig": { + "methods": [ + "get_transfer_config" + ] + }, + "GetTransferRun": { + "methods": [ + "get_transfer_run" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "ListTransferConfigs": { + "methods": [ + "list_transfer_configs" + ] + }, + "ListTransferLogs": { + "methods": [ + "list_transfer_logs" + ] + }, + "ListTransferRuns": { + "methods": [ + "list_transfer_runs" + ] + }, + "ScheduleTransferRuns": { + "methods": [ + "schedule_transfer_runs" + ] + }, + "StartManualTransferRuns": { + "methods": [ + "start_manual_transfer_runs" + ] + }, + "UpdateTransferConfig": { + "methods": [ + "update_transfer_config" + ] + } + } + }, + "rest": { + "libraryClient": "DataTransferServiceClient", + "rpcs": { + "CheckValidCreds": { + "methods": [ + "check_valid_creds" + ] + }, + "CreateTransferConfig": { + "methods": [ + "create_transfer_config" + ] + }, + "DeleteTransferConfig": { + "methods": [ + "delete_transfer_config" + ] + }, + 
"DeleteTransferRun": { + "methods": [ + "delete_transfer_run" + ] + }, + "EnrollDataSources": { + "methods": [ + "enroll_data_sources" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "GetTransferConfig": { + "methods": [ + "get_transfer_config" + ] + }, + "GetTransferRun": { + "methods": [ + "get_transfer_run" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "ListTransferConfigs": { + "methods": [ + "list_transfer_configs" + ] + }, + "ListTransferLogs": { + "methods": [ + "list_transfer_logs" + ] + }, + "ListTransferRuns": { + "methods": [ + "list_transfer_runs" + ] + }, + "ScheduleTransferRuns": { + "methods": [ + "schedule_transfer_runs" + ] + }, + "StartManualTransferRuns": { + "methods": [ + "start_manual_transfer_runs" + ] + }, + "UpdateTransferConfig": { + "methods": [ + "update_transfer_config" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py new file mode 100644 index 000000000000..b5a6e376680a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "3.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/py.typed b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/py.typed new file mode 100644 index 000000000000..1bd9d383cee6 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-datatransfer package uses inline types. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py new file mode 100644 index 000000000000..e1e9b28e7afa --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import DataTransferServiceAsyncClient +from .client import DataTransferServiceClient + +__all__ = ( + "DataTransferServiceClient", + "DataTransferServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py new file mode 100644 index 000000000000..8ab0e0d6e02a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -0,0 +1,2101 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
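The service package above exports matching synchronous and asynchronous clients. A minimal sketch of the async variant, assuming only the surface shown in this diff (resource names are hypothetical):

```python
import asyncio

from google.cloud import bigquery_datatransfer_v1


async def main() -> None:
    # Same surface as DataTransferServiceClient, but methods are
    # coroutines and pagers are async-iterable.
    client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
    parent = "projects/my-project/locations/us"  # hypothetical
    pager = await client.list_transfer_configs(parent=parent)
    async for config in pager:
        print(config.name, config.display_name)


if __name__ == "__main__":
    asyncio.run(main())
```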
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + +from .client import DataTransferServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DataTransferServiceTransport +from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport + + +class DataTransferServiceAsyncClient: + """This API allows users to manage their data transfers into + BigQuery. + """ + + _client: DataTransferServiceClient + + DEFAULT_ENDPOINT = DataTransferServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataTransferServiceClient.DEFAULT_MTLS_ENDPOINT + + data_source_path = staticmethod(DataTransferServiceClient.data_source_path) + parse_data_source_path = staticmethod( + DataTransferServiceClient.parse_data_source_path + ) + run_path = staticmethod(DataTransferServiceClient.run_path) + parse_run_path = staticmethod(DataTransferServiceClient.parse_run_path) + transfer_config_path = staticmethod(DataTransferServiceClient.transfer_config_path) + parse_transfer_config_path = staticmethod( + DataTransferServiceClient.parse_transfer_config_path + ) + common_billing_account_path = staticmethod( + DataTransferServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DataTransferServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DataTransferServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DataTransferServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DataTransferServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DataTransferServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DataTransferServiceClient.common_project_path) + parse_common_project_path = staticmethod( + DataTransferServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DataTransferServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DataTransferServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataTransferServiceAsyncClient: The constructed client.
+        """
+        return DataTransferServiceClient.from_service_account_info.__func__(DataTransferServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataTransferServiceAsyncClient: The constructed client.
+        """
+        return DataTransferServiceClient.from_service_account_file.__func__(DataTransferServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return DataTransferServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> DataTransferServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataTransferServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(DataTransferServiceClient).get_transport_class,
+        type(DataTransferServiceClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, DataTransferServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the data transfer service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests.
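The classmethods above build clients from a service-account key. A short, hypothetical sketch (the key path and project id are illustrative):

```python
from google.cloud import bigquery_datatransfer_v1

# Hypothetical key file; from_service_account_json is an alias of
# from_service_account_file, so either name works.
client = bigquery_datatransfer_v1.DataTransferServiceClient.from_service_account_file(
    "service-account.json"
)

# The static path helpers shared by both clients assemble resource names.
parent = client.common_project_path("my-project")  # hypothetical project id
print(parent)  # -> "projects/my-project"
```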
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataTransferServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DataTransferServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_data_source( + self, + request: Optional[Union[datatransfer.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.DataSource: + r"""Retrieves a supported data source and returns its + settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_get_data_source(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest, dict]]): + The request object. A request to get data source info. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/dataSources/{data_source_id}`` + or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.DataSource: + Defines the properties and custom + parameters for a data source. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.GetDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_data_source, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_data_sources( + self, + request: Optional[Union[datatransfer.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesAsyncPager: + r"""Lists supported data sources and returns their + settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_list_data_sources(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest, dict]]): + The request object. Request to list supported data + sources and their data transfer + settings. + parent (:class:`str`): + Required. The BigQuery project id for which data sources + should be returned. Must be in the form: + ``projects/{project_id}`` or + ``projects/{project_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListDataSourcesAsyncPager: + Returns list of supported data + sources and their metadata. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.ListDataSourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_data_sources, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataSourcesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_transfer_config( + self, + request: Optional[Union[datatransfer.CreateTransferConfigRequest, dict]] = None, + *, + parent: Optional[str] = None, + transfer_config: Optional[transfer.TransferConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Creates a new data transfer configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_datatransfer_v1
+
+            async def sample_create_transfer_config():
+                # Create a client
+                client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
+
+                # Initialize request argument(s)
+                transfer_config = bigquery_datatransfer_v1.TransferConfig()
+                transfer_config.destination_dataset_id = "destination_dataset_id_value"
+
+                request = bigquery_datatransfer_v1.CreateTransferConfigRequest(
+                    parent="parent_value",
+                    transfer_config=transfer_config,
+                )
+
+                # Make the request
+                response = await client.create_transfer_config(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]]):
+                The request object. A request to create a data transfer
+                configuration. If new credentials are
+                needed for this transfer configuration,
+                authorization info must be provided. If
+                authorization info is provided, the
+                transfer configuration will be
+                associated with the user id
+                corresponding to the authorization info.
+                Otherwise, the transfer configuration
+                will be associated with the calling
+                user.
+            parent (:class:`str`):
+                Required. The BigQuery project id where the transfer
+                configuration should be created. Must be in the format
+                projects/{project_id}/locations/{location_id} or
+                projects/{project_id}. If the specified location and
+                the location of the destination BigQuery dataset do not
+                match, the request will fail.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`):
+                Required. Data transfer configuration
+                to create.
+
+                This corresponds to the ``transfer_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery_datatransfer_v1.types.TransferConfig:
+                Represents a data transfer configuration. A transfer configuration
+                contains all metadata needed to perform a data
+                transfer. For example, destination_dataset_id
+                specifies where data should be stored. When a new
+                transfer configuration is created, the specified
+                destination_dataset_id is created when needed and
+                shared with the appropriate data source service
+                account.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, transfer_config])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = datatransfer.CreateTransferConfigRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if transfer_config is not None:
+            request.transfer_config = transfer_config
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_transfer_config,
+            default_timeout=30.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_transfer_config(
+        self,
+        request: Optional[Union[datatransfer.UpdateTransferConfigRequest, dict]] = None,
+        *,
+        transfer_config: Optional[transfer.TransferConfig] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> transfer.TransferConfig:
+        r"""Updates a data transfer configuration.
+        All fields must be set, even if they are not updated.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_datatransfer_v1
+
+            async def sample_update_transfer_config():
+                # Create a client
+                client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
+
+                # Initialize request argument(s)
+                transfer_config = bigquery_datatransfer_v1.TransferConfig()
+                transfer_config.destination_dataset_id = "destination_dataset_id_value"
+
+                request = bigquery_datatransfer_v1.UpdateTransferConfigRequest(
+                    transfer_config=transfer_config,
+                )
+
+                # Make the request
+                response = await client.update_transfer_config(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]]):
+                The request object. A request to update a transfer
+                configuration. To update the user id of
+                the transfer configuration,
+                authorization info needs to be provided.
+            transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`):
+                Required. Data transfer configuration
+                to update.
+
+                This corresponds to the ``transfer_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required. The list of fields to
+                update in this request.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery_datatransfer_v1.types.TransferConfig:
+                Represents a data transfer configuration.
A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([transfer_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.UpdateTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if transfer_config is not None: + request.transfer_config = transfer_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_transfer_config, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("transfer_config.name", request.transfer_config.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_transfer_config( + self, + request: Optional[Union[datatransfer.DeleteTransferConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a data transfer configuration, including any + associated transfer runs and logs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_delete_transfer_config(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.DeleteTransferConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_transfer_config(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest, dict]]): + The request object. A request to delete data transfer + information. All associated transfer + runs and log messages will be deleted as + well. + name (:class:`str`): + Required. 
The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.DeleteTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_transfer_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_transfer_config( + self, + request: Optional[Union[datatransfer.GetTransferConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Returns information about a data transfer config. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_get_transfer_config(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.GetTransferConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_transfer_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.GetTransferConfigRequest, dict]]): + The request object. A request to get data transfer + information. + name (:class:`str`): + Required. 
The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.GetTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_transfer_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_transfer_configs( + self, + request: Optional[Union[datatransfer.ListTransferConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferConfigsAsyncPager: + r"""Returns information about all transfer configs owned + by a project in the specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_list_transfer_configs(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.ListTransferConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transfer_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest, dict]]): + The request object. A request to list data transfers + configured for a BigQuery project. + parent (:class:`str`): + Required. The BigQuery project id for which transfer + configs should be returned: ``projects/{project_id}`` or + ``projects/{project_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferConfigsAsyncPager: + The returned list of pipelines in the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.ListTransferConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_transfer_configs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTransferConfigsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def schedule_transfer_runs( + self, + request: Optional[Union[datatransfer.ScheduleTransferRunsRequest, dict]] = None, + *, + parent: Optional[str] = None, + start_time: Optional[timestamp_pb2.Timestamp] = None, + end_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ScheduleTransferRunsResponse: + r"""Creates transfer runs for a time range [start_time, end_time]. + For each date - or whatever granularity the data source supports + - in the range, one transfer run is created. Note that runs are + created per UTC time in the time range. DEPRECATED: use + StartManualTransferRuns instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_schedule_transfer_runs(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.ScheduleTransferRunsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.schedule_transfer_runs(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsRequest, dict]]): + The request object. A request to schedule transfer runs + for a time range. + parent (:class:`str`): + Required. Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + start_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. Start time of the range of transfer runs. For + example, ``"2017-05-25T00:00:00+00:00"``. + + This corresponds to the ``start_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + end_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. End time of the range of transfer runs. For + example, ``"2017-05-30T00:00:00+00:00"``. + + This corresponds to the ``end_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse: + A response to schedule transfer runs + for a time range. + + """ + warnings.warn( + "DataTransferServiceAsyncClient.schedule_transfer_runs is deprecated", + DeprecationWarning, + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, start_time, end_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.ScheduleTransferRunsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if start_time is not None: + request.start_time = start_time + if end_time is not None: + request.end_time = end_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.schedule_transfer_runs, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def start_manual_transfer_runs( + self, + request: Optional[ + Union[datatransfer.StartManualTransferRunsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.StartManualTransferRunsResponse: + r"""Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a + time range where the run_time is between start_time (inclusive) + and end_time (exclusive), or for a specific run_time. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_start_manual_transfer_runs(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.StartManualTransferRunsRequest( + ) + + # Make the request + response = await client.start_manual_transfer_runs(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest, dict]]): + The request object. A request to start manual transfer + runs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse: + A response to start manual transfer + runs. + + """ + # Create or coerce a protobuf request object. + request = datatransfer.StartManualTransferRunsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
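+        # Note that `default_timeout=None` below leaves this RPC without a
+        # client-side deadline by default; callers can still pass an explicit
+        # `timeout=` argument on a per-call basis.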
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_manual_transfer_runs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_transfer_run( + self, + request: Optional[Union[datatransfer.GetTransferRunRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferRun: + r"""Returns information about the particular transfer + run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_get_transfer_run(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.GetTransferRunRequest( + name="name_value", + ) + + # Make the request + response = await client.get_transfer_run(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.GetTransferRunRequest, dict]]): + The request object. A request to get data transfer run + information. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.TransferRun: + Represents a data transfer run. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.GetTransferRunRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_transfer_run, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_transfer_run( + self, + request: Optional[Union[datatransfer.DeleteTransferRunRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified transfer run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_delete_transfer_run(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.DeleteTransferRunRequest( + name="name_value", + ) + + # Make the request + await client.delete_transfer_run(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferRunRequest, dict]]): + The request object. A request to delete data transfer run + information. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.DeleteTransferRunRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
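+        # The default retry below backs off exponentially: the first delay is
+        # 0.1s, and each subsequent delay grows by a factor of 1.3 (0.1s,
+        # 0.13s, 0.169s, ...) up to a 60s cap, stopping once the 20s overall
+        # deadline is exhausted. Only DeadlineExceeded and ServiceUnavailable
+        # errors are considered retryable.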
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_transfer_run,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=20.0,
+            ),
+            default_timeout=20.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def list_transfer_runs(
+        self,
+        request: Optional[Union[datatransfer.ListTransferRunsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListTransferRunsAsyncPager:
+        r"""Returns information about running and completed
+        transfer runs.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_datatransfer_v1
+
+            async def sample_list_transfer_runs():
+                # Create a client
+                client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = bigquery_datatransfer_v1.ListTransferRunsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_transfer_runs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest, dict]]):
+                The request object. A request to list data transfer runs.
+            parent (:class:`str`):
+                Required. Name of transfer configuration for which
+                transfer runs should be retrieved. Format of transfer
+                configuration resource name is:
+                ``projects/{project_id}/transferConfigs/{config_id}`` or
+                ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferRunsAsyncPager:
+                The returned list of pipelines in the
+                project.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = datatransfer.ListTransferRunsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_transfer_runs,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=20.0,
+            ),
+            default_timeout=20.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListTransferRunsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_transfer_logs(
+        self,
+        request: Optional[Union[datatransfer.ListTransferLogsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListTransferLogsAsyncPager:
+        r"""Returns log messages for the transfer run.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_datatransfer_v1
+
+            async def sample_list_transfer_logs():
+                # Create a client
+                client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = bigquery_datatransfer_v1.ListTransferLogsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_transfer_logs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest, dict]]):
+                The request object. A request to get user facing log
+                messages associated with data transfer
+                run.
+            parent (:class:`str`):
+                Required. Transfer run name in the form:
+                ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}``
+                or
+                ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferLogsAsyncPager: + The returned list transfer run + messages. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.ListTransferLogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_transfer_logs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTransferLogsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def check_valid_creds( + self, + request: Optional[Union[datatransfer.CheckValidCredsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.CheckValidCredsResponse: + r"""Returns true if valid credentials exist for the given + data source and requesting user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_check_valid_creds(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.CheckValidCredsRequest( + name="name_value", + ) + + # Make the request + response = await client.check_valid_creds(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest, dict]]): + The request object. A request to determine whether the + user has valid credentials. This method + is used to limit the number of OAuth + popups in the user interface. 
The user
+                id is inferred from the API call
+                context. If the data source has the
+                Google+ authorization type, this method
+                returns false, as it cannot be
+                determined whether the credentials are
+                already valid merely based on the user
+                id.
+            name (:class:`str`):
+                Required. The data source in the form:
+                ``projects/{project_id}/dataSources/{data_source_id}``
+                or
+                ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse:
+                A response indicating whether the
+                credentials exist and are valid.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = datatransfer.CheckValidCredsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.check_valid_creds,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=20.0,
+            ),
+            default_timeout=20.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def enroll_data_sources(
+        self,
+        request: Optional[Union[datatransfer.EnrollDataSourcesRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Enroll data sources in a user project. This allows users to
+        create transfer configurations for these data sources. They will
+        also appear in the ListDataSources RPC and as such, will appear
+        in the `BigQuery
+        UI <https://console.cloud.google.com/bigquery>`__, and the
+        documents can be found in the public guide for `BigQuery Web
+        UI <https://cloud.google.com/bigquery/bigquery-web-ui>`__ and
+        `Data Transfer
+        Service <https://cloud.google.com/bigquery/docs/working-with-transfers>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + async def sample_enroll_data_sources(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.EnrollDataSourcesRequest( + ) + + # Make the request + await client.enroll_data_sources(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.EnrollDataSourcesRequest, dict]]): + The request object. A request to enroll a set of data sources so they are + visible in the BigQuery UI's ``Transfer`` tab. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = datatransfer.EnrollDataSourcesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.enroll_data_sources, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DataTransferServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DataTransferServiceAsyncClient",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py new file mode 100644 index 000000000000..524f8f0171b6 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -0,0 +1,2271 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + +from .transports.base import DEFAULT_CLIENT_INFO, DataTransferServiceTransport +from .transports.grpc import DataTransferServiceGrpcTransport +from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport +from .transports.rest import DataTransferServiceRestTransport + + +class DataTransferServiceClientMeta(type): + """Metaclass for the DataTransferService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DataTransferServiceTransport]] + _transport_registry["grpc"] = DataTransferServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataTransferServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DataTransferServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DataTransferServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataTransferServiceClient(metaclass=DataTransferServiceClientMeta): + """This API allows users to manage their data transfers into + BigQuery. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
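+            For example, "bigquerydatatransfer.googleapis.com" is converted
+            to "bigquerydatatransfer.mtls.googleapis.com".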
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "bigquerydatatransfer.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTransferServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTransferServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataTransferServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataTransferServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def data_source_path( + project: str, + data_source: str, + ) -> str: + """Returns a fully-qualified data_source string.""" + return "projects/{project}/dataSources/{data_source}".format( + project=project, + data_source=data_source, + ) + + @staticmethod + def parse_data_source_path(path: str) -> Dict[str, str]: + """Parses a data_source path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/dataSources/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def run_path( + project: str, + transfer_config: str, + run: str, + ) -> str: + """Returns a fully-qualified run string.""" + return "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format( + project=project, + transfer_config=transfer_config, + run=run, + ) + + @staticmethod + def parse_run_path(path: str) -> Dict[str, str]: + """Parses a run path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/transferConfigs/(?P.+?)/runs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def transfer_config_path( + project: str, + transfer_config: str, + ) -> str: + """Returns a fully-qualified transfer_config string.""" + return "projects/{project}/transferConfigs/{transfer_config}".format( + project=project, + transfer_config=transfer_config, + ) + + @staticmethod + def parse_transfer_config_path(path: str) -> Dict[str, str]: + """Parses a transfer_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/transferConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( 
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, DataTransferServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the data transfer service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests.
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DataTransferServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DataTransferServiceTransport): + # transport is a DataTransferServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_data_source( + self, + request: Optional[Union[datatransfer.GetDataSourceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.DataSource: + r"""Retrieves a supported data source and returns its + settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_get_data_source(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.GetDataSourceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest, dict]): + The request object. A request to get data source info. + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/dataSources/{data_source_id}`` + or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.DataSource: + Defines the properties and custom + parameters for a data source. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.GetDataSourceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
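+        # (A plain dict passed as `request` is coerced into the proto-plus
+        # request type by the constructor call below.)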
+ if not isinstance(request, datatransfer.GetDataSourceRequest): + request = datatransfer.GetDataSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_sources( + self, + request: Optional[Union[datatransfer.ListDataSourcesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesPager: + r"""Lists supported data sources and returns their + settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_list_data_sources(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.ListDataSourcesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_sources(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest, dict]): + The request object. Request to list supported data + sources and their data transfer + settings. + parent (str): + Required. The BigQuery project id for which data sources + should be returned. Must be in the form: + ``projects/{project_id}`` or + ``projects/{project_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListDataSourcesPager: + Returns list of supported data + sources and their metadata. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ListDataSourcesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ListDataSourcesRequest): + request = datatransfer.ListDataSourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_sources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataSourcesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_transfer_config( + self, + request: Optional[Union[datatransfer.CreateTransferConfigRequest, dict]] = None, + *, + parent: Optional[str] = None, + transfer_config: Optional[transfer.TransferConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Creates a new data transfer configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_create_transfer_config(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + transfer_config = bigquery_datatransfer_v1.TransferConfig() + transfer_config.destination_dataset_id = "destination_dataset_id_value" + + request = bigquery_datatransfer_v1.CreateTransferConfigRequest( + parent="parent_value", + transfer_config=transfer_config, + ) + + # Make the request + response = client.create_transfer_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]): + The request object. A request to create a data transfer + configuration. If new credentials are + needed for this transfer configuration, + authorization info must be provided. If + authorization info is provided, the + transfer configuration will be + associated with the user id + corresponding to the authorization info. + Otherwise, the transfer configuration + will be associated with the calling + user. + parent (str): + Required. The BigQuery project id where the transfer + configuration should be created. Must be in the format + projects/{project_id}/locations/{location_id} or + projects/{project_id}. 
If specified location and + location of the destination bigquery dataset do not + match - the request will fail. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): + Required. Data transfer configuration + to create. + + This corresponds to the ``transfer_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, transfer_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.CreateTransferConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.CreateTransferConfigRequest): + request = datatransfer.CreateTransferConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if transfer_config is not None: + request.transfer_config = transfer_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_transfer_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_transfer_config( + self, + request: Optional[Union[datatransfer.UpdateTransferConfigRequest, dict]] = None, + *, + transfer_config: Optional[transfer.TransferConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Updates a data transfer configuration. + All fields must be set, even if they are not updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_datatransfer_v1
+
+ def sample_update_transfer_config():
+ # Create a client
+ client = bigquery_datatransfer_v1.DataTransferServiceClient()
+
+ # Initialize request argument(s)
+ transfer_config = bigquery_datatransfer_v1.TransferConfig()
+ transfer_config.destination_dataset_id = "destination_dataset_id_value"
+
+ request = bigquery_datatransfer_v1.UpdateTransferConfigRequest(
+ transfer_config=transfer_config,
+ )
+
+ # Make the request
+ response = client.update_transfer_config(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]):
+ The request object. A request to update a transfer
+ configuration. To update the user id of
+ the transfer configuration,
+ authorization info needs to be provided.
+ transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig):
+ Required. Data transfer configuration
+ to update.
+
+ This corresponds to the ``transfer_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Required. List of fields to
+ be updated in this request.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.bigquery_datatransfer_v1.types.TransferConfig:
+ Represents a data transfer configuration. A transfer configuration
+ contains all metadata needed to perform a data
+ transfer. For example, destination_dataset_id
+ specifies where data should be stored. When a new
+ transfer configuration is created, the specified
+ destination_dataset_id is created when needed and
+ shared with the appropriate data source service
+ account.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([transfer_config, update_mask])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a datatransfer.UpdateTransferConfigRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, datatransfer.UpdateTransferConfigRequest):
+ request = datatransfer.UpdateTransferConfigRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if transfer_config is not None:
+ request.transfer_config = transfer_config
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
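+ # (The pre-wrapped method carries this RPC's default retry and timeout
+ # policy from the transport; explicit ``retry``/``timeout`` arguments
+ # only override those defaults.)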
+ rpc = self._transport._wrapped_methods[self._transport.update_transfer_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("transfer_config.name", request.transfer_config.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_transfer_config( + self, + request: Optional[Union[datatransfer.DeleteTransferConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a data transfer configuration, including any + associated transfer runs and logs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_delete_transfer_config(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.DeleteTransferConfigRequest( + name="name_value", + ) + + # Make the request + client.delete_transfer_config(request=request) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest, dict]): + The request object. A request to delete data transfer + information. All associated transfer + runs and log messages will be deleted as + well. + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.DeleteTransferConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.DeleteTransferConfigRequest): + request = datatransfer.DeleteTransferConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
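+ # (DeleteTransferConfig returns google.protobuf.Empty, so the call below
+ # produces no response object and this method returns None.)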
+ rpc = self._transport._wrapped_methods[self._transport.delete_transfer_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_transfer_config( + self, + request: Optional[Union[datatransfer.GetTransferConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Returns information about a data transfer config. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_get_transfer_config(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.GetTransferConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_transfer_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.GetTransferConfigRequest, dict]): + The request object. A request to get data transfer + information. + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.TransferConfig: + Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data + transfer. For example, destination_dataset_id + specifies where data should be stored. When a new + transfer configuration is created, the specified + destination_dataset_id is created when needed and + shared with the appropriate data source service + account. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.GetTransferConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
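+ # For example (hypothetical resource name), these two calls are
+ # equivalent:
+ #   client.get_transfer_config(name="projects/my-project/transferConfigs/my-config")
+ #   client.get_transfer_config(request={"name": "projects/my-project/transferConfigs/my-config"})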
+ if not isinstance(request, datatransfer.GetTransferConfigRequest): + request = datatransfer.GetTransferConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_transfer_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_transfer_configs( + self, + request: Optional[Union[datatransfer.ListTransferConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferConfigsPager: + r"""Returns information about all transfer configs owned + by a project in the specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_list_transfer_configs(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.ListTransferConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transfer_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest, dict]): + The request object. A request to list data transfers + configured for a BigQuery project. + parent (str): + Required. The BigQuery project id for which transfer + configs should be returned: ``projects/{project_id}`` or + ``projects/{project_id}/locations/{location_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferConfigsPager: + The returned list of pipelines in the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
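+ # (``parent`` is a "flattened" convenience argument; it is mutually
+ # exclusive with passing a fully populated request object.)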
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ListTransferConfigsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ListTransferConfigsRequest): + request = datatransfer.ListTransferConfigsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_transfer_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTransferConfigsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def schedule_transfer_runs( + self, + request: Optional[Union[datatransfer.ScheduleTransferRunsRequest, dict]] = None, + *, + parent: Optional[str] = None, + start_time: Optional[timestamp_pb2.Timestamp] = None, + end_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ScheduleTransferRunsResponse: + r"""Creates transfer runs for a time range [start_time, end_time]. + For each date - or whatever granularity the data source supports + - in the range, one transfer run is created. Note that runs are + created per UTC time in the time range. DEPRECATED: use + StartManualTransferRuns instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_schedule_transfer_runs(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.ScheduleTransferRunsRequest( + parent="parent_value", + ) + + # Make the request + response = client.schedule_transfer_runs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsRequest, dict]): + The request object. A request to schedule transfer runs + for a time range. + parent (str): + Required. Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Start time of the range of transfer runs. For + example, ``"2017-05-25T00:00:00+00:00"``. + + This corresponds to the ``start_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Required. End time of the range of transfer runs. For + example, ``"2017-05-30T00:00:00+00:00"``. + + This corresponds to the ``end_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse: + A response to schedule transfer runs + for a time range. + + """ + warnings.warn( + "DataTransferServiceClient.schedule_transfer_runs is deprecated", + DeprecationWarning, + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, start_time, end_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ScheduleTransferRunsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ScheduleTransferRunsRequest): + request = datatransfer.ScheduleTransferRunsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if start_time is not None: + request.start_time = start_time + if end_time is not None: + request.end_time = end_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.schedule_transfer_runs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def start_manual_transfer_runs( + self, + request: Optional[ + Union[datatransfer.StartManualTransferRunsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.StartManualTransferRunsResponse: + r"""Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a + time range where the run_time is between start_time (inclusive) + and end_time (exclusive), or for a specific run_time. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_start_manual_transfer_runs(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.StartManualTransferRunsRequest( + ) + + # Make the request + response = client.start_manual_transfer_runs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest, dict]): + The request object. A request to start manual transfer + runs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse: + A response to start manual transfer + runs. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.StartManualTransferRunsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.StartManualTransferRunsRequest): + request = datatransfer.StartManualTransferRunsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.start_manual_transfer_runs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_transfer_run( + self, + request: Optional[Union[datatransfer.GetTransferRunRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferRun: + r"""Returns information about the particular transfer + run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_get_transfer_run(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.GetTransferRunRequest( + name="name_value", + ) + + # Make the request + response = client.get_transfer_run(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.GetTransferRunRequest, dict]): + The request object. A request to get data transfer run + information. + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.TransferRun: + Represents a data transfer run. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.GetTransferRunRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.GetTransferRunRequest): + request = datatransfer.GetTransferRunRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_transfer_run] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_transfer_run( + self, + request: Optional[Union[datatransfer.DeleteTransferRunRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified transfer run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_delete_transfer_run(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.DeleteTransferRunRequest( + name="name_value", + ) + + # Make the request + client.delete_transfer_run(request=request) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferRunRequest, dict]): + The request object. A request to delete data transfer run + information. + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.DeleteTransferRunRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.DeleteTransferRunRequest): + request = datatransfer.DeleteTransferRunRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_transfer_run] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_transfer_runs( + self, + request: Optional[Union[datatransfer.ListTransferRunsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferRunsPager: + r"""Returns information about running and completed + transfer runs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_list_transfer_runs(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.ListTransferRunsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transfer_runs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest, dict]): + The request object. A request to list data transfer runs. + parent (str): + Required. Name of transfer configuration for which + transfer runs should be retrieved. Format of transfer + configuration resource name is: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferRunsPager: + The returned list of pipelines in the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ListTransferRunsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ListTransferRunsRequest): + request = datatransfer.ListTransferRunsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_transfer_runs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTransferRunsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_transfer_logs( + self, + request: Optional[Union[datatransfer.ListTransferLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferLogsPager: + r"""Returns log messages for the transfer run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_list_transfer_logs(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.ListTransferLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_transfer_logs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest, dict]): + The request object. A request to get user facing log + messages associated with data transfer + run. + parent (str): + Required. Transfer run name in the form: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferLogsPager: + The returned list transfer run + messages. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ListTransferLogsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ListTransferLogsRequest): + request = datatransfer.ListTransferLogsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_transfer_logs] + + # Certain fields should be provided within the metadata header; + # add these here. 
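+ # (These key/value pairs are serialized into the ``x-goog-request-params``
+ # request header, which the service uses to route the call to the
+ # correct regional backend.)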
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTransferLogsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def check_valid_creds( + self, + request: Optional[Union[datatransfer.CheckValidCredsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.CheckValidCredsResponse: + r"""Returns true if valid credentials exist for the given + data source and requesting user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_datatransfer_v1 + + def sample_check_valid_creds(): + # Create a client + client = bigquery_datatransfer_v1.DataTransferServiceClient() + + # Initialize request argument(s) + request = bigquery_datatransfer_v1.CheckValidCredsRequest( + name="name_value", + ) + + # Make the request + response = client.check_valid_creds(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest, dict]): + The request object. A request to determine whether the + user has valid credentials. This method + is used to limit the number of OAuth + popups in the user interface. The user + id is inferred from the API call + context. If the data source has the + Google+ authorization type, this method + returns false, as it cannot be + determined whether the credentials are + already valid merely based on the user + id. + name (str): + Required. The data source in the form: + ``projects/{project_id}/dataSources/{data_source_id}`` + or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse: + A response indicating whether the + credentials exist and are valid. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.CheckValidCredsRequest. 
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, datatransfer.CheckValidCredsRequest):
+ request = datatransfer.CheckValidCredsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.check_valid_creds]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def enroll_data_sources(
+ self,
+ request: Optional[Union[datatransfer.EnrollDataSourcesRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Enroll data sources in a user project. This allows users to
+ create transfer configurations for these data sources. They will
+ also appear in the ListDataSources RPC and as such, will appear
+ in the `BigQuery
+ UI <https://console.cloud.google.com/bigquery>`__, and the
+ documents can be found in the public guide for `BigQuery Web
+ UI <https://cloud.google.com/bigquery/bigquery-web-ui>`__ and
+ `Data Transfer
+ Service <https://cloud.google.com/bigquery/docs/working-with-transfers>`__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_datatransfer_v1
+
+ def sample_enroll_data_sources():
+ # Create a client
+ client = bigquery_datatransfer_v1.DataTransferServiceClient()
+
+ # Initialize request argument(s)
+ request = bigquery_datatransfer_v1.EnrollDataSourcesRequest(
+ )
+
+ # Make the request
+ client.enroll_data_sources(request=request)
+
+ Args:
+ request (Union[google.cloud.bigquery_datatransfer_v1.types.EnrollDataSourcesRequest, dict]):
+ The request object. A request to enroll a set of data sources so they are
+ visible in the BigQuery UI's ``Transfer`` tab.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Minor optimization to avoid making a copy if the user passes
+ # in a datatransfer.EnrollDataSourcesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, datatransfer.EnrollDataSourcesRequest):
+ request = datatransfer.EnrollDataSourcesRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.enroll_data_sources]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
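+ # (For EnrollDataSources the routing key is ``name``, the enrolling
+ # project or location resource, rather than ``parent``.)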
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "DataTransferServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
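+ # (As with get_location above, this mixin method is wrapped on the fly
+ # rather than looked up from the transport's pre-wrapped methods.)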
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DataTransferServiceClient",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py new file mode 100644 index 000000000000..a140fdf32990 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py @@ -0,0 +1,539 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + + +class ListDataSourcesPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datatransfer.ListDataSourcesResponse], + request: datatransfer.ListDataSourcesRequest, + response: datatransfer.ListDataSourcesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datatransfer.ListDataSourcesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datatransfer.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datatransfer.DataSource]: + for page in self.pages: + yield from page.data_sources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourcesAsyncPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datatransfer.ListDataSourcesResponse]], + request: datatransfer.ListDataSourcesRequest, + response: datatransfer.ListDataSourcesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest): + The initial request object. + response (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListDataSourcesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datatransfer.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[datatransfer.DataSource]: + async def async_generator(): + async for page in self.pages: + for response in page.data_sources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferConfigsPager: + """A pager for iterating through ``list_transfer_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transfer_configs`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListTransferConfigs`` requests and continue to iterate + through the ``transfer_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datatransfer.ListTransferConfigsResponse], + request: datatransfer.ListTransferConfigsRequest, + response: datatransfer.ListTransferConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest): + The initial request object. + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListTransferConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datatransfer.ListTransferConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[transfer.TransferConfig]: + for page in self.pages: + yield from page.transfer_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferConfigsAsyncPager: + """A pager for iterating through ``list_transfer_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transfer_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTransferConfigs`` requests and continue to iterate + through the ``transfer_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datatransfer.ListTransferConfigsResponse]], + request: datatransfer.ListTransferConfigsRequest, + response: datatransfer.ListTransferConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest): + The initial request object. + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datatransfer.ListTransferConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datatransfer.ListTransferConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[transfer.TransferConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.transfer_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferRunsPager: + """A pager for iterating through ``list_transfer_runs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transfer_runs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTransferRuns`` requests and continue to iterate + through the ``transfer_runs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datatransfer.ListTransferRunsResponse], + request: datatransfer.ListTransferRunsRequest, + response: datatransfer.ListTransferRunsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest): + The initial request object. + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListTransferRunsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datatransfer.ListTransferRunsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[transfer.TransferRun]: + for page in self.pages: + yield from page.transfer_runs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferRunsAsyncPager: + """A pager for iterating through ``list_transfer_runs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transfer_runs`` field. 
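The async variant differs only in that the client call is awaited and iteration uses ``async for``. A minimal sketch, again with a placeholder resource name:

```python
import asyncio

from google.cloud import bigquery_datatransfer_v1


async def main():
    client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
    # The RPC itself is awaited; the returned AsyncPager is then
    # consumed with `async for`, which drives the `pages` generator.
    pager = await client.list_transfer_configs(
        parent="projects/my-project/locations/us"
    )
    async for config in pager:
        print(config.display_name)


asyncio.run(main())
```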
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListTransferRuns`` requests and continue to iterate + through the ``transfer_runs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datatransfer.ListTransferRunsResponse]], + request: datatransfer.ListTransferRunsRequest, + response: datatransfer.ListTransferRunsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest): + The initial request object. + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListTransferRunsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datatransfer.ListTransferRunsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[transfer.TransferRun]: + async def async_generator(): + async for page in self.pages: + for response in page.transfer_runs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferLogsPager: + """A pager for iterating through ``list_transfer_logs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transfer_messages`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTransferLogs`` requests and continue to iterate + through the ``transfer_messages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datatransfer.ListTransferLogsResponse], + request: datatransfer.ListTransferLogsRequest, + response: datatransfer.ListTransferLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest): + The initial request object. + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datatransfer.ListTransferLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datatransfer.ListTransferLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[transfer.TransferMessage]: + for page in self.pages: + yield from page.transfer_messages + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferLogsAsyncPager: + """A pager for iterating through ``list_transfer_logs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transfer_messages`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTransferLogs`` requests and continue to iterate + through the ``transfer_messages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datatransfer.ListTransferLogsResponse]], + request: datatransfer.ListTransferLogsRequest, + response: datatransfer.ListTransferLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest): + The initial request object. + response (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datatransfer.ListTransferLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datatransfer.ListTransferLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[transfer.TransferMessage]: + async def async_generator(): + async for page in self.pages: + for response in page.transfer_messages: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py new file mode 100644 index 000000000000..c55048b97000 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataTransferServiceTransport +from .grpc import DataTransferServiceGrpcTransport +from .grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport +from .rest import DataTransferServiceRestInterceptor, DataTransferServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[DataTransferServiceTransport]] +_transport_registry["grpc"] = DataTransferServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DataTransferServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DataTransferServiceRestTransport + +__all__ = ( + "DataTransferServiceTransport", + "DataTransferServiceGrpcTransport", + "DataTransferServiceGrpcAsyncIOTransport", + "DataTransferServiceRestTransport", + "DataTransferServiceRestInterceptor", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py new file mode 100644 index 000000000000..10a7b7a664eb --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py @@ -0,0 +1,495 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DataTransferServiceTransport(abc.ABC): + """Abstract transport class for DataTransferService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "bigquerydatatransfer.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
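The registry above is what lets callers select a transport by its string label. As a sketch (this mirrors the standard GAPIC pattern, in which the client metaclass consults the registry via ``get_transport_class``):

```python
from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import (
    DataTransferServiceClient,
)

# Resolve a transport class by label; with no label, the default
# (first) entry in the registry is returned.
transport_cls = DataTransferServiceClient.get_transport_class("rest")
print(transport_cls.__name__)  # DataTransferServiceRestTransport
```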
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
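The ``_wrapped_methods`` table that follows attaches per-method defaults. For the idempotent reads it uses an exponential retry like the standalone sketch below: delays start at 0.1 s, grow by 1.3x up to 60 s per attempt, and the call is abandoned once the 20 s deadline passes.

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

policy = retries.Retry(
    initial=0.1,      # first backoff delay, in seconds
    maximum=60.0,     # cap on any single delay
    multiplier=1.3,   # exponential growth factor between attempts
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=20.0,    # total time budget across all attempts
)


@policy
def flaky_rpc():
    """Stand-in for an RPC; retried only on the exceptions above."""
```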
+ self._wrapped_methods = { + self.get_data_source: gapic_v1.method.wrap_method( + self.get_data_source, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method.wrap_method( + self.list_data_sources, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_transfer_config: gapic_v1.method.wrap_method( + self.create_transfer_config, + default_timeout=30.0, + client_info=client_info, + ), + self.update_transfer_config: gapic_v1.method.wrap_method( + self.update_transfer_config, + default_timeout=30.0, + client_info=client_info, + ), + self.delete_transfer_config: gapic_v1.method.wrap_method( + self.delete_transfer_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_transfer_config: gapic_v1.method.wrap_method( + self.get_transfer_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_transfer_configs: gapic_v1.method.wrap_method( + self.list_transfer_configs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.schedule_transfer_runs: gapic_v1.method.wrap_method( + self.schedule_transfer_runs, + default_timeout=30.0, + client_info=client_info, + ), + self.start_manual_transfer_runs: gapic_v1.method.wrap_method( + self.start_manual_transfer_runs, + default_timeout=None, + client_info=client_info, + ), + self.get_transfer_run: gapic_v1.method.wrap_method( + self.get_transfer_run, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.delete_transfer_run: gapic_v1.method.wrap_method( + self.delete_transfer_run, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_transfer_runs: gapic_v1.method.wrap_method( + self.list_transfer_runs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_transfer_logs: 
gapic_v1.method.wrap_method( + self.list_transfer_logs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.check_valid_creds: gapic_v1.method.wrap_method( + self.check_valid_creds, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.enroll_data_sources: gapic_v1.method.wrap_method( + self.enroll_data_sources, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_data_source( + self, + ) -> Callable[ + [datatransfer.GetDataSourceRequest], + Union[datatransfer.DataSource, Awaitable[datatransfer.DataSource]], + ]: + raise NotImplementedError() + + @property + def list_data_sources( + self, + ) -> Callable[ + [datatransfer.ListDataSourcesRequest], + Union[ + datatransfer.ListDataSourcesResponse, + Awaitable[datatransfer.ListDataSourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_transfer_config( + self, + ) -> Callable[ + [datatransfer.CreateTransferConfigRequest], + Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]], + ]: + raise NotImplementedError() + + @property + def update_transfer_config( + self, + ) -> Callable[ + [datatransfer.UpdateTransferConfigRequest], + Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]], + ]: + raise NotImplementedError() + + @property + def delete_transfer_config( + self, + ) -> Callable[ + [datatransfer.DeleteTransferConfigRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_transfer_config( + self, + ) -> Callable[ + [datatransfer.GetTransferConfigRequest], + Union[transfer.TransferConfig, Awaitable[transfer.TransferConfig]], + ]: + raise NotImplementedError() + + @property + def list_transfer_configs( + self, + ) -> Callable[ + [datatransfer.ListTransferConfigsRequest], + Union[ + datatransfer.ListTransferConfigsResponse, + Awaitable[datatransfer.ListTransferConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def schedule_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ScheduleTransferRunsRequest], + Union[ + datatransfer.ScheduleTransferRunsResponse, + Awaitable[datatransfer.ScheduleTransferRunsResponse], + ], + ]: + raise NotImplementedError() + + @property + def start_manual_transfer_runs( + self, + ) -> Callable[ + [datatransfer.StartManualTransferRunsRequest], + Union[ + datatransfer.StartManualTransferRunsResponse, + Awaitable[datatransfer.StartManualTransferRunsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_transfer_run( + self, + ) -> Callable[ + [datatransfer.GetTransferRunRequest], + Union[transfer.TransferRun, Awaitable[transfer.TransferRun]], + ]: + raise NotImplementedError() + + @property + def delete_transfer_run( + self, + ) -> Callable[ + [datatransfer.DeleteTransferRunRequest], + Union[empty_pb2.Empty, 
Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ListTransferRunsRequest], + Union[ + datatransfer.ListTransferRunsResponse, + Awaitable[datatransfer.ListTransferRunsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_transfer_logs( + self, + ) -> Callable[ + [datatransfer.ListTransferLogsRequest], + Union[ + datatransfer.ListTransferLogsResponse, + Awaitable[datatransfer.ListTransferLogsResponse], + ], + ]: + raise NotImplementedError() + + @property + def check_valid_creds( + self, + ) -> Callable[ + [datatransfer.CheckValidCredsRequest], + Union[ + datatransfer.CheckValidCredsResponse, + Awaitable[datatransfer.CheckValidCredsResponse], + ], + ]: + raise NotImplementedError() + + @property + def enroll_data_sources( + self, + ) -> Callable[ + [datatransfer.EnrollDataSourcesRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DataTransferServiceTransport",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py new file mode 100644 index 000000000000..0b1709516d29 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py @@ -0,0 +1,708 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + +from .base import DEFAULT_CLIENT_INFO, DataTransferServiceTransport + + +class DataTransferServiceGrpcTransport(DataTransferServiceTransport): + """gRPC backend transport for DataTransferService. + + This API allows users to manage their data transfers into + BigQuery. 
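A transport is normally constructed for you by the client, but it can be instantiated directly. A sketch, using anonymous credentials to sidestep the ADC lookup (no RPC is made; ``_host`` is an internal attribute, shown here only to illustrate the port-443 defaulting in the base class):

```python
from google.auth.credentials import AnonymousCredentials

from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports import (
    DataTransferServiceGrpcTransport,
)

transport = DataTransferServiceGrpcTransport(credentials=AnonymousCredentials())
print(transport.kind)   # "grpc"
print(transport._host)  # "bigquerydatatransfer.googleapis.com:443"
```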
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def get_data_source( + self, + ) -> Callable[[datatransfer.GetDataSourceRequest], datatransfer.DataSource]: + r"""Return a callable for the get data source method over gRPC. + + Retrieves a supported data source and returns its + settings. + + Returns: + Callable[[~.GetDataSourceRequest], + ~.DataSource]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", + request_serializer=datatransfer.GetDataSourceRequest.serialize, + response_deserializer=datatransfer.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [datatransfer.ListDataSourcesRequest], datatransfer.ListDataSourcesResponse + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists supported data sources and returns their + settings. + + Returns: + Callable[[~.ListDataSourcesRequest], + ~.ListDataSourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", + request_serializer=datatransfer.ListDataSourcesRequest.serialize, + response_deserializer=datatransfer.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def create_transfer_config( + self, + ) -> Callable[[datatransfer.CreateTransferConfigRequest], transfer.TransferConfig]: + r"""Return a callable for the create transfer config method over gRPC. + + Creates a new data transfer configuration. + + Returns: + Callable[[~.CreateTransferConfigRequest], + ~.TransferConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
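Each stub property follows the same lazy, memoized pattern: build the multicallable once, cache it in ``_stubs``, and return the cached object thereafter. A self-contained sketch of that pattern (the localhost target is illustrative; no connection is made until a call is attempted):

```python
import grpc


class TinyTransport:
    """Minimal illustration of the stub-caching pattern used above."""

    def __init__(self, channel: grpc.Channel):
        self._channel = channel
        self._stubs = {}

    @property
    def get_data_source(self):
        if "get_data_source" not in self._stubs:
            # Serializers are omitted in this sketch.
            self._stubs["get_data_source"] = self._channel.unary_unary(
                "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource"
            )
        return self._stubs["get_data_source"]


transport = TinyTransport(grpc.insecure_channel("localhost:50051"))
assert transport.get_data_source is transport.get_data_source  # cached
```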
+ if "create_transfer_config" not in self._stubs: + self._stubs["create_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", + request_serializer=datatransfer.CreateTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["create_transfer_config"] + + @property + def update_transfer_config( + self, + ) -> Callable[[datatransfer.UpdateTransferConfigRequest], transfer.TransferConfig]: + r"""Return a callable for the update transfer config method over gRPC. + + Updates a data transfer configuration. + All fields must be set, even if they are not updated. + + Returns: + Callable[[~.UpdateTransferConfigRequest], + ~.TransferConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_transfer_config" not in self._stubs: + self._stubs["update_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", + request_serializer=datatransfer.UpdateTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["update_transfer_config"] + + @property + def delete_transfer_config( + self, + ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty_pb2.Empty]: + r"""Return a callable for the delete transfer config method over gRPC. + + Deletes a data transfer configuration, including any + associated transfer runs and logs. + + Returns: + Callable[[~.DeleteTransferConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_transfer_config" not in self._stubs: + self._stubs["delete_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", + request_serializer=datatransfer.DeleteTransferConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_transfer_config"] + + @property + def get_transfer_config( + self, + ) -> Callable[[datatransfer.GetTransferConfigRequest], transfer.TransferConfig]: + r"""Return a callable for the get transfer config method over gRPC. + + Returns information about a data transfer config. + + Returns: + Callable[[~.GetTransferConfigRequest], + ~.TransferConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
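A hypothetical usage sketch for the update RPC wired above, using a ``FieldMask`` to name the fields being changed (resource names are placeholders):

```python
from google.cloud import bigquery_datatransfer_v1
from google.protobuf import field_mask_pb2

client = bigquery_datatransfer_v1.DataTransferServiceClient()

config = bigquery_datatransfer_v1.TransferConfig(
    name="projects/my-project/locations/us/transferConfigs/my-config",
    display_name="renamed config",
)
updated = client.update_transfer_config(
    transfer_config=config,
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
)
print(updated.display_name)
```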
+ if "get_transfer_config" not in self._stubs: + self._stubs["get_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", + request_serializer=datatransfer.GetTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["get_transfer_config"] + + @property + def list_transfer_configs( + self, + ) -> Callable[ + [datatransfer.ListTransferConfigsRequest], + datatransfer.ListTransferConfigsResponse, + ]: + r"""Return a callable for the list transfer configs method over gRPC. + + Returns information about all transfer configs owned + by a project in the specified location. + + Returns: + Callable[[~.ListTransferConfigsRequest], + ~.ListTransferConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transfer_configs" not in self._stubs: + self._stubs["list_transfer_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", + request_serializer=datatransfer.ListTransferConfigsRequest.serialize, + response_deserializer=datatransfer.ListTransferConfigsResponse.deserialize, + ) + return self._stubs["list_transfer_configs"] + + @property + def schedule_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ScheduleTransferRunsRequest], + datatransfer.ScheduleTransferRunsResponse, + ]: + r"""Return a callable for the schedule transfer runs method over gRPC. + + Creates transfer runs for a time range [start_time, end_time]. + For each date - or whatever granularity the data source supports + - in the range, one transfer run is created. Note that runs are + created per UTC time in the time range. DEPRECATED: use + StartManualTransferRuns instead. + + Returns: + Callable[[~.ScheduleTransferRunsRequest], + ~.ScheduleTransferRunsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "schedule_transfer_runs" not in self._stubs: + self._stubs["schedule_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", + request_serializer=datatransfer.ScheduleTransferRunsRequest.serialize, + response_deserializer=datatransfer.ScheduleTransferRunsResponse.deserialize, + ) + return self._stubs["schedule_transfer_runs"] + + @property + def start_manual_transfer_runs( + self, + ) -> Callable[ + [datatransfer.StartManualTransferRunsRequest], + datatransfer.StartManualTransferRunsResponse, + ]: + r"""Return a callable for the start manual transfer runs method over gRPC. + + Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a + time range where the run_time is between start_time (inclusive) + and end_time (exclusive), or for a specific run_time. + + Returns: + Callable[[~.StartManualTransferRunsRequest], + ~.StartManualTransferRunsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_manual_transfer_runs" not in self._stubs: + self._stubs["start_manual_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns", + request_serializer=datatransfer.StartManualTransferRunsRequest.serialize, + response_deserializer=datatransfer.StartManualTransferRunsResponse.deserialize, + ) + return self._stubs["start_manual_transfer_runs"] + + @property + def get_transfer_run( + self, + ) -> Callable[[datatransfer.GetTransferRunRequest], transfer.TransferRun]: + r"""Return a callable for the get transfer run method over gRPC. + + Returns information about the particular transfer + run. + + Returns: + Callable[[~.GetTransferRunRequest], + ~.TransferRun]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transfer_run" not in self._stubs: + self._stubs["get_transfer_run"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", + request_serializer=datatransfer.GetTransferRunRequest.serialize, + response_deserializer=transfer.TransferRun.deserialize, + ) + return self._stubs["get_transfer_run"] + + @property + def delete_transfer_run( + self, + ) -> Callable[[datatransfer.DeleteTransferRunRequest], empty_pb2.Empty]: + r"""Return a callable for the delete transfer run method over gRPC. + + Deletes the specified transfer run. + + Returns: + Callable[[~.DeleteTransferRunRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_transfer_run" not in self._stubs: + self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", + request_serializer=datatransfer.DeleteTransferRunRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_transfer_run"] + + @property + def list_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ListTransferRunsRequest], datatransfer.ListTransferRunsResponse + ]: + r"""Return a callable for the list transfer runs method over gRPC. + + Returns information about running and completed + transfer runs. + + Returns: + Callable[[~.ListTransferRunsRequest], + ~.ListTransferRunsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
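As the docstring above notes, manual runs can target either a specific ``run_time`` or a half-open time range. A hedged usage sketch for the single-run case (proto-plus converts the ``datetime`` to a ``Timestamp``; resource names are placeholders):

```python
import datetime

from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()

response = client.start_manual_transfer_runs(
    request=bigquery_datatransfer_v1.StartManualTransferRunsRequest(
        parent="projects/my-project/locations/us/transferConfigs/my-config",
        requested_run_time=datetime.datetime.now(datetime.timezone.utc),
    )
)
for run in response.runs:
    print(run.name, run.state)
```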
+ if "list_transfer_runs" not in self._stubs: + self._stubs["list_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", + request_serializer=datatransfer.ListTransferRunsRequest.serialize, + response_deserializer=datatransfer.ListTransferRunsResponse.deserialize, + ) + return self._stubs["list_transfer_runs"] + + @property + def list_transfer_logs( + self, + ) -> Callable[ + [datatransfer.ListTransferLogsRequest], datatransfer.ListTransferLogsResponse + ]: + r"""Return a callable for the list transfer logs method over gRPC. + + Returns log messages for the transfer run. + + Returns: + Callable[[~.ListTransferLogsRequest], + ~.ListTransferLogsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transfer_logs" not in self._stubs: + self._stubs["list_transfer_logs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", + request_serializer=datatransfer.ListTransferLogsRequest.serialize, + response_deserializer=datatransfer.ListTransferLogsResponse.deserialize, + ) + return self._stubs["list_transfer_logs"] + + @property + def check_valid_creds( + self, + ) -> Callable[ + [datatransfer.CheckValidCredsRequest], datatransfer.CheckValidCredsResponse + ]: + r"""Return a callable for the check valid creds method over gRPC. + + Returns true if valid credentials exist for the given + data source and requesting user. + + Returns: + Callable[[~.CheckValidCredsRequest], + ~.CheckValidCredsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_valid_creds" not in self._stubs: + self._stubs["check_valid_creds"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", + request_serializer=datatransfer.CheckValidCredsRequest.serialize, + response_deserializer=datatransfer.CheckValidCredsResponse.deserialize, + ) + return self._stubs["check_valid_creds"] + + @property + def enroll_data_sources( + self, + ) -> Callable[[datatransfer.EnrollDataSourcesRequest], empty_pb2.Empty]: + r"""Return a callable for the enroll data sources method over gRPC. + + Enroll data sources in a user project. This allows users to + create transfer configurations for these data sources. They will + also appear in the ListDataSources RPC and as such, will appear + in the `BigQuery + UI `__, and the + documents can be found in the public guide for `BigQuery Web + UI `__ and + `Data Transfer + Service `__. + + Returns: + Callable[[~.EnrollDataSourcesRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
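``CheckValidCreds`` is a simple credentials probe, often called before creating a transfer config. A usage sketch (the data source name is a placeholder):

```python
from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()

response = client.check_valid_creds(
    name="projects/my-project/locations/us/dataSources/scheduled_query"
)
print(response.has_valid_creds)
```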
+ if "enroll_data_sources" not in self._stubs: + self._stubs["enroll_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/EnrollDataSources", + request_serializer=datatransfer.EnrollDataSourcesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["enroll_data_sources"] + + def close(self): + self.grpc_channel.close() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DataTransferServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..6adec040e06a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py @@ -0,0 +1,723 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
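The ``close()`` implementation above simply closes the channel, and the generated clients call it for you when used as context managers. A sketch:

```python
from google.cloud import bigquery_datatransfer_v1

with bigquery_datatransfer_v1.DataTransferServiceClient() as client:
    for source in client.list_data_sources(
        parent="projects/my-project/locations/us"
    ):
        print(source.data_source_id)
# On exit the client calls transport.close(), which closes the channel.
```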
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + +from .base import DEFAULT_CLIENT_INFO, DataTransferServiceTransport +from .grpc import DataTransferServiceGrpcTransport + + +class DataTransferServiceGrpcAsyncIOTransport(DataTransferServiceTransport): + """gRPC AsyncIO backend transport for DataTransferService. + + This API allows users to manage their data transfers into + BigQuery. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_data_source( + self, + ) -> Callable[ + [datatransfer.GetDataSourceRequest], Awaitable[datatransfer.DataSource] + ]: + r"""Return a callable for the get data source method over gRPC. + + Retrieves a supported data source and returns its + settings. + + Returns: + Callable[[~.GetDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", + request_serializer=datatransfer.GetDataSourceRequest.serialize, + response_deserializer=datatransfer.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [datatransfer.ListDataSourcesRequest], + Awaitable[datatransfer.ListDataSourcesResponse], + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists supported data sources and returns their + settings. + + Returns: + Callable[[~.ListDataSourcesRequest], + Awaitable[~.ListDataSourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", + request_serializer=datatransfer.ListDataSourcesRequest.serialize, + response_deserializer=datatransfer.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def create_transfer_config( + self, + ) -> Callable[ + [datatransfer.CreateTransferConfigRequest], Awaitable[transfer.TransferConfig] + ]: + r"""Return a callable for the create transfer config method over gRPC. + + Creates a new data transfer configuration. + + Returns: + Callable[[~.CreateTransferConfigRequest], + Awaitable[~.TransferConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_transfer_config" not in self._stubs: + self._stubs["create_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", + request_serializer=datatransfer.CreateTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["create_transfer_config"] + + @property + def update_transfer_config( + self, + ) -> Callable[ + [datatransfer.UpdateTransferConfigRequest], Awaitable[transfer.TransferConfig] + ]: + r"""Return a callable for the update transfer config method over gRPC. + + Updates a data transfer configuration. + All fields must be set, even if they are not updated. + + Returns: + Callable[[~.UpdateTransferConfigRequest], + Awaitable[~.TransferConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_transfer_config" not in self._stubs: + self._stubs["update_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", + request_serializer=datatransfer.UpdateTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["update_transfer_config"] + + @property + def delete_transfer_config( + self, + ) -> Callable[ + [datatransfer.DeleteTransferConfigRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete transfer config method over gRPC. + + Deletes a data transfer configuration, including any + associated transfer runs and logs. + + Returns: + Callable[[~.DeleteTransferConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_transfer_config" not in self._stubs: + self._stubs["delete_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", + request_serializer=datatransfer.DeleteTransferConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_transfer_config"] + + @property + def get_transfer_config( + self, + ) -> Callable[ + [datatransfer.GetTransferConfigRequest], Awaitable[transfer.TransferConfig] + ]: + r"""Return a callable for the get transfer config method over gRPC. + + Returns information about a data transfer config. + + Returns: + Callable[[~.GetTransferConfigRequest], + Awaitable[~.TransferConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transfer_config" not in self._stubs: + self._stubs["get_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", + request_serializer=datatransfer.GetTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["get_transfer_config"] + + @property + def list_transfer_configs( + self, + ) -> Callable[ + [datatransfer.ListTransferConfigsRequest], + Awaitable[datatransfer.ListTransferConfigsResponse], + ]: + r"""Return a callable for the list transfer configs method over gRPC. + + Returns information about all transfer configs owned + by a project in the specified location. + + Returns: + Callable[[~.ListTransferConfigsRequest], + Awaitable[~.ListTransferConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_transfer_configs" not in self._stubs: + self._stubs["list_transfer_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", + request_serializer=datatransfer.ListTransferConfigsRequest.serialize, + response_deserializer=datatransfer.ListTransferConfigsResponse.deserialize, + ) + return self._stubs["list_transfer_configs"] + + @property + def schedule_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ScheduleTransferRunsRequest], + Awaitable[datatransfer.ScheduleTransferRunsResponse], + ]: + r"""Return a callable for the schedule transfer runs method over gRPC. + + Creates transfer runs for a time range [start_time, end_time]. + For each date - or whatever granularity the data source supports + - in the range, one transfer run is created. Note that runs are + created per UTC time in the time range. DEPRECATED: use + StartManualTransferRuns instead. + + Returns: + Callable[[~.ScheduleTransferRunsRequest], + Awaitable[~.ScheduleTransferRunsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "schedule_transfer_runs" not in self._stubs: + self._stubs["schedule_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", + request_serializer=datatransfer.ScheduleTransferRunsRequest.serialize, + response_deserializer=datatransfer.ScheduleTransferRunsResponse.deserialize, + ) + return self._stubs["schedule_transfer_runs"] + + @property + def start_manual_transfer_runs( + self, + ) -> Callable[ + [datatransfer.StartManualTransferRunsRequest], + Awaitable[datatransfer.StartManualTransferRunsResponse], + ]: + r"""Return a callable for the start manual transfer runs method over gRPC. + + Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a + time range where the run_time is between start_time (inclusive) + and end_time (exclusive), or for a specific run_time. + + Returns: + Callable[[~.StartManualTransferRunsRequest], + Awaitable[~.StartManualTransferRunsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_manual_transfer_runs" not in self._stubs: + self._stubs["start_manual_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns", + request_serializer=datatransfer.StartManualTransferRunsRequest.serialize, + response_deserializer=datatransfer.StartManualTransferRunsResponse.deserialize, + ) + return self._stubs["start_manual_transfer_runs"] + + @property + def get_transfer_run( + self, + ) -> Callable[ + [datatransfer.GetTransferRunRequest], Awaitable[transfer.TransferRun] + ]: + r"""Return a callable for the get transfer run method over gRPC. + + Returns information about the particular transfer + run. + + Returns: + Callable[[~.GetTransferRunRequest], + Awaitable[~.TransferRun]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transfer_run" not in self._stubs: + self._stubs["get_transfer_run"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", + request_serializer=datatransfer.GetTransferRunRequest.serialize, + response_deserializer=transfer.TransferRun.deserialize, + ) + return self._stubs["get_transfer_run"] + + @property + def delete_transfer_run( + self, + ) -> Callable[[datatransfer.DeleteTransferRunRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete transfer run method over gRPC. + + Deletes the specified transfer run. + + Returns: + Callable[[~.DeleteTransferRunRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_transfer_run" not in self._stubs: + self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", + request_serializer=datatransfer.DeleteTransferRunRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_transfer_run"] + + @property + def list_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ListTransferRunsRequest], + Awaitable[datatransfer.ListTransferRunsResponse], + ]: + r"""Return a callable for the list transfer runs method over gRPC. + + Returns information about running and completed + transfer runs. + + Returns: + Callable[[~.ListTransferRunsRequest], + Awaitable[~.ListTransferRunsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transfer_runs" not in self._stubs: + self._stubs["list_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", + request_serializer=datatransfer.ListTransferRunsRequest.serialize, + response_deserializer=datatransfer.ListTransferRunsResponse.deserialize, + ) + return self._stubs["list_transfer_runs"] + + @property + def list_transfer_logs( + self, + ) -> Callable[ + [datatransfer.ListTransferLogsRequest], + Awaitable[datatransfer.ListTransferLogsResponse], + ]: + r"""Return a callable for the list transfer logs method over gRPC. + + Returns log messages for the transfer run. + + Returns: + Callable[[~.ListTransferLogsRequest], + Awaitable[~.ListTransferLogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_transfer_logs" not in self._stubs: + self._stubs["list_transfer_logs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", + request_serializer=datatransfer.ListTransferLogsRequest.serialize, + response_deserializer=datatransfer.ListTransferLogsResponse.deserialize, + ) + return self._stubs["list_transfer_logs"] + + @property + def check_valid_creds( + self, + ) -> Callable[ + [datatransfer.CheckValidCredsRequest], + Awaitable[datatransfer.CheckValidCredsResponse], + ]: + r"""Return a callable for the check valid creds method over gRPC. + + Returns true if valid credentials exist for the given + data source and requesting user. + + Returns: + Callable[[~.CheckValidCredsRequest], + Awaitable[~.CheckValidCredsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_valid_creds" not in self._stubs: + self._stubs["check_valid_creds"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", + request_serializer=datatransfer.CheckValidCredsRequest.serialize, + response_deserializer=datatransfer.CheckValidCredsResponse.deserialize, + ) + return self._stubs["check_valid_creds"] + + @property + def enroll_data_sources( + self, + ) -> Callable[[datatransfer.EnrollDataSourcesRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the enroll data sources method over gRPC. + + Enroll data sources in a user project. This allows users to + create transfer configurations for these data sources. They will + also appear in the ListDataSources RPC and as such, will appear + in the `BigQuery + UI `__, and the + documents can be found in the public guide for `BigQuery Web + UI `__ and + `Data Transfer + Service `__. + + Returns: + Callable[[~.EnrollDataSourcesRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "enroll_data_sources" not in self._stubs: + self._stubs["enroll_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/EnrollDataSources", + request_serializer=datatransfer.EnrollDataSourcesRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["enroll_data_sources"] + + def close(self): + return self.grpc_channel.close() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("DataTransferServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py new file mode 100644 index 000000000000..0cb62f38e323 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py @@ -0,0 +1,2375 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DataTransferServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DataTransferServiceRestInterceptor: + """Interceptor for DataTransferService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DataTransferServiceRestTransport. + + .. 
code-block:: python + class MyCustomDataTransferServiceInterceptor(DataTransferServiceRestInterceptor): + def pre_check_valid_creds(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_valid_creds(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_transfer_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_transfer_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_transfer_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_transfer_run(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_enroll_data_sources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_source(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_transfer_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_transfer_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_transfer_run(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_transfer_run(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_sources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_sources(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_transfer_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transfer_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_transfer_logs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transfer_logs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_transfer_runs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transfer_runs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_schedule_transfer_runs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_schedule_transfer_runs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_manual_transfer_runs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_manual_transfer_runs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_transfer_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_transfer_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
DataTransferServiceRestTransport(interceptor=MyCustomDataTransferServiceInterceptor()) + client = DataTransferServiceClient(transport=transport) + + + """ + + def pre_check_valid_creds( + self, + request: datatransfer.CheckValidCredsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.CheckValidCredsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for check_valid_creds + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_check_valid_creds( + self, response: datatransfer.CheckValidCredsResponse + ) -> datatransfer.CheckValidCredsResponse: + """Post-rpc interceptor for check_valid_creds + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_create_transfer_config( + self, + request: datatransfer.CreateTransferConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.CreateTransferConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_transfer_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_create_transfer_config( + self, response: transfer.TransferConfig + ) -> transfer.TransferConfig: + """Post-rpc interceptor for create_transfer_config + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_delete_transfer_config( + self, + request: datatransfer.DeleteTransferConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.DeleteTransferConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_transfer_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def pre_delete_transfer_run( + self, + request: datatransfer.DeleteTransferRunRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.DeleteTransferRunRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_transfer_run + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def pre_enroll_data_sources( + self, + request: datatransfer.EnrollDataSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.EnrollDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for enroll_data_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def pre_get_data_source( + self, + request: datatransfer.GetDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.GetDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. 
+ """ + return request, metadata + + def post_get_data_source( + self, response: datatransfer.DataSource + ) -> datatransfer.DataSource: + """Post-rpc interceptor for get_data_source + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_get_transfer_config( + self, + request: datatransfer.GetTransferConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.GetTransferConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_transfer_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_get_transfer_config( + self, response: transfer.TransferConfig + ) -> transfer.TransferConfig: + """Post-rpc interceptor for get_transfer_config + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_get_transfer_run( + self, + request: datatransfer.GetTransferRunRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.GetTransferRunRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_transfer_run + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_get_transfer_run( + self, response: transfer.TransferRun + ) -> transfer.TransferRun: + """Post-rpc interceptor for get_transfer_run + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_list_data_sources( + self, + request: datatransfer.ListDataSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ListDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_data_sources( + self, response: datatransfer.ListDataSourcesResponse + ) -> datatransfer.ListDataSourcesResponse: + """Post-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_list_transfer_configs( + self, + request: datatransfer.ListTransferConfigsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ListTransferConfigsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_transfer_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_transfer_configs( + self, response: datatransfer.ListTransferConfigsResponse + ) -> datatransfer.ListTransferConfigsResponse: + """Post-rpc interceptor for list_transfer_configs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_transfer_logs( + self, + request: datatransfer.ListTransferLogsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ListTransferLogsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_transfer_logs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_transfer_logs( + self, response: datatransfer.ListTransferLogsResponse + ) -> datatransfer.ListTransferLogsResponse: + """Post-rpc interceptor for list_transfer_logs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_list_transfer_runs( + self, + request: datatransfer.ListTransferRunsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ListTransferRunsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_transfer_runs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_transfer_runs( + self, response: datatransfer.ListTransferRunsResponse + ) -> datatransfer.ListTransferRunsResponse: + """Post-rpc interceptor for list_transfer_runs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_schedule_transfer_runs( + self, + request: datatransfer.ScheduleTransferRunsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ScheduleTransferRunsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for schedule_transfer_runs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_schedule_transfer_runs( + self, response: datatransfer.ScheduleTransferRunsResponse + ) -> datatransfer.ScheduleTransferRunsResponse: + """Post-rpc interceptor for schedule_transfer_runs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_start_manual_transfer_runs( + self, + request: datatransfer.StartManualTransferRunsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.StartManualTransferRunsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_manual_transfer_runs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_start_manual_transfer_runs( + self, response: datatransfer.StartManualTransferRunsResponse + ) -> datatransfer.StartManualTransferRunsResponse: + """Post-rpc interceptor for start_manual_transfer_runs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. 
+ """ + return response + + def pre_update_transfer_config( + self, + request: datatransfer.UpdateTransferConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.UpdateTransferConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_transfer_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_update_transfer_config( + self, response: transfer.TransferConfig + ) -> transfer.TransferConfig: + """Post-rpc interceptor for update_transfer_config + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DataTransferServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DataTransferServiceRestInterceptor + + +class DataTransferServiceRestTransport(DataTransferServiceTransport): + """REST backend transport for DataTransferService. + + This API allows users to manage their data transfers into + BigQuery. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DataTransferServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DataTransferServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CheckValidCreds(DataTransferServiceRestStub):
+        def __hash__(self):
+            return hash("CheckValidCreds")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: datatransfer.CheckValidCredsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> datatransfer.CheckValidCredsResponse:
+            r"""Call the check valid creds method over HTTP.
+
+            Args:
+                request (~.datatransfer.CheckValidCredsRequest):
+                    The request object. A request to determine whether the
+                    user has valid credentials. This method
+                    is used to limit the number of OAuth
+                    popups in the user interface. The user
+                    id is inferred from the API call
+                    context. If the data source has the
+                    Google+ authorization type, this method
+                    returns false, as it cannot be
+                    determined whether the credentials are
+                    already valid merely based on the user
+                    id.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.CheckValidCredsResponse: + A response indicating whether the + credentials exist and are valid. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/dataSources/*}:checkValidCreds", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_check_valid_creds( + request, metadata + ) + pb_request = datatransfer.CheckValidCredsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.CheckValidCredsResponse() + pb_resp = datatransfer.CheckValidCredsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check_valid_creds(resp) + return resp + + class _CreateTransferConfig(DataTransferServiceRestStub): + def __hash__(self): + return hash("CreateTransferConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.CreateTransferConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Call the create transfer config method over HTTP. + + Args: + request (~.datatransfer.CreateTransferConfigRequest): + The request object. A request to create a data transfer + configuration. If new credentials are + needed for this transfer configuration, + authorization info must be provided. If + authorization info is provided, the + transfer configuration will be + associated with the user id + corresponding to the authorization info. + Otherwise, the transfer configuration + will be associated with the calling + user. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/transferConfigs", + "body": "transfer_config", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*}/transferConfigs", + "body": "transfer_config", + }, + ] + request, metadata = self._interceptor.pre_create_transfer_config( + request, metadata + ) + pb_request = datatransfer.CreateTransferConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transfer.TransferConfig() + pb_resp = transfer.TransferConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_transfer_config(resp) + return resp + + class _DeleteTransferConfig(DataTransferServiceRestStub): + def __hash__(self): + return hash("DeleteTransferConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.DeleteTransferConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete transfer config method over HTTP. + + Args: + request (~.datatransfer.DeleteTransferConfigRequest): + The request object. A request to delete data transfer + information. All associated transfer + runs and log messages will be deleted as + well. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/transferConfigs/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/transferConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_transfer_config( + request, metadata + ) + pb_request = datatransfer.DeleteTransferConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteTransferRun(DataTransferServiceRestStub): + def __hash__(self): + return hash("DeleteTransferRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.DeleteTransferRunRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete transfer run method over HTTP. + + Args: + request (~.datatransfer.DeleteTransferRunRequest): + The request object. A request to delete data transfer run + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/transferConfigs/*/runs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_transfer_run( + request, metadata + ) + pb_request = datatransfer.DeleteTransferRunRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _EnrollDataSources(DataTransferServiceRestStub): + def __hash__(self): + return hash("EnrollDataSources") + + def __call__( + self, + request: datatransfer.EnrollDataSourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the enroll data sources method over HTTP. + + Args: + request (~.datatransfer.EnrollDataSourcesRequest): + The request object. A request to enroll a set of data sources so they are + visible in the BigQuery UI's ``Transfer`` tab. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*}:enrollDataSources", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*}:enrollDataSources", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enroll_data_sources( + request, metadata + ) + pb_request = datatransfer.EnrollDataSourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDataSource(DataTransferServiceRestStub): + def __hash__(self): + return hash("GetDataSource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.GetDataSourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.DataSource: + r"""Call the get data source method over HTTP. + + Args: + request (~.datatransfer.GetDataSourceRequest): + The request object. A request to get data source info. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.DataSource: + Defines the properties and custom + parameters for a data source. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataSources/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/dataSources/*}", + }, + ] + request, metadata = self._interceptor.pre_get_data_source(request, metadata) + pb_request = datatransfer.GetDataSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.DataSource() + pb_resp = datatransfer.DataSource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_data_source(resp) + return resp + + class _GetTransferConfig(DataTransferServiceRestStub): + def __hash__(self): + return hash("GetTransferConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.GetTransferConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Call the get transfer config method over HTTP. + + Args: + request (~.datatransfer.GetTransferConfigRequest): + The request object. A request to get data transfer + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/transferConfigs/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/transferConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_transfer_config( + request, metadata + ) + pb_request = datatransfer.GetTransferConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transfer.TransferConfig() + pb_resp = transfer.TransferConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_transfer_config(resp) + return resp + + class _GetTransferRun(DataTransferServiceRestStub): + def __hash__(self): + return hash("GetTransferRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.GetTransferRunRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferRun: + r"""Call the get transfer run method over HTTP. + + Args: + request (~.datatransfer.GetTransferRunRequest): + The request object. A request to get data transfer run + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferRun: + Represents a data transfer run. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/transferConfigs/*/runs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_transfer_run( + request, metadata + ) + pb_request = datatransfer.GetTransferRunRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transfer.TransferRun() + pb_resp = transfer.TransferRun.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_transfer_run(resp) + return resp + + class _ListDataSources(DataTransferServiceRestStub): + def __hash__(self): + return hash("ListDataSources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ListDataSourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ListDataSourcesResponse: + r"""Call the list data sources method over HTTP. + + Args: + request (~.datatransfer.ListDataSourcesRequest): + The request object. Request to list supported data + sources and their data transfer + settings. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ListDataSourcesResponse: + Returns list of supported data + sources and their metadata. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dataSources", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*}/dataSources", + }, + ] + request, metadata = self._interceptor.pre_list_data_sources( + request, metadata + ) + pb_request = datatransfer.ListDataSourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ListDataSourcesResponse() + pb_resp = datatransfer.ListDataSourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_data_sources(resp) + return resp + + class _ListTransferConfigs(DataTransferServiceRestStub): + def __hash__(self): + return hash("ListTransferConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ListTransferConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ListTransferConfigsResponse: + r"""Call the list transfer configs method over HTTP. + + Args: + request (~.datatransfer.ListTransferConfigsRequest): + The request object. A request to list data transfers + configured for a BigQuery project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ListTransferConfigsResponse: + The returned list of pipelines in the + project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/transferConfigs", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*}/transferConfigs", + }, + ] + request, metadata = self._interceptor.pre_list_transfer_configs( + request, metadata + ) + pb_request = datatransfer.ListTransferConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ListTransferConfigsResponse() + pb_resp = datatransfer.ListTransferConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transfer_configs(resp) + return resp + + class _ListTransferLogs(DataTransferServiceRestStub): + def __hash__(self): + return hash("ListTransferLogs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ListTransferLogsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ListTransferLogsResponse: + r"""Call the list transfer logs method over HTTP. + + Args: + request (~.datatransfer.ListTransferLogsRequest): + The request object. A request to get user facing log + messages associated with data transfer + run. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ListTransferLogsResponse: + The returned list transfer run + messages. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs", + }, + ] + request, metadata = self._interceptor.pre_list_transfer_logs( + request, metadata + ) + pb_request = datatransfer.ListTransferLogsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ListTransferLogsResponse() + pb_resp = datatransfer.ListTransferLogsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transfer_logs(resp) + return resp + + class _ListTransferRuns(DataTransferServiceRestStub): + def __hash__(self): + return hash("ListTransferRuns") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ListTransferRunsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ListTransferRunsResponse: + r"""Call the list transfer runs method over HTTP. + + Args: + request (~.datatransfer.ListTransferRunsRequest): + The request object. A request to list data transfer runs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ListTransferRunsResponse: + The returned list of pipelines in the + project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/transferConfigs/*}/runs", + }, + ] + request, metadata = self._interceptor.pre_list_transfer_runs( + request, metadata + ) + pb_request = datatransfer.ListTransferRunsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ListTransferRunsResponse() + pb_resp = datatransfer.ListTransferRunsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transfer_runs(resp) + return resp + + class _ScheduleTransferRuns(DataTransferServiceRestStub): + def __hash__(self): + return hash("ScheduleTransferRuns") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ScheduleTransferRunsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ScheduleTransferRunsResponse: + r"""Call the schedule transfer runs method over HTTP. + + Args: + request (~.datatransfer.ScheduleTransferRunsRequest): + The request object. A request to schedule transfer runs + for a time range. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ScheduleTransferRunsResponse: + A response to schedule transfer runs + for a time range. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_schedule_transfer_runs( + request, metadata + ) + pb_request = datatransfer.ScheduleTransferRunsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ScheduleTransferRunsResponse() + pb_resp = datatransfer.ScheduleTransferRunsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_schedule_transfer_runs(resp) + return resp + + class _StartManualTransferRuns(DataTransferServiceRestStub): + def __hash__(self): + return hash("StartManualTransferRuns") + + def __call__( + self, + request: datatransfer.StartManualTransferRunsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.StartManualTransferRunsResponse: + r"""Call the start manual transfer + runs method over HTTP. + + Args: + request (~.datatransfer.StartManualTransferRunsRequest): + The request object. A request to start manual transfer + runs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.StartManualTransferRunsResponse: + A response to start manual transfer + runs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/transferConfigs/*}:startManualRuns", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_start_manual_transfer_runs( + request, metadata + ) + pb_request = datatransfer.StartManualTransferRunsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.StartManualTransferRunsResponse() + pb_resp = datatransfer.StartManualTransferRunsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_manual_transfer_runs(resp) + return resp + + class _UpdateTransferConfig(DataTransferServiceRestStub): + def __hash__(self): + return hash("UpdateTransferConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.UpdateTransferConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Call the update transfer config method over HTTP. + + Args: + request (~.datatransfer.UpdateTransferConfigRequest): + The request object. A request to update a transfer + configuration. To update the user id of + the transfer configuration, + authorization info needs to be provided. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}", + "body": "transfer_config", + }, + { + "method": "patch", + "uri": "/v1/{transfer_config.name=projects/*/transferConfigs/*}", + "body": "transfer_config", + }, + ] + request, metadata = self._interceptor.pre_update_transfer_config( + request, metadata + ) + pb_request = datatransfer.UpdateTransferConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transfer.TransferConfig() + pb_resp = transfer.TransferConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_transfer_config(resp) + return resp + + @property + def check_valid_creds( + self, + ) -> Callable[ + [datatransfer.CheckValidCredsRequest], datatransfer.CheckValidCredsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckValidCreds(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_transfer_config( + self, + ) -> Callable[[datatransfer.CreateTransferConfigRequest], transfer.TransferConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTransferConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_transfer_config( + self, + ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTransferConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_transfer_run( + self, + ) -> Callable[[datatransfer.DeleteTransferRunRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteTransferRun(self._session, self._host, self._interceptor) # type: ignore + + @property + def enroll_data_sources( + self, + ) -> Callable[[datatransfer.EnrollDataSourcesRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnrollDataSources(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_source( + self, + ) -> Callable[[datatransfer.GetDataSourceRequest], datatransfer.DataSource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_transfer_config( + self, + ) -> Callable[[datatransfer.GetTransferConfigRequest], transfer.TransferConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTransferConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_transfer_run( + self, + ) -> Callable[[datatransfer.GetTransferRunRequest], transfer.TransferRun]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTransferRun(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_sources( + self, + ) -> Callable[ + [datatransfer.ListDataSourcesRequest], datatransfer.ListDataSourcesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transfer_configs( + self, + ) -> Callable[ + [datatransfer.ListTransferConfigsRequest], + datatransfer.ListTransferConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTransferConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transfer_logs( + self, + ) -> Callable[ + [datatransfer.ListTransferLogsRequest], datatransfer.ListTransferLogsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTransferLogs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ListTransferRunsRequest], datatransfer.ListTransferRunsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTransferRuns(self._session, self._host, self._interceptor) # type: ignore + + @property + def schedule_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ScheduleTransferRunsRequest], + datatransfer.ScheduleTransferRunsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ScheduleTransferRuns(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_manual_transfer_runs( + self, + ) -> Callable[ + [datatransfer.StartManualTransferRunsRequest], + datatransfer.StartManualTransferRunsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartManualTransferRuns(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_transfer_config( + self, + ) -> Callable[[datatransfer.UpdateTransferConfigRequest], transfer.TransferConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTransferConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(DataTransferServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(DataTransferServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. 
+ + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DataTransferServiceRestTransport",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py new file mode 100644 index 000000000000..51d83e95c795 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
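A minimal sketch of exercising the new REST transport end to end (illustrative; it assumes the package is installed, Application Default Credentials are configured, and ``my-project`` is a placeholder project id):

```python
from google.cloud import bigquery_datatransfer_v1

# transport="rest" routes every RPC through the
# DataTransferServiceRestTransport stubs added above.
client = bigquery_datatransfer_v1.DataTransferServiceClient(transport="rest")

# The _ListDataSources stub transcodes this call into
# GET /v1/{parent=projects/*}/dataSources.
for data_source in client.list_data_sources(parent="projects/my-project"):
    print(data_source.data_source_id)
```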
+# +from .datatransfer import ( + CheckValidCredsRequest, + CheckValidCredsResponse, + CreateTransferConfigRequest, + DataSource, + DataSourceParameter, + DeleteTransferConfigRequest, + DeleteTransferRunRequest, + EnrollDataSourcesRequest, + GetDataSourceRequest, + GetTransferConfigRequest, + GetTransferRunRequest, + ListDataSourcesRequest, + ListDataSourcesResponse, + ListTransferConfigsRequest, + ListTransferConfigsResponse, + ListTransferLogsRequest, + ListTransferLogsResponse, + ListTransferRunsRequest, + ListTransferRunsResponse, + ScheduleTransferRunsRequest, + ScheduleTransferRunsResponse, + StartManualTransferRunsRequest, + StartManualTransferRunsResponse, + UpdateTransferConfigRequest, +) +from .transfer import ( + EmailPreferences, + EncryptionConfiguration, + ScheduleOptions, + TransferConfig, + TransferMessage, + TransferRun, + TransferState, + TransferType, + UserInfo, +) + +__all__ = ( + "CheckValidCredsRequest", + "CheckValidCredsResponse", + "CreateTransferConfigRequest", + "DataSource", + "DataSourceParameter", + "DeleteTransferConfigRequest", + "DeleteTransferRunRequest", + "EnrollDataSourcesRequest", + "GetDataSourceRequest", + "GetTransferConfigRequest", + "GetTransferRunRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "ListTransferConfigsRequest", + "ListTransferConfigsResponse", + "ListTransferLogsRequest", + "ListTransferLogsResponse", + "ListTransferRunsRequest", + "ListTransferRunsResponse", + "ScheduleTransferRunsRequest", + "ScheduleTransferRunsResponse", + "StartManualTransferRunsRequest", + "StartManualTransferRunsResponse", + "UpdateTransferConfigRequest", + "EmailPreferences", + "EncryptionConfiguration", + "ScheduleOptions", + "TransferConfig", + "TransferMessage", + "TransferRun", + "UserInfo", + "TransferState", + "TransferType", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py new file mode 100644 index 000000000000..6ef903b55ec3 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -0,0 +1,1172 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
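The messages re-exported above are proto-plus wrappers, and the REST stubs earlier in this diff depend on that: each stub calls the message's ``pb()`` classmethod to reach the raw protobuf before transcoding and JSON serialization. A small sketch of that round trip (assuming the package is installed; the resource name is a placeholder):

```python
from google.cloud.bigquery_datatransfer_v1 import types
from google.protobuf import json_format

# Proto-plus messages are constructed with plain keyword arguments.
request = types.GetDataSourceRequest(
    name="projects/my-project/dataSources/scheduled_query",
)

# pb() exposes the underlying protobuf message; this is what the REST
# stubs hand to path_template.transcode() and json_format.MessageToJson().
pb_request = types.GetDataSourceRequest.pb(request)
print(json_format.MessageToJson(pb_request))
```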
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.bigquery_datatransfer_v1.types import transfer + +__protobuf__ = proto.module( + package="google.cloud.bigquery.datatransfer.v1", + manifest={ + "DataSourceParameter", + "DataSource", + "GetDataSourceRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "CreateTransferConfigRequest", + "UpdateTransferConfigRequest", + "GetTransferConfigRequest", + "DeleteTransferConfigRequest", + "GetTransferRunRequest", + "DeleteTransferRunRequest", + "ListTransferConfigsRequest", + "ListTransferConfigsResponse", + "ListTransferRunsRequest", + "ListTransferRunsResponse", + "ListTransferLogsRequest", + "ListTransferLogsResponse", + "CheckValidCredsRequest", + "CheckValidCredsResponse", + "ScheduleTransferRunsRequest", + "ScheduleTransferRunsResponse", + "StartManualTransferRunsRequest", + "StartManualTransferRunsResponse", + "EnrollDataSourcesRequest", + }, +) + + +class DataSourceParameter(proto.Message): + r"""A parameter used to define custom fields in a data source + definition. + + Attributes: + param_id (str): + Parameter identifier. + display_name (str): + Parameter display name in the user interface. + description (str): + Parameter description. + type_ (google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter.Type): + Parameter type. + required (bool): + Is parameter required. + repeated (bool): + Deprecated. This field has no effect. + validation_regex (str): + Regular expression which can be used for + parameter validation. + allowed_values (MutableSequence[str]): + All possible values for the parameter. + min_value (google.protobuf.wrappers_pb2.DoubleValue): + For integer and double values specifies + minimum allowed value. + max_value (google.protobuf.wrappers_pb2.DoubleValue): + For integer and double values specifies + maximum allowed value. + fields (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter]): + Deprecated. This field has no effect. + validation_description (str): + Description of the requirements for this + field, in case the user input does not fulfill + the regex pattern or min/max values. + validation_help_url (str): + URL to a help document to further explain the + naming requirements. + immutable (bool): + Cannot be changed after initial creation. + recurse (bool): + Deprecated. This field has no effect. + deprecated (bool): + If true, it should not be used in new + transfers, and it should not be visible to + users. + """ + + class Type(proto.Enum): + r"""Parameter type. + + Values: + TYPE_UNSPECIFIED (0): + Type unspecified. + STRING (1): + String parameter. + INTEGER (2): + Integer parameter (64-bits). + Will be serialized to json as string. + DOUBLE (3): + Double precision floating point parameter. + BOOLEAN (4): + Boolean parameter. + RECORD (5): + Deprecated. This field has no effect. + PLUS_PAGE (6): + Page ID for a Google+ Page. + LIST (7): + List of strings parameter. 
+ """ + TYPE_UNSPECIFIED = 0 + STRING = 1 + INTEGER = 2 + DOUBLE = 3 + BOOLEAN = 4 + RECORD = 5 + PLUS_PAGE = 6 + LIST = 7 + + param_id: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + type_: Type = proto.Field( + proto.ENUM, + number=4, + enum=Type, + ) + required: bool = proto.Field( + proto.BOOL, + number=5, + ) + repeated: bool = proto.Field( + proto.BOOL, + number=6, + ) + validation_regex: str = proto.Field( + proto.STRING, + number=7, + ) + allowed_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + min_value: wrappers_pb2.DoubleValue = proto.Field( + proto.MESSAGE, + number=9, + message=wrappers_pb2.DoubleValue, + ) + max_value: wrappers_pb2.DoubleValue = proto.Field( + proto.MESSAGE, + number=10, + message=wrappers_pb2.DoubleValue, + ) + fields: MutableSequence["DataSourceParameter"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="DataSourceParameter", + ) + validation_description: str = proto.Field( + proto.STRING, + number=12, + ) + validation_help_url: str = proto.Field( + proto.STRING, + number=13, + ) + immutable: bool = proto.Field( + proto.BOOL, + number=14, + ) + recurse: bool = proto.Field( + proto.BOOL, + number=15, + ) + deprecated: bool = proto.Field( + proto.BOOL, + number=20, + ) + + +class DataSource(proto.Message): + r"""Defines the properties and custom parameters for a data + source. + + Attributes: + name (str): + Output only. Data source resource name. + data_source_id (str): + Data source id. + display_name (str): + User friendly data source name. + description (str): + User friendly data source description string. + client_id (str): + Data source client id which should be used to + receive refresh token. + scopes (MutableSequence[str]): + Api auth scopes for which refresh token needs + to be obtained. These are scopes needed by a + data source to prepare data and ingest them into + BigQuery, e.g., + https://www.googleapis.com/auth/bigquery + transfer_type (google.cloud.bigquery_datatransfer_v1.types.TransferType): + Deprecated. This field has no effect. + supports_multiple_transfers (bool): + Deprecated. This field has no effect. + update_deadline_seconds (int): + The number of seconds to wait for an update + from the data source before the Data Transfer + Service marks the transfer as FAILED. + default_schedule (str): + Default data transfer schedule. Examples of valid schedules + include: ``1st,3rd monday of month 15:30``, + ``every wed,fri of jan,jun 13:15``, and + ``first sunday of quarter 00:00``. + supports_custom_schedule (bool): + Specifies whether the data source supports a user defined + schedule, or operates on the default schedule. When set to + ``true``, user can override default schedule. + parameters (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter]): + Data source parameters. + help_url (str): + Url for the help document for this data + source. + authorization_type (google.cloud.bigquery_datatransfer_v1.types.DataSource.AuthorizationType): + Indicates the type of authorization. + data_refresh_type (google.cloud.bigquery_datatransfer_v1.types.DataSource.DataRefreshType): + Specifies whether the data source supports + automatic data refresh for the past few days, + and how it's supported. For some data sources, + data might not be complete until a few days + later, so it's useful to refresh data + automatically. 
+ default_data_refresh_window_days (int): + Default data refresh window on days. Only meaningful when + ``data_refresh_type`` = ``SLIDING_WINDOW``. + manual_runs_disabled (bool): + Disables backfilling and manual run + scheduling for the data source. + minimum_schedule_interval (google.protobuf.duration_pb2.Duration): + The minimum interval for scheduler to + schedule runs. + """ + + class AuthorizationType(proto.Enum): + r"""The type of authorization needed for this data source. + + Values: + AUTHORIZATION_TYPE_UNSPECIFIED (0): + Type unspecified. + AUTHORIZATION_CODE (1): + Use OAuth 2 authorization codes that can be + exchanged for a refresh token on the backend. + GOOGLE_PLUS_AUTHORIZATION_CODE (2): + Return an authorization code for a given + Google+ page that can then be exchanged for a + refresh token on the backend. + FIRST_PARTY_OAUTH (3): + Use First Party OAuth. + """ + AUTHORIZATION_TYPE_UNSPECIFIED = 0 + AUTHORIZATION_CODE = 1 + GOOGLE_PLUS_AUTHORIZATION_CODE = 2 + FIRST_PARTY_OAUTH = 3 + + class DataRefreshType(proto.Enum): + r"""Represents how the data source supports data auto refresh. + + Values: + DATA_REFRESH_TYPE_UNSPECIFIED (0): + The data source won't support data auto + refresh, which is default value. + SLIDING_WINDOW (1): + The data source supports data auto refresh, + and runs will be scheduled for the past few + days. Does not allow custom values to be set for + each transfer config. + CUSTOM_SLIDING_WINDOW (2): + The data source supports data auto refresh, + and runs will be scheduled for the past few + days. Allows custom values to be set for each + transfer config. + """ + DATA_REFRESH_TYPE_UNSPECIFIED = 0 + SLIDING_WINDOW = 1 + CUSTOM_SLIDING_WINDOW = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source_id: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + client_id: str = proto.Field( + proto.STRING, + number=5, + ) + scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + transfer_type: transfer.TransferType = proto.Field( + proto.ENUM, + number=7, + enum=transfer.TransferType, + ) + supports_multiple_transfers: bool = proto.Field( + proto.BOOL, + number=8, + ) + update_deadline_seconds: int = proto.Field( + proto.INT32, + number=9, + ) + default_schedule: str = proto.Field( + proto.STRING, + number=10, + ) + supports_custom_schedule: bool = proto.Field( + proto.BOOL, + number=11, + ) + parameters: MutableSequence["DataSourceParameter"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="DataSourceParameter", + ) + help_url: str = proto.Field( + proto.STRING, + number=13, + ) + authorization_type: AuthorizationType = proto.Field( + proto.ENUM, + number=14, + enum=AuthorizationType, + ) + data_refresh_type: DataRefreshType = proto.Field( + proto.ENUM, + number=15, + enum=DataRefreshType, + ) + default_data_refresh_window_days: int = proto.Field( + proto.INT32, + number=16, + ) + manual_runs_disabled: bool = proto.Field( + proto.BOOL, + number=17, + ) + minimum_schedule_interval: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=18, + message=duration_pb2.Duration, + ) + + +class GetDataSourceRequest(proto.Message): + r"""A request to get data source info. + + Attributes: + name (str): + Required. 
The field will contain the name of the resource
+            requested, for example:
+            ``projects/{project_id}/dataSources/{data_source_id}`` or
+            ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListDataSourcesRequest(proto.Message):
+    r"""Request to list supported data sources and their data
+    transfer settings.
+
+    Attributes:
+        parent (str):
+            Required. The BigQuery project id for which data sources
+            should be returned. Must be in the form:
+            ``projects/{project_id}`` or
+            ``projects/{project_id}/locations/{location_id}``
+        page_token (str):
+            Pagination token, which can be used to request a specific
+            page of ``ListDataSourcesRequest`` list results. For
+            multiple-page results, ``ListDataSourcesResponse`` outputs a
+            ``next_page`` token, which can be used as the ``page_token``
+            value to request the next page of list results.
+        page_size (int):
+            Page size. The default page size is the
+            maximum value of 1000 results.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=4,
+    )
+
+
+class ListDataSourcesResponse(proto.Message):
+    r"""Returns list of supported data sources and their metadata.
+
+    Attributes:
+        data_sources (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.DataSource]):
+            List of supported data sources and their
+            transfer settings.
+        next_page_token (str):
+            Output only. The next-pagination token. For multiple-page
+            list results, this token can be used as the
+            ``ListDataSourcesRequest.page_token`` to request the next
+            page of list results.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    data_sources: MutableSequence["DataSource"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="DataSource",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class CreateTransferConfigRequest(proto.Message):
+    r"""A request to create a data transfer configuration. If new
+    credentials are needed for this transfer configuration,
+    authorization info must be provided. If authorization info is
+    provided, the transfer configuration will be associated with the
+    user id corresponding to the authorization info. Otherwise, the
+    transfer configuration will be associated with the calling user.
+
+    Attributes:
+        parent (str):
+            Required. The BigQuery project id where the transfer
+            configuration should be created. Must be in the format
+            projects/{project_id}/locations/{location_id} or
+            projects/{project_id}. If the specified location and the
+            location of the destination BigQuery dataset do not match,
+            the request will fail.
+        transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig):
+            Required. Data transfer configuration to
+            create.
+        authorization_code (str):
+            Optional OAuth2 authorization code to use with this transfer
+            configuration. This is required only if
+            ``transferConfig.dataSourceId`` is 'youtube_channel' and new
+            credentials are needed, as indicated by ``CheckValidCreds``.
+            In order to obtain authorization_code, make a request to the
+            following URL:
+
+            .. raw:: html
+
+                <pre class="prettyprint" suppresswarning>
+                https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+                </pre>
+
+            -  The client_id is the OAuth client_id of the data source
+               as returned by ListDataSources method.
+            -  data_source_scopes are the scopes returned by
+               ListDataSources method.
+
+            Note that this should not be set when
+            ``service_account_name`` is used to create the transfer
+            config.
+        version_info (str):
+            Optional version info. This is required only if
+            ``transferConfig.dataSourceId`` is not 'youtube_channel' and
+            new credentials are needed, as indicated by
+            ``CheckValidCreds``. In order to obtain version info, make a
+            request to the following URL:
+
+            .. raw:: html
+
+                <pre class="prettyprint" suppresswarning>
+                https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+                </pre>
+
+            -  The client_id is the OAuth client_id of the data source
+               as returned by ListDataSources method.
+            -  data_source_scopes are the scopes returned by
+               ListDataSources method.
+
+            Note that this should not be set when
+            ``service_account_name`` is used to create the transfer
+            config.
+        service_account_name (str):
+            Optional service account email. If this field is set, the
+            transfer config will be created with this service account's
+            credentials. It requires that the requesting user calling
+            this API has permissions to act as this service account.
+
+            Note that not all data sources support service account
+            credentials when creating a transfer config. For the latest
+            list of data sources, read about `using service
+            accounts <https://cloud.google.com/bigquery-transfer/docs/use-service-accounts>`__.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    transfer_config: transfer.TransferConfig = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=transfer.TransferConfig,
+    )
+    authorization_code: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    version_info: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    service_account_name: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+
+
+class UpdateTransferConfigRequest(proto.Message):
+    r"""A request to update a transfer configuration. To update the
+    user id of the transfer configuration, authorization info needs
+    to be provided.
+
+    Attributes:
+        transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig):
+            Required. Data transfer configuration to
+            update.
+        authorization_code (str):
+            Optional OAuth2 authorization code to use with this transfer
+            configuration. This is required only if
+            ``transferConfig.dataSourceId`` is 'youtube_channel' and new
+            credentials are needed, as indicated by ``CheckValidCreds``.
+            In order to obtain authorization_code, make a request to the
+            following URL:
+
+            .. raw:: html
+
+                <pre>
+                https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
+                </pre>
+ + - The client_id is the OAuth client_id of the data source + as returned by the ListDataSources method. + - data_source_scopes are the scopes returned by the + ListDataSources method. + + Note that this should not be set when + ``service_account_name`` is used to update the transfer + config. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to be + updated in this request. + version_info (str): + Optional version info. This is required only if + ``transferConfig.dataSourceId`` is not 'youtube_channel' and + new credentials are needed, as indicated by + ``CheckValidCreds``. In order to obtain version info, make a + request to the following URL: + + .. raw:: html + +
+                <pre>
+                https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
+                </pre>
+ + - The client_id is the OAuth client_id of the data source + as returned by the ListDataSources method. + - data_source_scopes are the scopes returned by the + ListDataSources method. + + Note that this should not be set when + ``service_account_name`` is used to update the transfer + config. + service_account_name (str): + Optional service account email. If this field is set, the + transfer config will be updated to use this service account's + credentials. It requires that the requesting user calling + this API has permissions to act as this service account. + + Note that not all data sources support service account + credentials when creating a transfer config. For the latest + list of data sources, read about `using service + accounts <https://cloud.google.com/bigquery-transfer/docs/use-service-accounts>`__. + """ + + transfer_config: transfer.TransferConfig = proto.Field( + proto.MESSAGE, + number=1, + message=transfer.TransferConfig, + ) + authorization_code: str = proto.Field( + proto.STRING, + number=3, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + version_info: str = proto.Field( + proto.STRING, + number=5, + ) + service_account_name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class GetTransferConfigRequest(proto.Message): + r"""A request to get data transfer information. + + Attributes: + name (str): + Required. The field will contain the name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteTransferConfigRequest(proto.Message): + r"""A request to delete data transfer information. All associated + transfer runs and log messages will be deleted as well. + + Attributes: + name (str): + Required. The field will contain the name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetTransferRunRequest(proto.Message): + r"""A request to get data transfer run information. + + Attributes: + name (str): + Required. The field will contain the name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteTransferRunRequest(proto.Message): + r"""A request to delete data transfer run information. + + Attributes: + name (str): + Required. The field will contain the name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTransferConfigsRequest(proto.Message): + r"""A request to list data transfers configured for a BigQuery + project. + + Attributes: + parent (str): + Required. The BigQuery project id for which transfer configs + should be returned: ``projects/{project_id}`` or + ``projects/{project_id}/locations/{location_id}`` + data_source_ids (MutableSequence[str]): + When specified, only configurations of + requested data sources are returned.
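A minimal sketch of the data-source filter just described, assuming a hypothetical project and the ``scheduled_query`` data source id::

    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    # The returned pager handles page_token / next_page_token internally.
    for config in client.list_transfer_configs(
        request={
            "parent": "projects/my-project",
            "data_source_ids": ["scheduled_query"],
        }
    ):
        print(config.name, config.display_name)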
+ page_token (str): + Pagination token, which can be used to request a specific + page of ``ListTransfersRequest`` list results. For + multiple-page results, ``ListTransfersResponse`` outputs a + ``next_page`` token, which can be used as the ``page_token`` + value to request the next page of list results. + page_size (int): + Page size. The default page size is the + maximum value of 1000 results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_source_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + + +class ListTransferConfigsResponse(proto.Message): + r"""The returned list of pipelines in the project. + + Attributes: + transfer_configs (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): + Output only. The stored pipeline transfer + configurations. + next_page_token (str): + Output only. The next-pagination token. For multiple-page + list results, this token can be used as the + ``ListTransferConfigsRequest.page_token`` to request the + next page of list results. + """ + + @property + def raw_page(self): + return self + + transfer_configs: MutableSequence[transfer.TransferConfig] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=transfer.TransferConfig, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTransferRunsRequest(proto.Message): + r"""A request to list data transfer runs. + + Attributes: + parent (str): + Required. Name of transfer configuration for which transfer + runs should be retrieved. Format of transfer configuration + resource name is: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + states (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferState]): + When specified, only transfer runs with + requested states are returned. + page_token (str): + Pagination token, which can be used to request a specific + page of ``ListTransferRunsRequest`` list results. For + multiple-page results, ``ListTransferRunsResponse`` outputs + a ``next_page`` token, which can be used as the + ``page_token`` value to request the next page of list + results. + page_size (int): + Page size. The default page size is the + maximum value of 1000 results. + run_attempt (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest.RunAttempt): + Indicates how run attempts are to be pulled. + """ + + class RunAttempt(proto.Enum): + r"""Represents which runs should be pulled. + + Values: + RUN_ATTEMPT_UNSPECIFIED (0): + All runs should be returned. + LATEST (1): + Only latest run per day should be returned. + """ + RUN_ATTEMPT_UNSPECIFIED = 0 + LATEST = 1 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + states: MutableSequence[transfer.TransferState] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=transfer.TransferState, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + run_attempt: RunAttempt = proto.Field( + proto.ENUM, + number=5, + enum=RunAttempt, + ) + + +class ListTransferRunsResponse(proto.Message): + r"""The returned list of pipelines in the project. + + Attributes: + transfer_runs (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): + Output only. The stored pipeline transfer + runs. 
+ next_page_token (str): + Output only. The next-pagination token. For multiple-page + list results, this token can be used as the + ``ListTransferRunsRequest.page_token`` to request the next + page of list results. + """ + + @property + def raw_page(self): + return self + + transfer_runs: MutableSequence[transfer.TransferRun] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=transfer.TransferRun, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTransferLogsRequest(proto.Message): + r"""A request to get user facing log messages associated with + data transfer run. + + Attributes: + parent (str): + Required. Transfer run name in the form: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + page_token (str): + Pagination token, which can be used to request a specific + page of ``ListTransferLogsRequest`` list results. For + multiple-page results, ``ListTransferLogsResponse`` outputs + a ``next_page`` token, which can be used as the + ``page_token`` value to request the next page of list + results. + page_size (int): + Page size. The default page size is the + maximum value of 1000 results. + message_types (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage.MessageSeverity]): + Message types to return. If not populated - + INFO, WARNING and ERROR messages are returned. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + page_size: int = proto.Field( + proto.INT32, + number=5, + ) + message_types: MutableSequence[ + transfer.TransferMessage.MessageSeverity + ] = proto.RepeatedField( + proto.ENUM, + number=6, + enum=transfer.TransferMessage.MessageSeverity, + ) + + +class ListTransferLogsResponse(proto.Message): + r"""The returned list transfer run messages. + + Attributes: + transfer_messages (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage]): + Output only. The stored pipeline transfer + messages. + next_page_token (str): + Output only. The next-pagination token. For multiple-page + list results, this token can be used as the + ``GetTransferRunLogRequest.page_token`` to request the next + page of list results. + """ + + @property + def raw_page(self): + return self + + transfer_messages: MutableSequence[transfer.TransferMessage] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=transfer.TransferMessage, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CheckValidCredsRequest(proto.Message): + r"""A request to determine whether the user has valid + credentials. This method is used to limit the number of OAuth + popups in the user interface. The user id is inferred from the + API call context. + If the data source has the Google+ authorization type, this + method returns false, as it cannot be determined whether the + credentials are already valid merely based on the user id. + + Attributes: + name (str): + Required. The data source in the form: + ``projects/{project_id}/dataSources/{data_source_id}`` or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CheckValidCredsResponse(proto.Message): + r"""A response indicating whether the credentials exist and are + valid. 
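A minimal sketch of the credential check just described, assuming a hypothetical project and data source::

    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    response = client.check_valid_creds(
        name="projects/my-project/dataSources/scheduled_query"
    )
    if not response.has_valid_creds:
        # The caller would obtain an authorization_code or version_info
        # via the OAuth URL documented above before creating a config.
        pass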
+ + Attributes: + has_valid_creds (bool): + If set to ``true``, the credentials exist and are valid. + """ + + has_valid_creds: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ScheduleTransferRunsRequest(proto.Message): + r"""A request to schedule transfer runs for a time range. + + Attributes: + parent (str): + Required. Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Required. Start time of the range of transfer runs. For + example, ``"2017-05-25T00:00:00+00:00"``. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Required. End time of the range of transfer runs. For + example, ``"2017-05-30T00:00:00+00:00"``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class ScheduleTransferRunsResponse(proto.Message): + r"""A response to schedule transfer runs for a time range. + + Attributes: + runs (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): + The transfer runs that were scheduled. + """ + + runs: MutableSequence[transfer.TransferRun] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=transfer.TransferRun, + ) + + +class StartManualTransferRunsRequest(proto.Message): + r"""A request to start manual transfer runs. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + requested_time_range (google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest.TimeRange): + A time_range start and end timestamp for historical data + files or reports that are scheduled to be transferred by the + scheduled transfer run. requested_time_range must be a past + time and cannot include future time values. + + This field is a member of `oneof`_ ``time``. + requested_run_time (google.protobuf.timestamp_pb2.Timestamp): + A run_time timestamp for historical data files or reports + that are scheduled to be transferred by the scheduled + transfer run. requested_run_time must be a past time and + cannot include future time values. + + This field is a member of `oneof`_ ``time``. + """ + + class TimeRange(proto.Message): + r"""A specification for a time range, this will request transfer runs + with run_time between start_time (inclusive) and end_time + (exclusive). + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of the range of transfer runs. For example, + ``"2017-05-25T00:00:00+00:00"``. The start_time must be + strictly less than the end_time. Creates transfer runs where + run_time is in the range between start_time (inclusive) and + end_time (exclusive). + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of the range of transfer runs. 
For example, + ``"2017-05-30T00:00:00+00:00"``. The end_time must not be in + the future. Creates transfer runs where run_time is in the + range between start_time (inclusive) and end_time + (exclusive). + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requested_time_range: TimeRange = proto.Field( + proto.MESSAGE, + number=3, + oneof="time", + message=TimeRange, + ) + requested_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + oneof="time", + message=timestamp_pb2.Timestamp, + ) + + +class StartManualTransferRunsResponse(proto.Message): + r"""A response to start manual transfer runs. + + Attributes: + runs (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): + The transfer runs that were created. + """ + + runs: MutableSequence[transfer.TransferRun] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=transfer.TransferRun, + ) + + +class EnrollDataSourcesRequest(proto.Message): + r"""A request to enroll a set of data sources so they are visible in the + BigQuery UI's ``Transfer`` tab. + + Attributes: + name (str): + The name of the project resource in the form: + ``projects/{project_id}`` + data_source_ids (MutableSequence[str]): + Data sources that are enrolled. It is + required to provide at least one data source id. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py new file mode 100644 index 000000000000..9bb3d659eba0 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types/transfer.py @@ -0,0 +1,567 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.datatransfer.v1", + manifest={ + "TransferType", + "TransferState", + "EmailPreferences", + "ScheduleOptions", + "UserInfo", + "TransferConfig", + "EncryptionConfiguration", + "TransferRun", + "TransferMessage", + }, +) + + +class TransferType(proto.Enum): + r"""DEPRECATED. Represents data transfer type. 
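Tying the request messages above together, a minimal sketch of a manual backfill over a time range, assuming a hypothetical transfer config name::

    from google.cloud import bigquery_datatransfer_v1
    from google.protobuf import timestamp_pb2

    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    start, end = timestamp_pb2.Timestamp(), timestamp_pb2.Timestamp()
    start.FromJsonString("2017-05-25T00:00:00Z")
    end.FromJsonString("2017-05-30T00:00:00Z")
    # start_time is inclusive, end_time exclusive, and both must be in the past.
    response = client.start_manual_transfer_runs(
        request={
            "parent": "projects/my-project/transferConfigs/1234",
            "requested_time_range": {"start_time": start, "end_time": end},
        }
    )
    for run in response.runs:
        print(run.name)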
+ + Values: + TRANSFER_TYPE_UNSPECIFIED (0): + Invalid or Unknown transfer type placeholder. + BATCH (1): + Batch data transfer. + STREAMING (2): + Streaming data transfer. Streaming data + source currently doesn't support multiple + transfer configs per project. + """ + _pb_options = {"deprecated": True} + TRANSFER_TYPE_UNSPECIFIED = 0 + BATCH = 1 + STREAMING = 2 + + +class TransferState(proto.Enum): + r"""Represents data transfer run state. + + Values: + TRANSFER_STATE_UNSPECIFIED (0): + State placeholder (0). + PENDING (2): + Data transfer is scheduled and is waiting to + be picked up by data transfer backend (2). + RUNNING (3): + Data transfer is in progress (3). + SUCCEEDED (4): + Data transfer completed successfully (4). + FAILED (5): + Data transfer failed (5). + CANCELLED (6): + Data transfer is cancelled (6). + """ + TRANSFER_STATE_UNSPECIFIED = 0 + PENDING = 2 + RUNNING = 3 + SUCCEEDED = 4 + FAILED = 5 + CANCELLED = 6 + + +class EmailPreferences(proto.Message): + r"""Represents preferences for sending email notifications for + transfer run events. + + Attributes: + enable_failure_email (bool): + If true, email notifications will be sent on + transfer run failures. + """ + + enable_failure_email: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ScheduleOptions(proto.Message): + r"""Options customizing the data transfer schedule. + + Attributes: + disable_auto_scheduling (bool): + If true, automatic scheduling of data + transfer runs for this configuration will be + disabled. The runs can be started on an ad-hoc + basis using the StartManualTransferRuns API. When + automatic scheduling is disabled, the + TransferConfig.schedule field will be ignored. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Specifies the time to start scheduling transfer + runs. The first run will be scheduled at or + after the start time according to a recurrence + pattern defined in the schedule string. The + start time can be changed at any moment. The + time when a data transfer can be triggered + manually is not limited by this option. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Defines the time to stop scheduling transfer + runs. A transfer run cannot be scheduled at or + after the end time. The end time can be changed + at any moment. The time when a data transfer can + be triggered manually is not limited by this + option. + """ + + disable_auto_scheduling: bool = proto.Field( + proto.BOOL, + number=3, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class UserInfo(proto.Message): + r"""Information about a user. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + email (str): + E-mail address of the user. + + This field is a member of `oneof`_ ``_email``. + """ + + email: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + + +class TransferConfig(proto.Message): + r"""Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data transfer. For + example, ``destination_dataset_id`` specifies where data should be + stored. When a new transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and shared with + the appropriate data source service account. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The resource name of the transfer config. Transfer config + names have the form either + ``projects/{project_id}/locations/{region}/transferConfigs/{config_id}`` + or ``projects/{project_id}/transferConfigs/{config_id}``, + where ``config_id`` is usually a UUID, even though it is not + guaranteed or required. The name is ignored when creating a + transfer config. + destination_dataset_id (str): + The BigQuery target dataset id. + + This field is a member of `oneof`_ ``destination``. + display_name (str): + User specified display name for the data + transfer. + data_source_id (str): + Data source ID. This cannot be changed once + data transfer is created. The full list of + available data source IDs can be returned + through an API call: + + https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list + params (google.protobuf.struct_pb2.Struct): + Parameters specific to each data source. For + more information see the bq tab in the 'Setting + up a data transfer' section for each data + source. For example the parameters for Cloud + Storage transfers are listed here: + + https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq + schedule (str): + Data transfer schedule. If the data source does not support + a custom schedule, this should be empty. If it is empty, the + default value for the data source will be used. The + specified times are in UTC. Examples of valid format: + ``1st,3rd monday of month 15:30``, + ``every wed,fri of jan,jun 13:15``, and + ``first sunday of quarter 00:00``. See more explanation + about the format here: + https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + + NOTE: The minimum interval time between recurring transfers + depends on the data source; refer to the documentation for + your data source. + schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions): + Options customizing the data transfer + schedule. + data_refresh_window_days (int): + The number of days to look back to automatically refresh the + data. For example, if ``data_refresh_window_days = 10``, + then every day BigQuery reingests data for [today-10, + today-1], rather than ingesting data for just [today-1]. + Only valid if the data source supports the feature. Set the + value to 0 to use the default value. + disabled (bool): + Is this config disabled. When set to true, no + runs are scheduled for a given transfer. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Data transfer modification time. + Ignored by server on input. + next_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Next time when data transfer + will run. + state (google.cloud.bigquery_datatransfer_v1.types.TransferState): + Output only. State of the most recently + updated transfer run. + user_id (int): + Deprecated. Unique ID of the user on whose + behalf transfer is done. + dataset_region (str): + Output only. Region in which BigQuery dataset + is located. + notification_pubsub_topic (str): + Pub/Sub topic where notifications will be sent after + transfer runs associated with this transfer config finish. 
+ + The format for specifying a pubsub topic is: + ``projects/{project}/topics/{topic}`` + email_preferences (google.cloud.bigquery_datatransfer_v1.types.EmailPreferences): + Email notifications will be sent according to + these preferences to the email address of the + user who owns this transfer config. + owner_info (google.cloud.bigquery_datatransfer_v1.types.UserInfo): + Output only. Information about the user whose credentials + are used to transfer data. Populated only for + ``transferConfigs.get`` requests. In case the user + information is not available, this field will not be + populated. + + This field is a member of `oneof`_ ``_owner_info``. + encryption_configuration (google.cloud.bigquery_datatransfer_v1.types.EncryptionConfiguration): + The encryption configuration part. Currently, + it is only used for the optional KMS key name. + The BigQuery service account of your project + must be granted permissions to use the key. Read + methods will return the key name applied in + effect. Write methods will apply the key if it + is present, or otherwise try to apply project + default keys if it is absent. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + destination_dataset_id: str = proto.Field( + proto.STRING, + number=2, + oneof="destination", + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + data_source_id: str = proto.Field( + proto.STRING, + number=5, + ) + params: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=9, + message=struct_pb2.Struct, + ) + schedule: str = proto.Field( + proto.STRING, + number=7, + ) + schedule_options: "ScheduleOptions" = proto.Field( + proto.MESSAGE, + number=24, + message="ScheduleOptions", + ) + data_refresh_window_days: int = proto.Field( + proto.INT32, + number=12, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=13, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + next_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + state: "TransferState" = proto.Field( + proto.ENUM, + number=10, + enum="TransferState", + ) + user_id: int = proto.Field( + proto.INT64, + number=11, + ) + dataset_region: str = proto.Field( + proto.STRING, + number=14, + ) + notification_pubsub_topic: str = proto.Field( + proto.STRING, + number=15, + ) + email_preferences: "EmailPreferences" = proto.Field( + proto.MESSAGE, + number=18, + message="EmailPreferences", + ) + owner_info: "UserInfo" = proto.Field( + proto.MESSAGE, + number=27, + optional=True, + message="UserInfo", + ) + encryption_configuration: "EncryptionConfiguration" = proto.Field( + proto.MESSAGE, + number=28, + message="EncryptionConfiguration", + ) + + +class EncryptionConfiguration(proto.Message): + r"""Represents the encryption configuration for a transfer. + + Attributes: + kms_key_name (google.protobuf.wrappers_pb2.StringValue): + The name of the KMS key used for encrypting + BigQuery data. + """ + + kms_key_name: wrappers_pb2.StringValue = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.StringValue, + ) + + +class TransferRun(proto.Message): + r"""Represents a data transfer run. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The resource name of the transfer run. Transfer run names + have the form + ``projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}``. 
The name is ignored when creating a transfer run. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Minimum time after which a transfer run can + be started. + run_time (google.protobuf.timestamp_pb2.Timestamp): + For batch transfer runs, specifies the date + and time when the data should be ingested. + error_status (google.rpc.status_pb2.Status): + Status of the transfer run. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when transfer run was + started. Parameter ignored by server for input + requests. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when transfer run ended. + Parameter ignored by server for input requests. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last time the data transfer run + state was updated. + params (google.protobuf.struct_pb2.Struct): + Output only. Parameters specific to each data + source. For more information see the bq tab in + the 'Setting up a data transfer' section for + each data source. For example the parameters for + Cloud Storage transfers are listed here: + + https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq + destination_dataset_id (str): + Output only. The BigQuery target dataset id. + + This field is a member of `oneof`_ ``destination``. + data_source_id (str): + Output only. Data source id. + state (google.cloud.bigquery_datatransfer_v1.types.TransferState): + Data transfer run state. Ignored for input + requests. + user_id (int): + Deprecated. Unique ID of the user on whose + behalf transfer is done. + schedule (str): + Output only. Describes the schedule of this transfer run if + it was created as part of a regular schedule. For batch + transfer runs that are scheduled manually, this is empty. + NOTE: the system might choose to delay the schedule + depending on the current load, so ``schedule_time`` doesn't + always match this. + notification_pubsub_topic (str): + Output only. Pub/Sub topic where a notification will be sent + after this transfer run finishes. + + The format for specifying a pubsub topic is: + ``projects/{project}/topics/{topic}`` + email_preferences (google.cloud.bigquery_datatransfer_v1.types.EmailPreferences): + Output only. Email notifications will be sent + according to these preferences to the email + address of the user who owns the transfer config + this run was derived from.
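A minimal sketch of inspecting runs via the fields above, assuming a hypothetical config name::

    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    for run in client.list_transfer_runs(
        request={
            "parent": "projects/my-project/transferConfigs/1234",
            "states": [bigquery_datatransfer_v1.TransferState.FAILED],
        }
    ):
        print(run.name, run.schedule_time, run.error_status.message)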
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + error_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=21, + message=status_pb2.Status, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + params: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=9, + message=struct_pb2.Struct, + ) + destination_dataset_id: str = proto.Field( + proto.STRING, + number=2, + oneof="destination", + ) + data_source_id: str = proto.Field( + proto.STRING, + number=7, + ) + state: "TransferState" = proto.Field( + proto.ENUM, + number=8, + enum="TransferState", + ) + user_id: int = proto.Field( + proto.INT64, + number=11, + ) + schedule: str = proto.Field( + proto.STRING, + number=12, + ) + notification_pubsub_topic: str = proto.Field( + proto.STRING, + number=23, + ) + email_preferences: "EmailPreferences" = proto.Field( + proto.MESSAGE, + number=25, + message="EmailPreferences", + ) + + +class TransferMessage(proto.Message): + r"""Represents a user facing message for a particular data + transfer run. + + Attributes: + message_time (google.protobuf.timestamp_pb2.Timestamp): + Time when message was logged. + severity (google.cloud.bigquery_datatransfer_v1.types.TransferMessage.MessageSeverity): + Message severity. + message_text (str): + Message text. + """ + + class MessageSeverity(proto.Enum): + r"""Represents data transfer user facing message severity. + + Values: + MESSAGE_SEVERITY_UNSPECIFIED (0): + No severity specified. + INFO (1): + Informational message. + WARNING (2): + Warning message. + ERROR (3): + Error message. + """ + MESSAGE_SEVERITY_UNSPECIFIED = 0 + INFO = 1 + WARNING = 2 + ERROR = 3 + + message_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + severity: MessageSeverity = proto.Field( + proto.ENUM, + number=2, + enum=MessageSeverity, + ) + message_text: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-datatransfer/mypy.ini b/packages/google-cloud-bigquery-datatransfer/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-bigquery-datatransfer/noxfile.py b/packages/google-cloud-bigquery-datatransfer/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-bigquery-datatransfer/renovate.json b/packages/google-cloud-bigquery-datatransfer/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-bigquery-datatransfer/samples/AUTHORING_GUIDE.md b/packages/google-cloud-bigquery-datatransfer/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/samples/CONTRIBUTING.md b/packages/google-cloud-bigquery-datatransfer/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/decrypt-secrets.sh b/packages/google-cloud-bigquery-datatransfer/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. 
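For reference, a rough Python equivalent of the gcloud fetches that follow, assuming the google-cloud-secret-manager client is installed (the secret and project names mirror the script)::

    from google.cloud import secretmanager

    client = secretmanager.SecretManagerServiceClient()
    name = (
        "projects/cloud-devrel-kokoro-resources"
        "/secrets/python-docs-samples-test-env/versions/latest"
    )
    payload = client.access_secret_version(name=name).payload.data
    with open("testing/test-env.sh", "wb") as f:
        f.write(payload)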
+if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/fixup_bigquery_datatransfer_v1_keywords.py b/packages/google-cloud-bigquery-datatransfer/scripts/fixup_bigquery_datatransfer_v1_keywords.py new file mode 100644 index 000000000000..329e19b517b1 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/fixup_bigquery_datatransfer_v1_keywords.py @@ -0,0 +1,190 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class bigquery_datatransferCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'check_valid_creds': ('name', ), + 'create_transfer_config': ('parent', 'transfer_config', 'authorization_code', 'version_info', 'service_account_name', ), + 'delete_transfer_config': ('name', ), + 'delete_transfer_run': ('name', ), + 'enroll_data_sources': ('name', 'data_source_ids', ), + 'get_data_source': ('name', ), + 'get_transfer_config': ('name', ), + 'get_transfer_run': ('name', ), + 'list_data_sources': ('parent', 'page_token', 'page_size', ), + 'list_transfer_configs': ('parent', 'data_source_ids', 'page_token', 'page_size', ), + 'list_transfer_logs': ('parent', 'page_token', 'page_size', 'message_types', ), + 'list_transfer_runs': ('parent', 'states', 'page_token', 'page_size', 'run_attempt', ), + 'schedule_transfer_runs': ('parent', 'start_time', 'end_time', ), + 'start_manual_transfer_runs': ('parent', 'requested_time_range', 'requested_run_time', ), + 'update_transfer_config': ('transfer_config', 'update_mask', 'authorization_code', 'version_info', 'service_account_name', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=bigquery_datatransferCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the bigquery_datatransfer client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/readme_gen.py b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
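To make the fixup transformation above concrete, a hypothetical before/after for one method call (the resource name is made up)::

    # Before: positional argument, as written against older client versions.
    client.get_transfer_config("projects/my-project/transferConfigs/1234")

    # After: a single request dict; retry/timeout/metadata stay as keywords.
    client.get_transfer_config(
        request={"name": "projects/my-project/transferConfigs/1234"}
    )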
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. 
code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. 
code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-bigquery-datatransfer/setup.cfg b/packages/google-cloud-bigquery-datatransfer/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-bigquery-datatransfer/setup.py b/packages/google-cloud-bigquery-datatransfer/setup.py new file mode 100644 index 000000000000..fd72264f11c2 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
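+#
+# A note on the version plumbing below (version string illustrative):
+# gapic_version.py contains a single assignment of the form
+# `__version__ = "3.11.0"`, which is exec()'d rather than imported so the
+# version can be read before the package and its dependencies are installed.
+# A leading "0" selects the Beta classifier; anything else, Production/Stable.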
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-bigquery-datatransfer" + + +description = "Google Cloud Bigquery Datatransfer API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/bigquery_datatransfer/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-bigquery-datatransfer/testing/.gitignore b/packages/google-cloud-bigquery-datatransfer/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.10.txt b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
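+# (Deliberately unpinned: for these Python versions the unit tests only need
+# the packages present. The lower-bound pins that exercise setup.py's floors,
+# e.g. google-api-core==1.34.0, live in constraints-3.7.txt.)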
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.11.txt b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.12.txt b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.7.txt b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.8.txt b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.9.txt b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-datatransfer/tests/__init__.py b/packages/google-cloud-bigquery-datatransfer/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-datatransfer/tests/system/__init__.py b/packages/google-cloud-bigquery-datatransfer/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-bigquery-datatransfer/tests/system/smoke_test.py b/packages/google-cloud-bigquery-datatransfer/tests/system/smoke_test.py new file mode 100644 index 000000000000..9de8e86f15cc --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/system/smoke_test.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import bigquery_datatransfer_v1 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_data_sources(project_id: str, transport: str): + client = bigquery_datatransfer_v1.DataTransferServiceClient(transport=transport) + + parent = client.common_location_path(project_id, location="us-central1") + client.list_data_sources(parent=parent) + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. + assert True diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/__init__.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/__init__.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py new file mode 100644 index 000000000000..faa28f67833e --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -0,0 +1,10827 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import ( + DataTransferServiceAsyncClient, + DataTransferServiceClient, + pagers, + transports, +) +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
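+# For example (values illustrative): with DEFAULT_ENDPOINT patched through
+# this helper, a client class whose default endpoint is "localhost:7469"
+# reports "foo.googleapis.com" instead, so the endpoint-selection tests below
+# never compare against a localhost address.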
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataTransferServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataTransferServiceClient, "grpc"), + (DataTransferServiceAsyncClient, "grpc_asyncio"), + (DataTransferServiceClient, "rest"), + ], +) +def test_data_transfer_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "bigquerydatatransfer.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigquerydatatransfer.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DataTransferServiceGrpcTransport, "grpc"), + (transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataTransferServiceRestTransport, "rest"), + ], +) +def test_data_transfer_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataTransferServiceClient, "grpc"), + (DataTransferServiceAsyncClient, "grpc_asyncio"), + (DataTransferServiceClient, "rest"), + ], +) +def test_data_transfer_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert 
isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "bigquerydatatransfer.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigquerydatatransfer.googleapis.com" + ) + + +def test_data_transfer_service_client_get_transport_class(): + transport = DataTransferServiceClient.get_transport_class() + available_transports = [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceRestTransport, + ] + assert transport in available_transports + + transport = DataTransferServiceClient.get_transport_class("grpc") + assert transport == transports.DataTransferServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + DataTransferServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceClient), +) +@mock.patch.object( + DataTransferServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceAsyncClient), +) +def test_data_transfer_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataTransferServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataTransferServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + "true", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + "false", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + "true", + ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DataTransferServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceClient), +) +@mock.patch.object( + DataTransferServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_transfer_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This 
tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient] +) +@mock.patch.object( + DataTransferServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceClient), +) +@mock.patch.object( + DataTransferServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceAsyncClient), +) +def test_data_transfer_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + ), + ], +) +def test_data_transfer_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + None, + ), + ], +) +def test_data_transfer_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_data_transfer_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DataTransferServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_data_transfer_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
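+    # (i.e. create_channel below must be invoked with credentials=file_creds,
+    # the credentials loaded from "credentials.json", rather than the ADC
+    # credentials returned by google.auth.default.)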
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigquerydatatransfer.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="bigquerydatatransfer.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.DataSource( + name="name_value", + data_source_id="data_source_id_value", + display_name="display_name_value", + description="description_value", + client_id="client_id_value", + scopes=["scopes_value"], + transfer_type=transfer.TransferType.BATCH, + supports_multiple_transfers=True, + update_deadline_seconds=2406, + default_schedule="default_schedule_value", + supports_custom_schedule=True, + help_url="help_url_value", + authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, + data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, + default_data_refresh_window_days=3379, + manual_runs_disabled=True, + ) + response = client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.GetDataSourceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datatransfer.DataSource) + assert response.name == "name_value" + assert response.data_source_id == "data_source_id_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.client_id == "client_id_value" + assert response.scopes == ["scopes_value"] + assert response.transfer_type == transfer.TransferType.BATCH + assert response.supports_multiple_transfers is True + assert response.update_deadline_seconds == 2406 + assert response.default_schedule == "default_schedule_value" + assert response.supports_custom_schedule is True + assert response.help_url == "help_url_value" + assert ( + response.authorization_type + == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE + ) + assert ( + response.data_refresh_type + == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW + ) + assert response.default_data_refresh_window_days == 3379 + assert response.manual_runs_disabled is True + + +def test_get_data_source_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + client.get_data_source() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.GetDataSourceRequest() + + +@pytest.mark.asyncio +async def test_get_data_source_async( + transport: str = "grpc_asyncio", request_type=datatransfer.GetDataSourceRequest +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.DataSource( + name="name_value", + data_source_id="data_source_id_value", + display_name="display_name_value", + description="description_value", + client_id="client_id_value", + scopes=["scopes_value"], + transfer_type=transfer.TransferType.BATCH, + supports_multiple_transfers=True, + update_deadline_seconds=2406, + default_schedule="default_schedule_value", + supports_custom_schedule=True, + help_url="help_url_value", + authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, + data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, + default_data_refresh_window_days=3379, + manual_runs_disabled=True, + ) + ) + response = await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.GetDataSourceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datatransfer.DataSource) + assert response.name == "name_value" + assert response.data_source_id == "data_source_id_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.client_id == "client_id_value" + assert response.scopes == ["scopes_value"] + assert response.transfer_type == transfer.TransferType.BATCH + assert response.supports_multiple_transfers is True + assert response.update_deadline_seconds == 2406 + assert response.default_schedule == "default_schedule_value" + assert response.supports_custom_schedule is True + assert response.help_url == "help_url_value" + assert ( + response.authorization_type + == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE + ) + assert ( + response.data_refresh_type + == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW + ) + assert response.default_data_refresh_window_days == 3379 + assert response.manual_runs_disabled is True + + +@pytest.mark.asyncio +async def test_get_data_source_async_from_dict(): + await test_get_data_source_async(request_type=dict) + + +def test_get_data_source_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = datatransfer.DataSource() + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_source_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.GetDataSourceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.DataSource() + ) + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_source_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.DataSource() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_source_flattened_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + datatransfer.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_source( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_source( + datatransfer.GetDataSourceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.ListDataSourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_sources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
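+    # (Such a call should fall back to sending an empty
+    # ListDataSourcesRequest, which the assertion below verifies.)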
+ client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + client.list_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.ListDataSourcesRequest() + + +@pytest.mark.asyncio +async def test_list_data_sources_async( + transport: str = "grpc_asyncio", request_type=datatransfer.ListDataSourcesRequest +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.ListDataSourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_data_sources_async_from_dict(): + await test_list_data_sources_async(request_type=dict) + + +def test_list_data_sources_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = datatransfer.ListDataSourcesResponse() + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_sources_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListDataSourcesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListDataSourcesResponse() + ) + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_sources_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListDataSourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_sources_flattened_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + datatransfer.ListDataSourcesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListDataSourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListDataSourcesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_sources( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_sources( + datatransfer.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_pager(transport_name: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + ], + next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_sources(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datatransfer.DataSource) for i in results) + + +def test_list_data_sources_pages(transport_name: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + ], + next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_sources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pager(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + ], + next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_sources( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datatransfer.DataSource) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pages(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
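
# For context on the pager tests above: iterating a pager re-invokes the RPC
# whenever a page carries a non-empty next_page_token and yields the items of
# every page in order. A rough sketch, reusing the mocked client and the
# four-page side_effect configured in the test:
pager = client.list_data_sources(request={})  # first RPC is issued here
results = list(pager)  # follows tokens "abc", "def", "ghi": 6 items in total
# `pager.pages` yields the raw responses instead; the final page's empty
# next_page_token ("") is what stops the iteration.
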
+ with mock.patch.object( + type(client.transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + ], + next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_sources(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.CreateTransferConfigRequest, + dict, + ], +) +def test_create_transfer_config(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + response = client.create_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.CreateTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_create_transfer_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_transfer_config), "__call__" + ) as call: + client.create_transfer_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.CreateTransferConfigRequest() + + +@pytest.mark.asyncio +async def test_create_transfer_config_async( + transport: str = "grpc_asyncio", + request_type=datatransfer.CreateTransferConfigRequest, +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + ) + ) + response = await client.create_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.CreateTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +@pytest.mark.asyncio +async def test_create_transfer_config_async_from_dict(): + await test_create_transfer_config_async(request_type=dict) + + +def test_create_transfer_config_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.CreateTransferConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_transfer_config), "__call__" + ) as call: + call.return_value = transfer.TransferConfig() + client.create_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
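
# The async variants above mock the transport with FakeUnaryUnaryCall from
# google.api_core.grpc_helpers_async: awaiting it simply resolves to the
# wrapped response message. A minimal sketch (reusing this file's `transfer`
# import):
import asyncio

from google.api_core import grpc_helpers_async

async def _demo_fake_call():
    call = grpc_helpers_async.FakeUnaryUnaryCall(
        transfer.TransferConfig(name="name_value")
    )
    response = await call  # resolves to the wrapped TransferConfig
    assert response.name == "name_value"

asyncio.run(_demo_fake_call())
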
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_transfer_config_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.CreateTransferConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_transfer_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + await client.create_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_transfer_config_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_transfer_config( + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].transfer_config + mock_val = transfer.TransferConfig(name="name_value") + assert arg == mock_val + + +def test_create_transfer_config_flattened_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_transfer_config( + datatransfer.CreateTransferConfigRequest(), + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_transfer_config_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_transfer_config( + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].transfer_config + mock_val = transfer.TransferConfig(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_transfer_config_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_transfer_config( + datatransfer.CreateTransferConfigRequest(), + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.UpdateTransferConfigRequest, + dict, + ], +) +def test_update_transfer_config(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + response = client.update_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.UpdateTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_update_transfer_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_transfer_config), "__call__" + ) as call: + client.update_transfer_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.UpdateTransferConfigRequest() + + +@pytest.mark.asyncio +async def test_update_transfer_config_async( + transport: str = "grpc_asyncio", + request_type=datatransfer.UpdateTransferConfigRequest, +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + ) + ) + response = await client.update_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.UpdateTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +@pytest.mark.asyncio +async def test_update_transfer_config_async_from_dict(): + await test_update_transfer_config_async(request_type=dict) + + +def test_update_transfer_config_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.UpdateTransferConfigRequest() + + request.transfer_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_transfer_config), "__call__" + ) as call: + call.return_value = transfer.TransferConfig() + client.update_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "transfer_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_transfer_config_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.UpdateTransferConfigRequest() + + request.transfer_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_transfer_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + await client.update_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "transfer_config.name=name_value", + ) in kw["metadata"] + + +def test_update_transfer_config_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_transfer_config( + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].transfer_config + mock_val = transfer.TransferConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_transfer_config_flattened_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_transfer_config( + datatransfer.UpdateTransferConfigRequest(), + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_transfer_config_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_transfer_config( + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].transfer_config + mock_val = transfer.TransferConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_transfer_config_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_transfer_config( + datatransfer.UpdateTransferConfigRequest(), + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.DeleteTransferConfigRequest, + dict, + ], +) +def test_delete_transfer_config(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.DeleteTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_transfer_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_transfer_config), "__call__" + ) as call: + client.delete_transfer_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.DeleteTransferConfigRequest() + + +@pytest.mark.asyncio +async def test_delete_transfer_config_async( + transport: str = "grpc_asyncio", + request_type=datatransfer.DeleteTransferConfigRequest, +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
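
# For context on the update_mask arguments above: the FieldMask names the
# TransferConfig fields the service should overwrite, so fields left out of
# the mask keep their stored values. A hedged sketch (the resource name and
# display name below are purely illustrative):
from google.protobuf import field_mask_pb2

client.update_transfer_config(
    transfer_config=transfer.TransferConfig(
        name="projects/p/locations/us/transferConfigs/c",  # hypothetical
        display_name="nightly load",  # hypothetical
    ),
    update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
)
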
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.DeleteTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_transfer_config_async_from_dict(): + await test_delete_transfer_config_async(request_type=dict) + + +def test_delete_transfer_config_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.DeleteTransferConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_transfer_config), "__call__" + ) as call: + call.return_value = None + client.delete_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_transfer_config_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.DeleteTransferConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_transfer_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_transfer_config_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_transfer_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_transfer_config_flattened_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_transfer_config( + datatransfer.DeleteTransferConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_transfer_config_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_transfer_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_transfer_config_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_transfer_config( + datatransfer.DeleteTransferConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.GetTransferConfigRequest, + dict, + ], +) +def test_get_transfer_config(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + response = client.get_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.GetTransferConfigRequest() + + # Establish that the response is the type that we expect. 
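
# Context for the `assert response is None` checks in the delete tests above:
# DeleteTransferConfig has no response payload (google.protobuf.Empty on the
# wire), which the generated client surfaces as a plain None.
assert client.delete_transfer_config(name="name_value") is None
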
+ assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transfer_config), "__call__" + ) as call: + client.get_transfer_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.GetTransferConfigRequest() + + +@pytest.mark.asyncio +async def test_get_transfer_config_async( + transport: str = "grpc_asyncio", request_type=datatransfer.GetTransferConfigRequest +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + ) + ) + response = await client.get_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.GetTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +@pytest.mark.asyncio +async def test_get_transfer_config_async_from_dict(): + await test_get_transfer_config_async(request_type=dict) + + +def test_get_transfer_config_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
+    # Set these to a non-empty value.
+    request = datatransfer.GetTransferConfigRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_transfer_config), "__call__"
+    ) as call:
+        call.return_value = transfer.TransferConfig()
+        client.get_transfer_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_transfer_config_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.GetTransferConfigRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_transfer_config), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            transfer.TransferConfig()
+        )
+        await client.get_transfer_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_transfer_config_flattened():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_transfer_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = transfer.TransferConfig()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_transfer_config(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_transfer_config_flattened_error():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_transfer_config(
+            datatransfer.GetTransferConfigRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_transfer_config_flattened_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_transfer_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = transfer.TransferConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_transfer_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_transfer_config_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_transfer_config( + datatransfer.GetTransferConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListTransferConfigsRequest, + dict, + ], +) +def test_list_transfer_configs(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferConfigsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_transfer_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.ListTransferConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_configs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + client.list_transfer_configs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.ListTransferConfigsRequest() + + +@pytest.mark.asyncio +async def test_list_transfer_configs_async( + transport: str = "grpc_asyncio", + request_type=datatransfer.ListTransferConfigsRequest, +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferConfigsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_transfer_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.ListTransferConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_transfer_configs_async_from_dict(): + await test_list_transfer_configs_async(request_type=dict) + + +def test_list_transfer_configs_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListTransferConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + call.return_value = datatransfer.ListTransferConfigsResponse() + client.list_transfer_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_transfer_configs_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListTransferConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferConfigsResponse() + ) + await client.list_transfer_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_transfer_configs_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_transfer_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_transfer_configs_flattened_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transfer_configs( + datatransfer.ListTransferConfigsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_transfer_configs_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_transfer_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_transfer_configs_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_transfer_configs( + datatransfer.ListTransferConfigsRequest(), + parent="parent_value", + ) + + +def test_list_transfer_configs_pager(transport_name: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[], + next_page_token="def", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + ], + next_page_token="ghi", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_transfer_configs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferConfig) for i in results) + + +def test_list_transfer_configs_pages(transport_name: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[], + next_page_token="def", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + ], + next_page_token="ghi", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_transfer_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_transfer_configs_async_pager(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_transfer_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+        call.side_effect = (
+            datatransfer.ListTransferConfigsResponse(
+                transfer_configs=[
+                    transfer.TransferConfig(),
+                    transfer.TransferConfig(),
+                    transfer.TransferConfig(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferConfigsResponse(
+                transfer_configs=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferConfigsResponse(
+                transfer_configs=[
+                    transfer.TransferConfig(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferConfigsResponse(
+                transfer_configs=[
+                    transfer.TransferConfig(),
+                    transfer.TransferConfig(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_transfer_configs(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, transfer.TransferConfig) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_configs_async_pages():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_configs),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datatransfer.ListTransferConfigsResponse(
+                transfer_configs=[
+                    transfer.TransferConfig(),
+                    transfer.TransferConfig(),
+                    transfer.TransferConfig(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferConfigsResponse(
+                transfer_configs=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferConfigsResponse(
+                transfer_configs=[
+                    transfer.TransferConfig(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferConfigsResponse(
+                transfer_configs=[
+                    transfer.TransferConfig(),
+                    transfer.TransferConfig(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_transfer_configs(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datatransfer.ScheduleTransferRunsRequest,
+        dict,
+    ],
+)
+def test_schedule_transfer_runs(request_type, transport: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.schedule_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datatransfer.ScheduleTransferRunsResponse()
+        response = client.schedule_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ScheduleTransferRunsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datatransfer.ScheduleTransferRunsResponse)
+
+
+def test_schedule_transfer_runs_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.schedule_transfer_runs), "__call__"
+    ) as call:
+        client.schedule_transfer_runs()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ScheduleTransferRunsRequest()
+
+
+@pytest.mark.asyncio
+async def test_schedule_transfer_runs_async(
+    transport: str = "grpc_asyncio",
+    request_type=datatransfer.ScheduleTransferRunsRequest,
+):
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.schedule_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ScheduleTransferRunsResponse()
+        )
+        response = await client.schedule_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ScheduleTransferRunsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datatransfer.ScheduleTransferRunsResponse)
+
+
+@pytest.mark.asyncio
+async def test_schedule_transfer_runs_async_from_dict():
+    await test_schedule_transfer_runs_async(request_type=dict)
+
+
+def test_schedule_transfer_runs_field_headers():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.ScheduleTransferRunsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.schedule_transfer_runs), "__call__"
+    ) as call:
+        call.return_value = datatransfer.ScheduleTransferRunsResponse()
+        client.schedule_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_schedule_transfer_runs_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.ScheduleTransferRunsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
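+    # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response in an
+    # awaitable, standing in for the UnaryUnaryCall object a real async
+    # gRPC channel would return.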
+    with mock.patch.object(
+        type(client.transport.schedule_transfer_runs), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ScheduleTransferRunsResponse()
+        )
+        await client.schedule_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_schedule_transfer_runs_flattened():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.schedule_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datatransfer.ScheduleTransferRunsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.schedule_transfer_runs(
+            parent="parent_value",
+            start_time=timestamp_pb2.Timestamp(seconds=751),
+            end_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        assert TimestampRule().to_proto(args[0].start_time) == timestamp_pb2.Timestamp(
+            seconds=751
+        )
+        assert TimestampRule().to_proto(args[0].end_time) == timestamp_pb2.Timestamp(
+            seconds=751
+        )
+
+
+def test_schedule_transfer_runs_flattened_error():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.schedule_transfer_runs(
+            datatransfer.ScheduleTransferRunsRequest(),
+            parent="parent_value",
+            start_time=timestamp_pb2.Timestamp(seconds=751),
+            end_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+
+@pytest.mark.asyncio
+async def test_schedule_transfer_runs_flattened_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.schedule_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ScheduleTransferRunsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.schedule_transfer_runs(
+            parent="parent_value",
+            start_time=timestamp_pb2.Timestamp(seconds=751),
+            end_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
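+        # start_time/end_time are stored as datetimes by proto-plus, so they
+        # are converted back via TimestampRule before being compared to the
+        # timestamp_pb2 values that were passed in.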
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        assert TimestampRule().to_proto(args[0].start_time) == timestamp_pb2.Timestamp(
+            seconds=751
+        )
+        assert TimestampRule().to_proto(args[0].end_time) == timestamp_pb2.Timestamp(
+            seconds=751
+        )
+
+
+@pytest.mark.asyncio
+async def test_schedule_transfer_runs_flattened_error_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.schedule_transfer_runs(
+            datatransfer.ScheduleTransferRunsRequest(),
+            parent="parent_value",
+            start_time=timestamp_pb2.Timestamp(seconds=751),
+            end_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datatransfer.StartManualTransferRunsRequest,
+        dict,
+    ],
+)
+def test_start_manual_transfer_runs(request_type, transport: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_manual_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datatransfer.StartManualTransferRunsResponse()
+        response = client.start_manual_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.StartManualTransferRunsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datatransfer.StartManualTransferRunsResponse)
+
+
+def test_start_manual_transfer_runs_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_manual_transfer_runs), "__call__"
+    ) as call:
+        client.start_manual_transfer_runs()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.StartManualTransferRunsRequest()
+
+
+@pytest.mark.asyncio
+async def test_start_manual_transfer_runs_async(
+    transport: str = "grpc_asyncio",
+    request_type=datatransfer.StartManualTransferRunsRequest,
+):
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_manual_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.StartManualTransferRunsResponse()
+        )
+        response = await client.start_manual_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.StartManualTransferRunsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, datatransfer.StartManualTransferRunsResponse)
+
+
+@pytest.mark.asyncio
+async def test_start_manual_transfer_runs_async_from_dict():
+    await test_start_manual_transfer_runs_async(request_type=dict)
+
+
+def test_start_manual_transfer_runs_field_headers():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.StartManualTransferRunsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_manual_transfer_runs), "__call__"
+    ) as call:
+        call.return_value = datatransfer.StartManualTransferRunsResponse()
+        client.start_manual_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_start_manual_transfer_runs_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.StartManualTransferRunsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_manual_transfer_runs), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.StartManualTransferRunsResponse()
+        )
+        await client.start_manual_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datatransfer.GetTransferRunRequest,
+        dict,
+    ],
+)
+def test_get_transfer_run(request_type, transport: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
+        # Designate an appropriate return value for the call.
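+        # Populate representative fields so the assertions below can verify
+        # that each one round-trips through the client unchanged.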
+        call.return_value = transfer.TransferRun(
+            name="name_value",
+            data_source_id="data_source_id_value",
+            state=transfer.TransferState.PENDING,
+            user_id=747,
+            schedule="schedule_value",
+            notification_pubsub_topic="notification_pubsub_topic_value",
+            destination_dataset_id="destination_dataset_id_value",
+        )
+        response = client.get_transfer_run(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.GetTransferRunRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, transfer.TransferRun)
+    assert response.name == "name_value"
+    assert response.data_source_id == "data_source_id_value"
+    assert response.state == transfer.TransferState.PENDING
+    assert response.user_id == 747
+    assert response.schedule == "schedule_value"
+    assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
+
+
+def test_get_transfer_run_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
+        client.get_transfer_run()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.GetTransferRunRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_transfer_run_async(
+    transport: str = "grpc_asyncio", request_type=datatransfer.GetTransferRunRequest
+):
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            transfer.TransferRun(
+                name="name_value",
+                data_source_id="data_source_id_value",
+                state=transfer.TransferState.PENDING,
+                user_id=747,
+                schedule="schedule_value",
+                notification_pubsub_topic="notification_pubsub_topic_value",
+            )
+        )
+        response = await client.get_transfer_run(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.GetTransferRunRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, transfer.TransferRun)
+    assert response.name == "name_value"
+    assert response.data_source_id == "data_source_id_value"
+    assert response.state == transfer.TransferState.PENDING
+    assert response.user_id == 747
+    assert response.schedule == "schedule_value"
+    assert response.notification_pubsub_topic == "notification_pubsub_topic_value"
+
+
+@pytest.mark.asyncio
+async def test_get_transfer_run_async_from_dict():
+    await test_get_transfer_run_async(request_type=dict)
+
+
+def test_get_transfer_run_field_headers():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.GetTransferRunRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
+        call.return_value = transfer.TransferRun()
+        client.get_transfer_run(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_transfer_run_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.GetTransferRunRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            transfer.TransferRun()
+        )
+        await client.get_transfer_run(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_transfer_run_flattened():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = transfer.TransferRun()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_transfer_run(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_transfer_run_flattened_error():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
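+    # The client has no way to merge the two sources of field values, so it
+    # raises rather than guess which one should win.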
+    with pytest.raises(ValueError):
+        client.get_transfer_run(
+            datatransfer.GetTransferRunRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_transfer_run_flattened_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_transfer_run), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            transfer.TransferRun()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_transfer_run(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_transfer_run_flattened_error_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_transfer_run(
+            datatransfer.GetTransferRunRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datatransfer.DeleteTransferRunRequest,
+        dict,
+    ],
+)
+def test_delete_transfer_run(request_type, transport: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_transfer_run), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_transfer_run(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.DeleteTransferRunRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_transfer_run_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_transfer_run), "__call__"
+    ) as call:
+        client.delete_transfer_run()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.DeleteTransferRunRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_transfer_run_async(
+    transport: str = "grpc_asyncio", request_type=datatransfer.DeleteTransferRunRequest
+):
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_transfer_run), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_transfer_run(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.DeleteTransferRunRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_transfer_run_async_from_dict():
+    await test_delete_transfer_run_async(request_type=dict)
+
+
+def test_delete_transfer_run_field_headers():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.DeleteTransferRunRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_transfer_run), "__call__"
+    ) as call:
+        call.return_value = None
+        client.delete_transfer_run(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_transfer_run_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.DeleteTransferRunRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_transfer_run), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_transfer_run(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
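+    # The routing parameters travel in x-goog-request-params metadata so the
+    # service can route the request by resource name.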
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_transfer_run_flattened():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_transfer_run), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_transfer_run(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_transfer_run_flattened_error():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_transfer_run(
+            datatransfer.DeleteTransferRunRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_transfer_run_flattened_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_transfer_run), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_transfer_run(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_transfer_run_flattened_error_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_transfer_run(
+            datatransfer.DeleteTransferRunRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datatransfer.ListTransferRunsRequest,
+        dict,
+    ],
+)
+def test_list_transfer_runs(request_type, transport: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datatransfer.ListTransferRunsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ListTransferRunsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListTransferRunsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_transfer_runs_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        client.list_transfer_runs()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ListTransferRunsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_runs_async(
+    transport: str = "grpc_asyncio", request_type=datatransfer.ListTransferRunsRequest
+):
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ListTransferRunsResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = await client.list_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ListTransferRunsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListTransferRunsAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_runs_async_from_dict():
+    await test_list_transfer_runs_async(request_type=dict)
+
+
+def test_list_transfer_runs_field_headers():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.ListTransferRunsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        call.return_value = datatransfer.ListTransferRunsResponse()
+        client.list_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_runs_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.ListTransferRunsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ListTransferRunsResponse()
+        )
+        await client.list_transfer_runs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_transfer_runs_flattened():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datatransfer.ListTransferRunsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_transfer_runs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_transfer_runs_flattened_error():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_transfer_runs(
+            datatransfer.ListTransferRunsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_runs_flattened_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ListTransferRunsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_transfer_runs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_runs_flattened_error_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_transfer_runs(
+            datatransfer.ListTransferRunsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_transfer_runs_pager(transport_name: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_transfer_runs(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, transfer.TransferRun) for i in results)
+
+
+def test_list_transfer_runs_pages(transport_name: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_transfer_runs(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_runs_async_pager():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
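+        # Same four-page sequence as the sync pager tests above, consumed
+        # here through the async iteration protocol instead.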
+        call.side_effect = (
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_transfer_runs(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, transfer.TransferRun) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_runs_async_pages():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_runs),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferRunsResponse(
+                transfer_runs=[
+                    transfer.TransferRun(),
+                    transfer.TransferRun(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_transfer_runs(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datatransfer.ListTransferLogsRequest,
+        dict,
+    ],
+)
+def test_list_transfer_logs(request_type, transport: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datatransfer.ListTransferLogsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_transfer_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ListTransferLogsRequest()
+
+    # Establish that the response is the type that we expect.
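+    # The method returns a pager wrapping the response, not the raw
+    # ListTransferLogsResponse; the pager lazily fetches further pages.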
+    assert isinstance(response, pagers.ListTransferLogsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_transfer_logs_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        client.list_transfer_logs()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ListTransferLogsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_logs_async(
+    transport: str = "grpc_asyncio", request_type=datatransfer.ListTransferLogsRequest
+):
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ListTransferLogsResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = await client.list_transfer_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.ListTransferLogsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListTransferLogsAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_logs_async_from_dict():
+    await test_list_transfer_logs_async(request_type=dict)
+
+
+def test_list_transfer_logs_field_headers():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.ListTransferLogsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        call.return_value = datatransfer.ListTransferLogsResponse()
+        client.list_transfer_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_logs_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = datatransfer.ListTransferLogsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ListTransferLogsResponse()
+        )
+        await client.list_transfer_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_transfer_logs_flattened():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datatransfer.ListTransferLogsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_transfer_logs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_transfer_logs_flattened_error():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_transfer_logs(
+            datatransfer.ListTransferLogsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_logs_flattened_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            datatransfer.ListTransferLogsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_transfer_logs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_logs_flattened_error_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_transfer_logs(
+            datatransfer.ListTransferLogsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_transfer_logs_pager(transport_name: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_transfer_logs(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, transfer.TransferMessage) for i in results)
+
+
+def test_list_transfer_logs_pages(transport_name: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_transfer_logs(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_logs_async_pager():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_transfer_logs(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, transfer.TransferMessage) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_transfer_logs_async_pages():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_transfer_logs),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                ],
+                next_page_token="abc",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[],
+                next_page_token="def",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                ],
+                next_page_token="ghi",
+            ),
+            datatransfer.ListTransferLogsResponse(
+                transfer_messages=[
+                    transfer.TransferMessage(),
+                    transfer.TransferMessage(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_transfer_logs(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datatransfer.CheckValidCredsRequest,
+        dict,
+    ],
+)
+def test_check_valid_creds(request_type, transport: str = "grpc"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.check_valid_creds), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = datatransfer.CheckValidCredsResponse(
+            has_valid_creds=True,
+        )
+        response = client.check_valid_creds(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datatransfer.CheckValidCredsRequest()
+
+    # Establish that the response is the type that we expect.
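+    # CheckValidCreds is not a paged method, so the raw response type is
+    # surfaced directly along with its has_valid_creds flag.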
+ assert isinstance(response, datatransfer.CheckValidCredsResponse) + assert response.has_valid_creds is True + + +def test_check_valid_creds_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_valid_creds), "__call__" + ) as call: + client.check_valid_creds() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.CheckValidCredsRequest() + + +@pytest.mark.asyncio +async def test_check_valid_creds_async( + transport: str = "grpc_asyncio", request_type=datatransfer.CheckValidCredsRequest +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_valid_creds), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.CheckValidCredsResponse( + has_valid_creds=True, + ) + ) + response = await client.check_valid_creds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.CheckValidCredsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.CheckValidCredsResponse) + assert response.has_valid_creds is True + + +@pytest.mark.asyncio +async def test_check_valid_creds_async_from_dict(): + await test_check_valid_creds_async(request_type=dict) + + +def test_check_valid_creds_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.CheckValidCredsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_valid_creds), "__call__" + ) as call: + call.return_value = datatransfer.CheckValidCredsResponse() + client.check_valid_creds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_check_valid_creds_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.CheckValidCredsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
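+    # grpc_helpers_async.FakeUnaryUnaryCall wraps the response so the mocked
+    # stub returns an awaitable, as a real async unary-unary gRPC call would.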
+ with mock.patch.object( + type(client.transport.check_valid_creds), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.CheckValidCredsResponse() + ) + await client.check_valid_creds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_check_valid_creds_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_valid_creds), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.CheckValidCredsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.check_valid_creds( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_check_valid_creds_flattened_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.check_valid_creds( + datatransfer.CheckValidCredsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_check_valid_creds_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_valid_creds), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.CheckValidCredsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.CheckValidCredsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.check_valid_creds( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_check_valid_creds_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.check_valid_creds( + datatransfer.CheckValidCredsRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.EnrollDataSourcesRequest, + dict, + ], +) +def test_enroll_data_sources(request_type, transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.enroll_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.EnrollDataSourcesRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_enroll_data_sources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + client.enroll_data_sources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.EnrollDataSourcesRequest() + + +@pytest.mark.asyncio +async def test_enroll_data_sources_async( + transport: str = "grpc_asyncio", request_type=datatransfer.EnrollDataSourcesRequest +): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.enroll_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datatransfer.EnrollDataSourcesRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_enroll_data_sources_async_from_dict(): + await test_enroll_data_sources_async(request_type=dict) + + +def test_enroll_data_sources_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.EnrollDataSourcesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
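+    # The `name` set on the request above should be echoed back verbatim in
+    # the x-goog-request-params routing header, which is asserted below.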
+ with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + call.return_value = None + client.enroll_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_enroll_data_sources_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.EnrollDataSourcesRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.enroll_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.enroll_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataSources/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.DataSource( + name="name_value", + data_source_id="data_source_id_value", + display_name="display_name_value", + description="description_value", + client_id="client_id_value", + scopes=["scopes_value"], + transfer_type=transfer.TransferType.BATCH, + supports_multiple_transfers=True, + update_deadline_seconds=2406, + default_schedule="default_schedule_value", + supports_custom_schedule=True, + help_url="help_url_value", + authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, + data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, + default_data_refresh_window_days=3379, + manual_runs_disabled=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_source(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datatransfer.DataSource) + assert response.name == "name_value" + assert response.data_source_id == "data_source_id_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.client_id == "client_id_value" + assert response.scopes == ["scopes_value"] + assert response.transfer_type == transfer.TransferType.BATCH + assert response.supports_multiple_transfers is True + assert response.update_deadline_seconds == 2406 + assert response.default_schedule == "default_schedule_value" + assert response.supports_custom_schedule is True + assert response.help_url == "help_url_value" + assert ( + response.authorization_type + == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE + ) + assert ( + response.data_refresh_type + == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW + ) + assert response.default_data_refresh_window_days == 3379 + assert response.manual_runs_disabled is True + + +def test_get_data_source_rest_required_fields( + request_type=datatransfer.GetDataSourceRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.DataSource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
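+            # "v1/sample_method" is only a placeholder URI; the real mapping
+            # never runs here because transcode() itself is mocked out.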
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_source_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_get_data_source" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_get_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.GetDataSourceRequest.pb( + datatransfer.GetDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.DataSource.to_json( + datatransfer.DataSource() + ) + + request = datatransfer.GetDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.DataSource() + + client.get_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_source_rest_bad_request( + transport: str = "rest", request_type=datatransfer.GetDataSourceRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataSources/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
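+    # The mocked 400 status should surface to the caller as
+    # core_exceptions.BadRequest rather than as a raw HTTP error.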
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_source(request) + + +def test_get_data_source_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataSources/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + datatransfer.GetDataSourceRequest(), + name="name_value", + ) + + +def test_get_data_source_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_sources(request) + + # Establish that the response is the type that we expect. 
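+    # Like its gRPC counterpart, the REST method wraps the response in a
+    # pager; only the first page is fetched by the single mocked request.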
+ assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_sources_rest_required_fields( + request_type=datatransfer.ListDataSourcesRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListDataSourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_sources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_sources_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_sources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_sources_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_list_data_sources" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_list_data_sources" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.ListDataSourcesRequest.pb( + datatransfer.ListDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.ListDataSourcesResponse.to_json( + datatransfer.ListDataSourcesResponse() + ) + + request = datatransfer.ListDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.ListDataSourcesResponse() + + client.list_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_sources_rest_bad_request( + transport: str = "rest", request_type=datatransfer.ListDataSourcesRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_sources(request) + + +def test_list_data_sources_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListDataSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dataSources" + % client.transport._host, + args[1], + ) + + +def test_list_data_sources_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + datatransfer.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_rest_pager(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + ], + next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datatransfer.ListDataSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_data_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datatransfer.DataSource) for i in results) + + pages = list(client.list_data_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.CreateTransferConfigRequest, + dict, + ], +) +def test_create_transfer_config_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["transfer_config"] = { + "name": "name_value", + "destination_dataset_id": "destination_dataset_id_value", + "display_name": "display_name_value", + "data_source_id": "data_source_id_value", + "params": {"fields": {}}, + "schedule": "schedule_value", + "schedule_options": { + "disable_auto_scheduling": True, + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "data_refresh_window_days": 2543, + "disabled": True, + "update_time": {}, + "next_run_time": {}, + "state": 2, + "user_id": 747, + "dataset_region": "dataset_region_value", + "notification_pubsub_topic": "notification_pubsub_topic_value", + "email_preferences": {"enable_failure_email": True}, + "owner_info": {"email": "email_value"}, + "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_transfer_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_create_transfer_config_rest_required_fields( + request_type=datatransfer.CreateTransferConfigRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_transfer_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "authorization_code", + "service_account_name", + "version_info", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = transfer.TransferConfig.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_transfer_config(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_transfer_config_rest_unset_required_fields():
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_transfer_config._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "authorizationCode",
+                "serviceAccountName",
+                "versionInfo",
+            )
+        )
+        & set(
+            (
+                "parent",
+                "transferConfig",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_transfer_config_rest_interceptors(null_interceptor):
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DataTransferServiceRestInterceptor(),
+    )
+    client = DataTransferServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "post_create_transfer_config"
+    ) as post, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "pre_create_transfer_config"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = datatransfer.CreateTransferConfigRequest.pb(
+            datatransfer.CreateTransferConfigRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = transfer.TransferConfig.to_json(
+            transfer.TransferConfig()
+        )
+
+        request = datatransfer.CreateTransferConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = transfer.TransferConfig()
+
+        client.create_transfer_config(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_transfer_config_rest_bad_request(
+    transport: str = "rest", request_type=datatransfer.CreateTransferConfigRequest
+):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["transfer_config"] = {
+        "name": "name_value",
+        "destination_dataset_id": "destination_dataset_id_value",
+        "display_name": "display_name_value",
+        "data_source_id": "data_source_id_value",
+        "params": {"fields": {}},
+        "schedule": "schedule_value",
+        "schedule_options": {
+            "disable_auto_scheduling": True,
+            "start_time": {"seconds": 751, "nanos": 543},
+            "end_time": {},
+        },
+        "data_refresh_window_days": 2543,
+        "disabled": True,
+        "update_time": {},
+        "next_run_time": {},
+        "state": 2,
+        "user_id": 747,
+        "dataset_region": "dataset_region_value",
+        "notification_pubsub_topic": "notification_pubsub_topic_value",
+        "email_preferences": {"enable_failure_email": True},
+        "owner_info": {"email": "email_value"},
+        "encryption_configuration": {"kms_key_name": {"value": "value_value"}},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_transfer_config(request)
+
+
+def test_create_transfer_config_rest_flattened():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = transfer.TransferConfig()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            transfer_config=transfer.TransferConfig(name="name_value"),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = transfer.TransferConfig.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_transfer_config(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*}/transferConfigs"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_transfer_config_rest_flattened_error(transport: str = "rest"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_transfer_config( + datatransfer.CreateTransferConfigRequest(), + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + +def test_create_transfer_config_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.UpdateTransferConfigRequest, + dict, + ], +) +def test_update_transfer_config_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "transfer_config": { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + } + request_init["transfer_config"] = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3", + "destination_dataset_id": "destination_dataset_id_value", + "display_name": "display_name_value", + "data_source_id": "data_source_id_value", + "params": {"fields": {}}, + "schedule": "schedule_value", + "schedule_options": { + "disable_auto_scheduling": True, + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "data_refresh_window_days": 2543, + "disabled": True, + "update_time": {}, + "next_run_time": {}, + "state": 2, + "user_id": 747, + "dataset_region": "dataset_region_value", + "notification_pubsub_topic": "notification_pubsub_topic_value", + "email_preferences": {"enable_failure_email": True}, + "owner_info": {"email": "email_value"}, + "encryption_configuration": {"kms_key_name": {"value": "value_value"}}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_transfer_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_update_transfer_config_rest_required_fields( + request_type=datatransfer.UpdateTransferConfigRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_transfer_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "authorization_code", + "service_account_name", + "update_mask", + "version_info", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
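+            # Unlike the GET mappings above, this one is a PATCH, so the
+            # request is also serialized into the body below.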
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = transfer.TransferConfig.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_transfer_config(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_transfer_config_rest_unset_required_fields():
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.update_transfer_config._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "authorizationCode",
+                "serviceAccountName",
+                "updateMask",
+                "versionInfo",
+            )
+        )
+        & set(
+            (
+                "transferConfig",
+                "updateMask",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_transfer_config_rest_interceptors(null_interceptor):
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DataTransferServiceRestInterceptor(),
+    )
+    client = DataTransferServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "post_update_transfer_config"
+    ) as post, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "pre_update_transfer_config"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = datatransfer.UpdateTransferConfigRequest.pb(
+            datatransfer.UpdateTransferConfigRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = transfer.TransferConfig.to_json(
+            transfer.TransferConfig()
+        )
+
+        request = datatransfer.UpdateTransferConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = transfer.TransferConfig()
+
+        client.update_transfer_config(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_transfer_config_rest_bad_request(
+    transport: str = "rest", request_type=datatransfer.UpdateTransferConfigRequest
+):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "transfer_config": {
+            "name": "projects/sample1/locations/sample2/transferConfigs/sample3"
+        }
+    }
+    request_init["transfer_config"] = {
+        "name": "projects/sample1/locations/sample2/transferConfigs/sample3",
+        "destination_dataset_id": "destination_dataset_id_value",
+        "display_name": "display_name_value",
+        "data_source_id": "data_source_id_value",
+        "params": {"fields": {}},
+        "schedule": "schedule_value",
+        "schedule_options": {
+            "disable_auto_scheduling": True,
+            "start_time": {"seconds": 751, "nanos": 543},
+            "end_time": {},
+        },
+        "data_refresh_window_days": 2543,
+        "disabled": True,
+        "update_time": {},
+        "next_run_time": {},
+        "state": 2,
+        "user_id": 747,
+        "dataset_region": "dataset_region_value",
+        "notification_pubsub_topic": "notification_pubsub_topic_value",
+        "email_preferences": {"enable_failure_email": True},
+        "owner_info": {"email": "email_value"},
+        "encryption_configuration": {"kms_key_name": {"value": "value_value"}},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_transfer_config(request)
+
+
+def test_update_transfer_config_rest_flattened():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = transfer.TransferConfig()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "transfer_config": {
+                "name": "projects/sample1/locations/sample2/transferConfigs/sample3"
+            }
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            transfer_config=transfer.TransferConfig(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = transfer.TransferConfig.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.update_transfer_config(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_update_transfer_config_rest_flattened_error(transport: str = "rest"):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.update_transfer_config( + datatransfer.UpdateTransferConfigRequest(), + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_transfer_config_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.DeleteTransferConfigRequest, + dict, + ], +) +def test_delete_transfer_config_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_transfer_config(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_transfer_config_rest_required_fields( + request_type=datatransfer.DeleteTransferConfigRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
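+            # DeleteTransferConfig returns Empty, so the mocked HTTP body is
+            # an empty string and the client is expected to yield None.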
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "delete",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.delete_transfer_config(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_delete_transfer_config_rest_unset_required_fields():
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.delete_transfer_config._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_transfer_config_rest_interceptors(null_interceptor):
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DataTransferServiceRestInterceptor(),
+ )
+ client = DataTransferServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "pre_delete_transfer_config"
+ ) as pre:
+ pre.assert_not_called()
+ pb_message = datatransfer.DeleteTransferConfigRequest.pb(
+ datatransfer.DeleteTransferConfigRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+
+ request = datatransfer.DeleteTransferConfigRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+
+ client.delete_transfer_config(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+
+
+def test_delete_transfer_config_rest_bad_request(
+ transport: str = "rest", request_type=datatransfer.DeleteTransferConfigRequest
+):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/transferConfigs/sample3"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.delete_transfer_config(request)
+
+
+def test_delete_transfer_config_rest_flattened():
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_transfer_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/transferConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_transfer_config_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_transfer_config( + datatransfer.DeleteTransferConfigRequest(), + name="name_value", + ) + + +def test_delete_transfer_config_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.GetTransferConfigRequest, + dict, + ], +) +def test_get_transfer_config_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_transfer_config(request) + + # Establish that the response is the type that we expect. 
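+ # (Checking each scalar field below also confirms that the values set on
+ # return_value survived the protobuf-to-JSON round trip above.)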
+ assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_config_rest_required_fields( + request_type=datatransfer.GetTransferConfigRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "get",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = transfer.TransferConfig.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.get_transfer_config(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_get_transfer_config_rest_unset_required_fields():
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.get_transfer_config._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_transfer_config_rest_interceptors(null_interceptor):
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DataTransferServiceRestInterceptor(),
+ )
+ client = DataTransferServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "post_get_transfer_config"
+ ) as post, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "pre_get_transfer_config"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datatransfer.GetTransferConfigRequest.pb(
+ datatransfer.GetTransferConfigRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = transfer.TransferConfig.to_json(
+ transfer.TransferConfig()
+ )
+
+ request = datatransfer.GetTransferConfigRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = transfer.TransferConfig()
+
+ client.get_transfer_config(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_get_transfer_config_rest_bad_request(
+ transport: str = "rest", request_type=datatransfer.GetTransferConfigRequest
+):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/transferConfigs/sample3"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_transfer_config(request) + + +def test_get_transfer_config_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_transfer_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/transferConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_transfer_config_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_transfer_config( + datatransfer.GetTransferConfigRequest(), + name="name_value", + ) + + +def test_get_transfer_config_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListTransferConfigsRequest, + dict, + ], +) +def test_list_transfer_configs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferConfigsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_transfer_configs(request) + + # Establish that the response is the type that we expect. 
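+ # (list_transfer_configs wraps the raw response in a pager; attribute
+ # access such as next_page_token is delegated to the underlying
+ # ListTransferConfigsResponse.)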
+ assert isinstance(response, pagers.ListTransferConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_configs_rest_required_fields( + request_type=datatransfer.ListTransferConfigsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "data_source_ids", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "get",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = datatransfer.ListTransferConfigsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.list_transfer_configs(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_list_transfer_configs_rest_unset_required_fields():
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.list_transfer_configs._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(
+ (
+ "dataSourceIds",
+ "pageSize",
+ "pageToken",
+ )
+ )
+ & set(("parent",))
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_transfer_configs_rest_interceptors(null_interceptor):
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DataTransferServiceRestInterceptor(),
+ )
+ client = DataTransferServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "post_list_transfer_configs"
+ ) as post, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "pre_list_transfer_configs"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datatransfer.ListTransferConfigsRequest.pb(
+ datatransfer.ListTransferConfigsRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = datatransfer.ListTransferConfigsResponse.to_json(
+ datatransfer.ListTransferConfigsResponse()
+ )
+
+ request = datatransfer.ListTransferConfigsRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = datatransfer.ListTransferConfigsResponse()
+
+ client.list_transfer_configs(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_list_transfer_configs_rest_bad_request(
+ transport: str = "rest", request_type=datatransfer.ListTransferConfigsRequest
+):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"parent": "projects/sample1/locations/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.list_transfer_configs(request)
+
+
+def test_list_transfer_configs_rest_flattened():
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datatransfer.ListTransferConfigsResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent="parent_value",
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = datatransfer.ListTransferConfigsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ client.list_transfer_configs(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate(
+ "%s/v1/{parent=projects/*/locations/*}/transferConfigs"
+ % client.transport._host,
+ args[1],
+ )
+
+
+def test_list_transfer_configs_rest_flattened_error(transport: str = "rest"):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_transfer_configs(
+ datatransfer.ListTransferConfigsRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_transfer_configs_rest_pager(transport: str = "rest"):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
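+ # Each element of req.side_effect below is consumed by one HTTP call,
+ # so the pager sees a fresh page every time it follows next_page_token.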
+ # Set the response as a series of pages
+ response = (
+ datatransfer.ListTransferConfigsResponse(
+ transfer_configs=[
+ transfer.TransferConfig(),
+ transfer.TransferConfig(),
+ transfer.TransferConfig(),
+ ],
+ next_page_token="abc",
+ ),
+ datatransfer.ListTransferConfigsResponse(
+ transfer_configs=[],
+ next_page_token="def",
+ ),
+ datatransfer.ListTransferConfigsResponse(
+ transfer_configs=[
+ transfer.TransferConfig(),
+ ],
+ next_page_token="ghi",
+ ),
+ datatransfer.ListTransferConfigsResponse(
+ transfer_configs=[
+ transfer.TransferConfig(),
+ transfer.TransferConfig(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(
+ datatransfer.ListTransferConfigsResponse.to_json(x) for x in response
+ )
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode("UTF-8")
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+ pager = client.list_transfer_configs(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, transfer.TransferConfig) for i in results)
+
+ pages = list(client.list_transfer_configs(request=sample_request).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ datatransfer.ScheduleTransferRunsRequest,
+ dict,
+ ],
+)
+def test_schedule_transfer_runs_rest(request_type):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "parent": "projects/sample1/locations/sample2/transferConfigs/sample3"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = datatransfer.ScheduleTransferRunsResponse()
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.schedule_transfer_runs(request)
+
+ # Establish that the response is the type that we expect.
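+ # (The mocked response above is an empty ScheduleTransferRunsResponse,
+ # so only its type can be asserted here.)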
+ assert isinstance(response, datatransfer.ScheduleTransferRunsResponse) + + +def test_schedule_transfer_runs_rest_required_fields( + request_type=datatransfer.ScheduleTransferRunsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).schedule_transfer_runs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).schedule_transfer_runs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ScheduleTransferRunsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "post",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.schedule_transfer_runs(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_schedule_transfer_runs_rest_unset_required_fields():
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.schedule_transfer_runs._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(())
+ & set(
+ (
+ "parent",
+ "startTime",
+ "endTime",
+ )
+ )
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_schedule_transfer_runs_rest_interceptors(null_interceptor):
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DataTransferServiceRestInterceptor(),
+ )
+ client = DataTransferServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "post_schedule_transfer_runs"
+ ) as post, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "pre_schedule_transfer_runs"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datatransfer.ScheduleTransferRunsRequest.pb(
+ datatransfer.ScheduleTransferRunsRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = datatransfer.ScheduleTransferRunsResponse.to_json(
+ datatransfer.ScheduleTransferRunsResponse()
+ )
+
+ request = datatransfer.ScheduleTransferRunsRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = datatransfer.ScheduleTransferRunsResponse()
+
+ client.schedule_transfer_runs(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_schedule_transfer_runs_rest_bad_request(
+ transport: str = "rest", request_type=datatransfer.ScheduleTransferRunsRequest
+):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "parent": "projects/sample1/locations/sample2/transferConfigs/sample3"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
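+ # (A 400 status from the mocked session is surfaced to the caller as
+ # core_exceptions.BadRequest by the shared HTTP error mapping.)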
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.schedule_transfer_runs(request) + + +def test_schedule_transfer_runs_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ScheduleTransferRunsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + start_time=timestamp_pb2.Timestamp(seconds=751), + end_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.schedule_transfer_runs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns" + % client.transport._host, + args[1], + ) + + +def test_schedule_transfer_runs_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.schedule_transfer_runs( + datatransfer.ScheduleTransferRunsRequest(), + parent="parent_value", + start_time=timestamp_pb2.Timestamp(seconds=751), + end_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_schedule_transfer_runs_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.StartManualTransferRunsRequest, + dict, + ], +) +def test_start_manual_transfer_runs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datatransfer.StartManualTransferRunsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.StartManualTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.start_manual_transfer_runs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.StartManualTransferRunsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_manual_transfer_runs_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_start_manual_transfer_runs" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_start_manual_transfer_runs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.StartManualTransferRunsRequest.pb( + datatransfer.StartManualTransferRunsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + datatransfer.StartManualTransferRunsResponse.to_json( + datatransfer.StartManualTransferRunsResponse() + ) + ) + + request = datatransfer.StartManualTransferRunsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.StartManualTransferRunsResponse() + + client.start_manual_transfer_runs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_manual_transfer_runs_rest_bad_request( + transport: str = "rest", request_type=datatransfer.StartManualTransferRunsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_manual_transfer_runs(request) + + +def test_start_manual_transfer_runs_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.GetTransferRunRequest, + dict, + ], +) +def test_get_transfer_run_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferRun( + name="name_value", + data_source_id="data_source_id_value", + state=transfer.TransferState.PENDING, + user_id=747, + schedule="schedule_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferRun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_transfer_run(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, transfer.TransferRun) + assert response.name == "name_value" + assert response.data_source_id == "data_source_id_value" + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.schedule == "schedule_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_run_rest_required_fields( + request_type=datatransfer.GetTransferRunRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transfer_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transfer_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = transfer.TransferRun() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "get",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = transfer.TransferRun.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.get_transfer_run(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_get_transfer_run_rest_unset_required_fields():
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.get_transfer_run._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_transfer_run_rest_interceptors(null_interceptor):
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DataTransferServiceRestInterceptor(),
+ )
+ client = DataTransferServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "post_get_transfer_run"
+ ) as post, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "pre_get_transfer_run"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = datatransfer.GetTransferRunRequest.pb(
+ datatransfer.GetTransferRunRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = transfer.TransferRun.to_json(transfer.TransferRun())
+
+ request = datatransfer.GetTransferRunRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = transfer.TransferRun()
+
+ client.get_transfer_run(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_get_transfer_run_rest_bad_request(
+ transport: str = "rest", request_type=datatransfer.GetTransferRunRequest
+):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_transfer_run(request) + + +def test_get_transfer_run_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferRun() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferRun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_transfer_run(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_transfer_run_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_transfer_run( + datatransfer.GetTransferRunRequest(), + name="name_value", + ) + + +def test_get_transfer_run_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.DeleteTransferRunRequest, + dict, + ], +) +def test_delete_transfer_run_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_transfer_run(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_transfer_run_rest_required_fields( + request_type=datatransfer.DeleteTransferRunRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_transfer_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_transfer_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "delete",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.delete_transfer_run(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_delete_transfer_run_rest_unset_required_fields():
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.delete_transfer_run._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_transfer_run_rest_interceptors(null_interceptor):
+ transport = transports.DataTransferServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DataTransferServiceRestInterceptor(),
+ )
+ client = DataTransferServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DataTransferServiceRestInterceptor, "pre_delete_transfer_run"
+ ) as pre:
+ pre.assert_not_called()
+ pb_message = datatransfer.DeleteTransferRunRequest.pb(
+ datatransfer.DeleteTransferRunRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+
+ request = datatransfer.DeleteTransferRunRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+
+ client.delete_transfer_run(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+
+
+def test_delete_transfer_run_rest_bad_request(
+ transport: str = "rest", request_type=datatransfer.DeleteTransferRunRequest
+):
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {
+ "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4"
+ }
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.delete_transfer_run(request)
+
+
+def test_delete_transfer_run_rest_flattened():
+ client = DataTransferServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_transfer_run(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_transfer_run_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_transfer_run( + datatransfer.DeleteTransferRunRequest(), + name="name_value", + ) + + +def test_delete_transfer_run_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListTransferRunsRequest, + dict, + ], +) +def test_list_transfer_runs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferRunsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_transfer_runs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTransferRunsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_runs_rest_required_fields( + request_type=datatransfer.ListTransferRunsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_runs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_runs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "run_attempt", + "states", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferRunsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = datatransfer.ListTransferRunsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_transfer_runs(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_transfer_runs_rest_unset_required_fields():
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_transfer_runs._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+                "runAttempt",
+                "states",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_transfer_runs_rest_interceptors(null_interceptor):
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DataTransferServiceRestInterceptor(),
+    )
+    client = DataTransferServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "post_list_transfer_runs"
+    ) as post, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "pre_list_transfer_runs"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = datatransfer.ListTransferRunsRequest.pb(
+            datatransfer.ListTransferRunsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = datatransfer.ListTransferRunsResponse.to_json(
+            datatransfer.ListTransferRunsResponse()
+        )
+
+        request = datatransfer.ListTransferRunsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datatransfer.ListTransferRunsResponse()
+
+        client.list_transfer_runs(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_transfer_runs_rest_bad_request(
+    transport: str = "rest", request_type=datatransfer.ListTransferRunsRequest
+):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "parent": "projects/sample1/locations/sample2/transferConfigs/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_transfer_runs(request) + + +def test_list_transfer_runs_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferRunsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_transfer_runs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs" + % client.transport._host, + args[1], + ) + + +def test_list_transfer_runs_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transfer_runs( + datatransfer.ListTransferRunsRequest(), + parent="parent_value", + ) + + +def test_list_transfer_runs_rest_pager(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + transfer.TransferRun(), + transfer.TransferRun(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[], + next_page_token="def", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + ], + next_page_token="ghi", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + transfer.TransferRun(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datatransfer.ListTransferRunsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + pager = client.list_transfer_runs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferRun) for i in results) + + pages = list(client.list_transfer_runs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListTransferLogsRequest, + dict, + ], +) +def test_list_transfer_logs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferLogsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferLogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_transfer_logs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTransferLogsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_logs_rest_required_fields( + request_type=datatransfer.ListTransferLogsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_logs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_logs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "message_types", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferLogsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = datatransfer.ListTransferLogsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_transfer_logs(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_transfer_logs_rest_unset_required_fields():
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_transfer_logs._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "messageTypes",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_transfer_logs_rest_interceptors(null_interceptor):
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DataTransferServiceRestInterceptor(),
+    )
+    client = DataTransferServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "post_list_transfer_logs"
+    ) as post, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "pre_list_transfer_logs"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = datatransfer.ListTransferLogsRequest.pb(
+            datatransfer.ListTransferLogsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = datatransfer.ListTransferLogsResponse.to_json(
+            datatransfer.ListTransferLogsResponse()
+        )
+
+        request = datatransfer.ListTransferLogsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datatransfer.ListTransferLogsResponse()
+
+        client.list_transfer_logs(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_transfer_logs_rest_bad_request(
+    transport: str = "rest", request_type=datatransfer.ListTransferLogsRequest
+):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "parent": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_transfer_logs(request) + + +def test_list_transfer_logs_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferLogsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferLogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_transfer_logs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs" + % client.transport._host, + args[1], + ) + + +def test_list_transfer_logs_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transfer_logs( + datatransfer.ListTransferLogsRequest(), + parent="parent_value", + ) + + +def test_list_transfer_logs_rest_pager(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[], + next_page_token="def", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + ], + next_page_token="ghi", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datatransfer.ListTransferLogsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + + pager = client.list_transfer_logs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferMessage) for i in results) + + pages = list(client.list_transfer_logs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.CheckValidCredsRequest, + dict, + ], +) +def test_check_valid_creds_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataSources/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.CheckValidCredsResponse( + has_valid_creds=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.CheckValidCredsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.check_valid_creds(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datatransfer.CheckValidCredsResponse) + assert response.has_valid_creds is True + + +def test_check_valid_creds_rest_required_fields( + request_type=datatransfer.CheckValidCredsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_valid_creds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_valid_creds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.CheckValidCredsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = datatransfer.CheckValidCredsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.check_valid_creds(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_check_valid_creds_rest_unset_required_fields():
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.check_valid_creds._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_check_valid_creds_rest_interceptors(null_interceptor):
+    transport = transports.DataTransferServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DataTransferServiceRestInterceptor(),
+    )
+    client = DataTransferServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "post_check_valid_creds"
+    ) as post, mock.patch.object(
+        transports.DataTransferServiceRestInterceptor, "pre_check_valid_creds"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = datatransfer.CheckValidCredsRequest.pb(
+            datatransfer.CheckValidCredsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = datatransfer.CheckValidCredsResponse.to_json(
+            datatransfer.CheckValidCredsResponse()
+        )
+
+        request = datatransfer.CheckValidCredsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = datatransfer.CheckValidCredsResponse()
+
+        client.check_valid_creds(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_check_valid_creds_rest_bad_request(
+    transport: str = "rest", request_type=datatransfer.CheckValidCredsRequest
+):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/dataSources/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.check_valid_creds(request) + + +def test_check_valid_creds_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.CheckValidCredsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataSources/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.CheckValidCredsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.check_valid_creds(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds" + % client.transport._host, + args[1], + ) + + +def test_check_valid_creds_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.check_valid_creds( + datatransfer.CheckValidCredsRequest(), + name="name_value", + ) + + +def test_check_valid_creds_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.EnrollDataSourcesRequest, + dict, + ], +) +def test_enroll_data_sources_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enroll_data_sources(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enroll_data_sources_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_enroll_data_sources" + ) as pre: + pre.assert_not_called() + pb_message = datatransfer.EnrollDataSourcesRequest.pb( + datatransfer.EnrollDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = datatransfer.EnrollDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.enroll_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_enroll_data_sources_rest_bad_request( + transport: str = "rest", request_type=datatransfer.EnrollDataSourcesRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enroll_data_sources(request) + + +def test_enroll_data_sources_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataTransferServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataTransferServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + transports.DataTransferServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DataTransferServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataTransferServiceGrpcTransport, + ) + + +def test_data_transfer_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataTransferServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_transfer_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataTransferServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_data_source", + "list_data_sources", + "create_transfer_config", + "update_transfer_config", + "delete_transfer_config", + "get_transfer_config", + "list_transfer_configs", + "schedule_transfer_runs", + "start_manual_transfer_runs", + "get_transfer_run", + "delete_transfer_run", + "list_transfer_runs", + "list_transfer_logs", + "check_valid_creds", + "enroll_data_sources", + "get_location", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_transfer_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataTransferServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_transfer_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataTransferServiceTransport() + adc.assert_called_once() + + +def test_data_transfer_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataTransferServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + ], +) +def test_data_transfer_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + transports.DataTransferServiceRestTransport, + ], +) +def test_data_transfer_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataTransferServiceGrpcTransport, grpc_helpers), + (transports.DataTransferServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_data_transfer_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "bigquerydatatransfer.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="bigquerydatatransfer.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + ], +) +def test_data_transfer_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_data_transfer_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.DataTransferServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_data_transfer_service_host_no_port(transport_name):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="bigquerydatatransfer.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "bigquerydatatransfer.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://bigquerydatatransfer.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_data_transfer_service_host_with_port(transport_name):
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="bigquerydatatransfer.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "bigquerydatatransfer.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://bigquerydatatransfer.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_data_transfer_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DataTransferServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DataTransferServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_data_source._session
+    session2 = client2.transport.get_data_source._session
+    assert session1 != session2
+    session1 = client1.transport.list_data_sources._session
+    session2 = client2.transport.list_data_sources._session
+    assert session1 != session2
+    session1 = client1.transport.create_transfer_config._session
+    session2 = client2.transport.create_transfer_config._session
+    assert session1 != session2
+    session1 = client1.transport.update_transfer_config._session
+    session2 = client2.transport.update_transfer_config._session
+    assert session1 != session2
+    session1 = client1.transport.delete_transfer_config._session
+    session2 = client2.transport.delete_transfer_config._session
+    assert session1 != session2
+    session1 = client1.transport.get_transfer_config._session
+    session2 = client2.transport.get_transfer_config._session
+    assert session1 != session2
+    session1 = client1.transport.list_transfer_configs._session
+    session2 = client2.transport.list_transfer_configs._session
+    assert session1 != session2
+    session1 = client1.transport.schedule_transfer_runs._session
+    session2 = client2.transport.schedule_transfer_runs._session
+    assert session1 != session2
+    session1 = client1.transport.start_manual_transfer_runs._session
+    session2 = client2.transport.start_manual_transfer_runs._session
+    assert session1 != session2
+    session1 = client1.transport.get_transfer_run._session
+    session2 = client2.transport.get_transfer_run._session
+    assert session1 != session2
+    session1 = client1.transport.delete_transfer_run._session
+    session2 = client2.transport.delete_transfer_run._session
+    assert session1 != session2
+    session1 = client1.transport.list_transfer_runs._session
+    session2 = client2.transport.list_transfer_runs._session
+    assert session1 != session2
+    session1 = client1.transport.list_transfer_logs._session
+    session2 = client2.transport.list_transfer_logs._session
+    assert session1 != session2
+    session1 = client1.transport.check_valid_creds._session
+    session2 = client2.transport.check_valid_creds._session
+    assert session1 != session2
+    session1 = client1.transport.enroll_data_sources._session
+    session2 = client2.transport.enroll_data_sources._session
+    assert session1 != session2
+
+
+def test_data_transfer_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataTransferServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_data_transfer_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DataTransferServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
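+
+
+# For reference: the mTLS tests above and below rely on a
+# `client_cert_source_callback` helper. Judging by the assertion below that
+# grpc.ssl_channel_credentials receives b"cert bytes" and b"key bytes", it is
+# presumably a trivial fixture defined earlier in this module, along the
+# lines of:
+#
+#     def client_cert_source_callback():
+#         return b"cert bytes", b"key bytes"
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.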
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + ], +) +def test_data_transfer_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + ], +) +def test_data_transfer_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_source_path(): + project = "squid" + data_source = "clam" + expected = "projects/{project}/dataSources/{data_source}".format( + project=project, + data_source=data_source, + ) + actual = DataTransferServiceClient.data_source_path(project, data_source) + assert expected == actual + + +def test_parse_data_source_path(): + expected = { + "project": "whelk", + "data_source": "octopus", + } + path = DataTransferServiceClient.data_source_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataTransferServiceClient.parse_data_source_path(path) + assert expected == actual + + +def test_run_path(): + project = "oyster" + transfer_config = "nudibranch" + run = "cuttlefish" + expected = "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format( + project=project, + transfer_config=transfer_config, + run=run, + ) + actual = DataTransferServiceClient.run_path(project, transfer_config, run) + assert expected == actual + + +def test_parse_run_path(): + expected = { + "project": "mussel", + "transfer_config": "winkle", + "run": "nautilus", + } + path = DataTransferServiceClient.run_path(**expected) + + # Check that the path construction is reversible. + actual = DataTransferServiceClient.parse_run_path(path) + assert expected == actual + + +def test_transfer_config_path(): + project = "scallop" + transfer_config = "abalone" + expected = "projects/{project}/transferConfigs/{transfer_config}".format( + project=project, + transfer_config=transfer_config, + ) + actual = DataTransferServiceClient.transfer_config_path(project, transfer_config) + assert expected == actual + + +def test_parse_transfer_config_path(): + expected = { + "project": "squid", + "transfer_config": "clam", + } + path = DataTransferServiceClient.transfer_config_path(**expected) + + # Check that the path construction is reversible. + actual = DataTransferServiceClient.parse_transfer_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DataTransferServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = DataTransferServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataTransferServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DataTransferServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = DataTransferServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataTransferServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DataTransferServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = DataTransferServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataTransferServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = DataTransferServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = DataTransferServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataTransferServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DataTransferServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = DataTransferServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataTransferServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataTransferServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataTransferServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataTransferServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations(transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
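+    # An aside on the gRPC mocking pattern above (a sketch, assuming the
+    # standard GAPIC transport layout): client.transport.list_locations is a
+    # unary-unary multicallable, so patching "__call__" on its *type*, as in
+    #
+    #   with mock.patch.object(
+    #       type(client.transport.list_locations), "__call__"
+    #   ) as call:
+    #       call.return_value = locations_pb2.ListLocationsResponse()
+    #
+    # intercepts the invocation before any channel traffic can happen.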
+    assert isinstance(response, locations_pb2.ListLocationsResponse)
+
+
+@pytest.mark.asyncio
+async def test_list_locations_async(transport: str = "grpc_asyncio"):
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = locations_pb2.ListLocationsRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.ListLocationsResponse()
+        )
+        response = await client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.ListLocationsResponse)
+
+
+def test_list_locations_field_headers():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.ListLocationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        call.return_value = locations_pb2.ListLocationsResponse()
+
+        client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_locations_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.ListLocationsRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.ListLocationsResponse()
+        )
+        await client.list_locations(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+def test_list_locations_from_dict():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
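+        # A hedged note on why a plain dict works as the request below: the
+        # generated mixin coerces it with the message constructor, roughly
+        #
+        #   if isinstance(request, dict):
+        #       request = locations_pb2.ListLocationsRequest(**request)
+        #
+        # so the *_from_dict tests only need to assert the stub was invoked.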
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
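+    # A sketch of what "field header" means here (assuming the usual GAPIC
+    # routing-header behavior): the client derives routing metadata from the
+    # request's resource name, effectively
+    #
+    #   metadata += (("x-goog-request-params", "name=locations/abc"),)
+    #
+    # and the tuple-membership check below looks for exactly that pair.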
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = DataTransferServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataTransferServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = DataTransferServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = DataTransferServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
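+        # In sketch form (assuming the standard generated client), the
+        # context-manager support being exercised here is just
+        #
+        #   def __enter__(self):
+        #       return self
+        #
+        #   def __exit__(self, type, value, traceback):
+        #       self.transport.close()
+        #
+        # so patching transport.close and entering the client suffices.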
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DataTransferServiceClient, transports.DataTransferServiceGrpcTransport), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/test_shim.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/test_shim.py new file mode 100644 index 000000000000..c5c20db2708a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/test_shim.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unit tests for versionless import.""" + + +def test_shim(): + from google.cloud import bigquery_datatransfer, bigquery_datatransfer_v1 + + assert sorted(bigquery_datatransfer.__all__) == sorted( + bigquery_datatransfer_v1.__all__ + ) + + for name in bigquery_datatransfer.__all__: + found = getattr(bigquery_datatransfer, name) + expected = getattr(bigquery_datatransfer_v1, name) + assert found is expected diff --git a/packages/google-cloud-bigquery-reservation/.OwlBot.yaml b/packages/google-cloud-bigquery-reservation/.OwlBot.yaml new file mode 100644 index 000000000000..f387c10c4de8 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/.OwlBot.yaml @@ -0,0 +1,27 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +deep-remove-regex: + - /owl-bot-staging + +deep-preserve-regex: + - /owl-bot-staging/google-cloud-bigquery-reservation/v1beta1 + +deep-copy-regex: + - source: /google/cloud/bigquery/reservation/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-bigquery-reservation/$1 + +begin-after-commit-hash: 6a5da3f1274b088752f074da5bc9e30bd1beb27e + diff --git a/packages/google-cloud-bigquery-reservation/.coveragerc b/packages/google-cloud-bigquery-reservation/.coveragerc new file mode 100644 index 000000000000..24e124ab08de --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/bigquery_reservation/__init__.py + google/cloud/bigquery_reservation/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-bigquery-reservation/.eggs/README.txt b/packages/google-cloud-bigquery-reservation/.eggs/README.txt new file mode 100644 index 000000000000..5d01668824f4 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/.eggs/README.txt @@ -0,0 +1,6 @@ +This directory contains eggs that were downloaded by setuptools to build, test, and run plug-ins. + +This directory caches those eggs to prevent repeated downloads. + +However, it is safe to delete this directory. + diff --git a/packages/google-cloud-bigquery-reservation/.flake8 b/packages/google-cloud-bigquery-reservation/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-bigquery-reservation/.gitignore b/packages/google-cloud-bigquery-reservation/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-bigquery-reservation/.repo-metadata.json b/packages/google-cloud-bigquery-reservation/.repo-metadata.json new file mode 100644 index 000000000000..a5bb65939238 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "bigqueryreservation", + "name_pretty": "BigQuery Reservation", + "product_documentation": "https://cloud.google.com/bigquery/docs/reference/reservations", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigqueryreservation/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-bigquery-reservation", + "api_id": "bigqueryreservation.googleapis.com", + "requires_billing": true, + "codeowner_team": "@googleapis/api-bigquery", + "default_version": "v1", + "api_shortname": "bigqueryreservation", + "api_description": "Modify BigQuery flat-rate reservations." +} diff --git a/packages/google-cloud-bigquery-reservation/CHANGELOG.md b/packages/google-cloud-bigquery-reservation/CHANGELOG.md new file mode 100644 index 000000000000..6d07a81d98a2 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/CHANGELOG.md @@ -0,0 +1,306 @@ +# Changelog + +## [1.11.2](https://github.com/googleapis/python-bigquery-reservation/compare/v1.11.1...v1.11.2) (2023-07-05) + + +### Bug Fixes + +* Add async context manager return types ([#362](https://github.com/googleapis/python-bigquery-reservation/issues/362)) ([98df6ad](https://github.com/googleapis/python-bigquery-reservation/commit/98df6adf73f2a1276e34977c06882a87558edf58)) + +## [1.11.1](https://github.com/googleapis/python-bigquery-reservation/compare/v1.11.0...v1.11.1) (2023-03-28) + + +### Documentation + +* Minor clarifications ([#352](https://github.com/googleapis/python-bigquery-reservation/issues/352)) ([c9ce9b8](https://github.com/googleapis/python-bigquery-reservation/commit/c9ce9b85b8315f5d676fecdc0c95fd4f01363e9a)) + +## [1.11.0](https://github.com/googleapis/python-bigquery-reservation/compare/v1.10.0...v1.11.0) (2023-03-23) + + +### Features + +* Add edition/autoscale related fields ([#348](https://github.com/googleapis/python-bigquery-reservation/issues/348)) ([e94a53b](https://github.com/googleapis/python-bigquery-reservation/commit/e94a53bd510f51facc4a0fd591e5c4a981028f90)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#350](https://github.com/googleapis/python-bigquery-reservation/issues/350)) ([ecce362](https://github.com/googleapis/python-bigquery-reservation/commit/ecce36210c6a8fdce862b9ff16d49451180bea9a)) + +## [1.10.0](https://github.com/googleapis/python-bigquery-reservation/compare/v1.9.1...v1.10.0) (2023-02-16) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#339](https://github.com/googleapis/python-bigquery-reservation/issues/339)) ([50de8ca](https://github.com/googleapis/python-bigquery-reservation/commit/50de8ca394e61e0fd97b9264c660d4017c241ece)) + +## [1.9.1](https://github.com/googleapis/python-bigquery-reservation/compare/v1.9.0...v1.9.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([3912795](https://github.com/googleapis/python-bigquery-reservation/commit/3912795237f099f8cae2dea647f6e599aa3c4a1b)) + + +### 
Documentation + +* Add documentation for enums ([3912795](https://github.com/googleapis/python-bigquery-reservation/commit/3912795237f099f8cae2dea647f6e599aa3c4a1b)) + +## [1.9.0](https://github.com/googleapis/python-bigquery-reservation/compare/v1.8.0...v1.9.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#327](https://github.com/googleapis/python-bigquery-reservation/issues/327)) ([c86b3b8](https://github.com/googleapis/python-bigquery-reservation/commit/c86b3b8da191595dcbb30b4a55136c30376bad72)) + +## [1.8.0](https://github.com/googleapis/python-bigquery-reservation/compare/v1.7.3...v1.8.0) (2022-12-15) + + +### Features + +* Add support for `google.cloud.bigquery_reservation.__version__` ([e82b8e5](https://github.com/googleapis/python-bigquery-reservation/commit/e82b8e50be61e2885ac8f5b21f650e54f8d6d604)) +* Add typing to proto.Message based class attributes ([e82b8e5](https://github.com/googleapis/python-bigquery-reservation/commit/e82b8e50be61e2885ac8f5b21f650e54f8d6d604)) + + +### Bug Fixes + +* Add dict typing for client_options ([e82b8e5](https://github.com/googleapis/python-bigquery-reservation/commit/e82b8e50be61e2885ac8f5b21f650e54f8d6d604)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([c9ff564](https://github.com/googleapis/python-bigquery-reservation/commit/c9ff564b070c488b96af8574df442334173996cf)) +* Drop usage of pkg_resources ([c9ff564](https://github.com/googleapis/python-bigquery-reservation/commit/c9ff564b070c488b96af8574df442334173996cf)) +* Fix timeout default values ([c9ff564](https://github.com/googleapis/python-bigquery-reservation/commit/c9ff564b070c488b96af8574df442334173996cf)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([e82b8e5](https://github.com/googleapis/python-bigquery-reservation/commit/e82b8e50be61e2885ac8f5b21f650e54f8d6d604)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([c9ff564](https://github.com/googleapis/python-bigquery-reservation/commit/c9ff564b070c488b96af8574df442334173996cf)) + +## [1.7.3](https://github.com/googleapis/python-bigquery-reservation/compare/v1.7.2...v1.7.3) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#313](https://github.com/googleapis/python-bigquery-reservation/issues/313)) ([e616e6b](https://github.com/googleapis/python-bigquery-reservation/commit/e616e6bc2be88305ba5088e37647bc934b28299e)) + +## [1.7.2](https://github.com/googleapis/python-bigquery-reservation/compare/v1.7.1...v1.7.2) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#310](https://github.com/googleapis/python-bigquery-reservation/issues/310)) ([9c5c59a](https://github.com/googleapis/python-bigquery-reservation/commit/9c5c59a6f8b164f4d323677be0de537616f2f664)) + +## [1.7.1](https://github.com/googleapis/python-bigquery-reservation/compare/v1.7.0...v1.7.1) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#292](https://github.com/googleapis/python-bigquery-reservation/issues/292)) ([112a726](https://github.com/googleapis/python-bigquery-reservation/commit/112a726680d4426bdbc037631c887915bb8b2e35)) +* **deps:** require proto-plus >= 1.22.0 ([112a726](https://github.com/googleapis/python-bigquery-reservation/commit/112a726680d4426bdbc037631c887915bb8b2e35)) + +## 
[1.7.0](https://github.com/googleapis/python-bigquery-reservation/compare/v1.6.3...v1.7.0) (2022-07-16) + + +### Features + +* add audience parameter ([8b25cff](https://github.com/googleapis/python-bigquery-reservation/commit/8b25cff82fab1ae6c038238df5d65318396c0782)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#282](https://github.com/googleapis/python-bigquery-reservation/issues/282)) ([8b25cff](https://github.com/googleapis/python-bigquery-reservation/commit/8b25cff82fab1ae6c038238df5d65318396c0782)) +* require python 3.7+ ([#284](https://github.com/googleapis/python-bigquery-reservation/issues/284)) ([13c0983](https://github.com/googleapis/python-bigquery-reservation/commit/13c0983e5d8671549e1b27d1b7533bfcbc374fe8)) + +## [1.6.3](https://github.com/googleapis/python-bigquery-reservation/compare/v1.6.2...v1.6.3) (2022-06-06) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#271](https://github.com/googleapis/python-bigquery-reservation/issues/271)) ([a054642](https://github.com/googleapis/python-bigquery-reservation/commit/a054642feccb275527eb803c97d56613cf006670)) + + +### Documentation + +* fix changelog header to consistent size ([#272](https://github.com/googleapis/python-bigquery-reservation/issues/272)) ([2a9c9de](https://github.com/googleapis/python-bigquery-reservation/commit/2a9c9deb1ec2afbe469296210c6659276c15379a)) + +## [1.6.2](https://github.com/googleapis/python-bigquery-reservation/compare/v1.6.1...v1.6.2) (2022-05-05) + + +### Documentation + +* cleanup and clarifications ([#253](https://github.com/googleapis/python-bigquery-reservation/issues/253)) ([f34d11a](https://github.com/googleapis/python-bigquery-reservation/commit/f34d11a8c42bc626ca2550c9a3a3e98fa09ec2d0)) + +## [1.6.1](https://github.com/googleapis/python-bigquery-reservation/compare/v1.6.0...v1.6.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#233](https://github.com/googleapis/python-bigquery-reservation/issues/233)) ([14eee3f](https://github.com/googleapis/python-bigquery-reservation/commit/14eee3ff6a19aa1199c36c314210f4b8f23bc367)) +* **deps:** require proto-plus>=1.15.0 ([14eee3f](https://github.com/googleapis/python-bigquery-reservation/commit/14eee3ff6a19aa1199c36c314210f4b8f23bc367)) + + +### Documentation + +* fix README typo in PyPI link ([#225](https://github.com/googleapis/python-bigquery-reservation/issues/225)) ([6eea2a3](https://github.com/googleapis/python-bigquery-reservation/commit/6eea2a39fbf22f09d6791aded8bd7bdda0b52425)) + +## [1.6.0](https://github.com/googleapis/python-bigquery-reservation/compare/v1.5.0...v1.6.0) (2022-02-03) + + +### Features + +* add api key support ([#215](https://github.com/googleapis/python-bigquery-reservation/issues/215)) ([33dc0a3](https://github.com/googleapis/python-bigquery-reservation/commit/33dc0a3852ab0786b65b37bbfd17791fb7f29188)) + +## [1.5.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.4.1...v1.5.0) (2022-01-04) + + +### Features + +* increase the logical timeout (retry deadline) to 5 minutes ([#198](https://www.github.com/googleapis/python-bigquery-reservation/issues/198)) ([13cb5b3](https://www.github.com/googleapis/python-bigquery-reservation/commit/13cb5b3c62fc4ca1823c1154e5ee5eaede5478ae)) + +## 
[1.4.1](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.4.0...v1.4.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([9b98c95](https://www.github.com/googleapis/python-bigquery-reservation/commit/9b98c95444b9d1467297ea4a87bbfb2954dc2999)) +* **deps:** require google-api-core >= 1.28.0 ([9b98c95](https://www.github.com/googleapis/python-bigquery-reservation/commit/9b98c95444b9d1467297ea4a87bbfb2954dc2999)) + + +### Documentation + +* list oneofs in docstring ([9b98c95](https://www.github.com/googleapis/python-bigquery-reservation/commit/9b98c95444b9d1467297ea4a87bbfb2954dc2999)) + +## [1.4.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.3.1...v1.4.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#175](https://www.github.com/googleapis/python-bigquery-reservation/issues/175)) ([80768c8](https://www.github.com/googleapis/python-bigquery-reservation/commit/80768c8009b8450a3ac3025c95683bcd0628ef35)) + + +### Bug Fixes + +* improper types in pagers generation ([ae65c70](https://www.github.com/googleapis/python-bigquery-reservation/commit/ae65c70bd91602ccb851167d27b4161ebd6c3bb3)) + +## [1.3.1](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.3.0...v1.3.1) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([937ea64](https://www.github.com/googleapis/python-bigquery-reservation/commit/937ea64f4a8a5ff47baffeb88c4dd528324a77ae)) + +## [1.3.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.2.2...v1.3.0) (2021-09-02) + + +### Features + +* Deprecated SearchAssignments in favor of SearchAllAssignments ([#157](https://www.github.com/googleapis/python-bigquery-reservation/issues/157)) ([dacdf5a](https://www.github.com/googleapis/python-bigquery-reservation/commit/dacdf5ac37a802f0d00a30468720a3ce1f294985)) + + +### Documentation + +* samples for managing reservations ([#144](https://www.github.com/googleapis/python-bigquery-reservation/issues/144)) ([27b2564](https://www.github.com/googleapis/python-bigquery-reservation/commit/27b256440b2565369c900cd4728e38676f82fcfe)) + +## [1.2.2](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.2.1...v1.2.2) (2021-07-27) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#138](https://www.github.com/googleapis/python-bigquery-reservation/issues/138)) ([1d3f927](https://www.github.com/googleapis/python-bigquery-reservation/commit/1d3f927b12268c07e724ed44f1b3373a7c64e999)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#132](https://www.github.com/googleapis/python-bigquery-reservation/issues/132)) ([c59d238](https://www.github.com/googleapis/python-bigquery-reservation/commit/c59d2383413ef5c57d72877d76514853f6271b00)) + + +### Miscellaneous Chores + +* release as 1.2.2 ([#139](https://www.github.com/googleapis/python-bigquery-reservation/issues/139)) ([96fbeba](https://www.github.com/googleapis/python-bigquery-reservation/commit/96fbeba273eb1776994f41400163788cf7b5e786)) + +## [1.2.1](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.2.0...v1.2.1) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#131](https://www.github.com/googleapis/python-bigquery-reservation/issues/131)) ([9a011b6](https://www.github.com/googleapis/python-bigquery-reservation/commit/9a011b604ffc2256b89d2fd6909a7219c0bcc88b)) + +## 
[1.2.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.1.0...v1.2.0) (2021-06-30) + + +### Features + +* add always_use_jwt_access ([#123](https://www.github.com/googleapis/python-bigquery-reservation/issues/123)) ([3123e99](https://www.github.com/googleapis/python-bigquery-reservation/commit/3123e99e8e288dcfb3627f77610c90060654bee4)) +* support self-signed JWT flow for service accounts ([4d52ed9](https://www.github.com/googleapis/python-bigquery-reservation/commit/4d52ed91ae9eaa7ec6091138c134e682c9434853)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([4d52ed9](https://www.github.com/googleapis/python-bigquery-reservation/commit/4d52ed91ae9eaa7ec6091138c134e682c9434853)) +* disable always_use_jwt_access ([32b279f](https://www.github.com/googleapis/python-bigquery-reservation/commit/32b279f0666a55c66e87c347ed7e913c2a9267a7)) +* disable always_use_jwt_access ([#126](https://www.github.com/googleapis/python-bigquery-reservation/issues/126)) ([32b279f](https://www.github.com/googleapis/python-bigquery-reservation/commit/32b279f0666a55c66e87c347ed7e913c2a9267a7)) +* exclude docs and tests from package ([#117](https://www.github.com/googleapis/python-bigquery-reservation/issues/117)) ([4f90792](https://www.github.com/googleapis/python-bigquery-reservation/commit/4f90792c26c8e47aad5a52267c713723e661efa3)) +* require google-api-core >= 1.22.2 ([#90](https://www.github.com/googleapis/python-bigquery-reservation/issues/90)) ([3f0fff7](https://www.github.com/googleapis/python-bigquery-reservation/commit/3f0fff779d880df0648b7bcf59df01c4cacd4ca3)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-bigquery-reservation/issues/1127)) ([#120](https://www.github.com/googleapis/python-bigquery-reservation/issues/120)) ([7d65f87](https://www.github.com/googleapis/python-bigquery-reservation/commit/7d65f877f6814aed37f68116b52e200585587c58)), closes [#1126](https://www.github.com/googleapis/python-bigquery-reservation/issues/1126) +* Update the README to reflect that this library is GA ([#112](https://www.github.com/googleapis/python-bigquery-reservation/issues/112)) ([7bca7a9](https://www.github.com/googleapis/python-bigquery-reservation/commit/7bca7a9b6d73d8c8ee522c8ac930192fad49da57)) + +## [1.1.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.0.1...v1.1.0) (2021-03-09) + + +### Features + +* add `client_cert_source_for_mtls` argument to transports ([#78](https://www.github.com/googleapis/python-bigquery-reservation/issues/78)) ([5df0f09](https://www.github.com/googleapis/python-bigquery-reservation/commit/5df0f0965c541ca546d3851be1ab7782dc80a11b)) + +## [1.0.1](https://www.github.com/googleapis/python-bigquery-reservation/compare/v1.0.0...v1.0.1) (2021-01-14) + + +### Bug Fixes + +* remove gRPC send/recv limit ([#60](https://www.github.com/googleapis/python-bigquery-reservation/issues/60)) ([4115f1e](https://www.github.com/googleapis/python-bigquery-reservation/commit/4115f1ee6b67be5ce409122a44faa47ac53112bf)) + + +### Documentation + +* document enum values with `undoc-members` option ([#69](https://www.github.com/googleapis/python-bigquery-reservation/issues/69)) ([2acdeb7](https://www.github.com/googleapis/python-bigquery-reservation/commit/2acdeb782521c01a4e1fa01e42fdd1ce79dbf13d)) + +## [1.0.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v0.4.0...v1.0.0) (2020-10-29) + + +### ⚠ BREAKING CHANGES + +* update package names to 
avoid conflict with google-cloud-bigquery + +### Bug Fixes + +* update package names to avoid conflict with google-cloud-bigquery ([#47](https://www.github.com/googleapis/python-bigquery-reservation/issues/47)) ([dc2172f](https://www.github.com/googleapis/python-bigquery-reservation/commit/dc2172fa8c540efca01c81fdd7f40880e087f66d)) + +## [0.4.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v0.3.0...v0.4.0) (2020-10-28) + + +### Features + +* add path formatting helper methods ([362e0fe](https://www.github.com/googleapis/python-bigquery-reservation/commit/362e0fe51364101bd770cce851d986eea6c56e6a)) +* implement mtls env variables mentioned in aip.dev/auth/4114 ([#39](https://www.github.com/googleapis/python-bigquery-reservation/issues/39)) ([21bff87](https://www.github.com/googleapis/python-bigquery-reservation/commit/21bff87047519754a01983c9a4551cb534bcb88c)) + +## [0.3.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v0.2.0...v0.3.0) (2020-08-26) + + +### Features + +* add support for new client options ([#23](https://www.github.com/googleapis/python-bigquery-reservation/issues/23)) ([a0e818d](https://www.github.com/googleapis/python-bigquery-reservation/commit/a0e818d526dc60f0eb24787333e1041b02f26816)) + +## [0.2.0](https://www.github.com/googleapis/python-bigquery-reservation/compare/v0.1.0...v0.2.0) (2020-05-27) + + +### Features + +* add helper methods to parse resource paths (via synth) ([#7](https://www.github.com/googleapis/python-bigquery-reservation/issues/7)) ([8fc54cb](https://www.github.com/googleapis/python-bigquery-reservation/commit/8fc54cb70be698f6d265f60d7b8ee4561d12d2c9)) + +## 0.1.0 (2020-05-12) + + +### Features + +* generate v1 ([6293404](https://www.github.com/googleapis/python-bigquery-reservation/commit/6293404e47ca2efdcb5f702e248f43250060eb8c)) diff --git a/packages/google-cloud-bigquery-reservation/CODE_OF_CONDUCT.md b/packages/google-cloud-bigquery-reservation/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-bigquery-reservation/CONTRIBUTING.rst b/packages/google-cloud-bigquery-reservation/CONTRIBUTING.rst new file mode 100644 index 000000000000..cf4c4df369dd --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. 
_repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-3.11 -- -k <name of test>
+
+
+  .. note::
+
+      System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
+      For expediency, we do not run them in older versions of Python 3.
+
+  This alone will not run the tests. You'll need to change some local
+  auth settings and change some configuration in your project to
+  run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
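+
+- As a quick sketch of the local workflow (assuming ``nox`` is installed)::
+
+     $ nox -s cover
+
+  This runs the unit suite under ``coverage`` and reports any statements the
+  tests do not execute, so you can confirm the 100% bar before sending a PR.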
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via:
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-bigquery-reservation
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-bigquery-reservation/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-bigquery-reservation/LICENSE b/packages/google-cloud-bigquery-reservation/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-bigquery-reservation/MANIFEST.in b/packages/google-cloud-bigquery-reservation/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-bigquery-reservation/README.rst b/packages/google-cloud-bigquery-reservation/README.rst new file mode 100644 index 000000000000..15c18e8416b7 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/README.rst @@ -0,0 +1,108 @@ +Python Client for BigQuery Reservation +====================================== + +|stable| |pypi| |versions| + +`BigQuery Reservation`_: Modify BigQuery flat-rate reservations. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-reservation.svg + :target: https://pypi.org/project/google-cloud-bigquery-reservation/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-reservation.svg + :target: https://pypi.org/project/google-cloud-bigquery-reservation/ +.. _BigQuery Reservation: https://cloud.google.com/bigquery/docs/reference/reservations +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigqueryreservation/latest +.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/reservations + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the BigQuery Reservation.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the BigQuery Reservation.: https://cloud.google.com/bigquery/docs/reference/reservations +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-bigquery-reservation
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-bigquery-reservation
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for BigQuery Reservation
+  to see other available methods on the client.
+- Read the `BigQuery Reservation Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _BigQuery Reservation Product documentation: https://cloud.google.com/bigquery/docs/reference/reservations
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-bigquery-reservation/SECURITY.md b/packages/google-cloud-bigquery-reservation/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
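To give a concrete, hedged picture of the client surface this package adds (class and request names come from the generated sources later in this diff; the project and location in the parent string are placeholders):

.. code-block:: python

    # Minimal usage sketch for the new client; assumes the authentication
    # setup described in the README above has been completed.
    from google.cloud import bigquery_reservation_v1

    client = bigquery_reservation_v1.ReservationServiceClient()

    # Reservation resources are children of an admin project and location,
    # e.g. projects/myproject/locations/US (placeholder values here).
    request = bigquery_reservation_v1.ListReservationsRequest(
        parent="projects/myproject/locations/US",
    )

    # list_reservations returns a pager that lazily fetches further pages.
    for reservation in client.list_reservations(request=request):
        print(reservation.name)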
diff --git a/packages/google-cloud-bigquery-reservation/docs/CHANGELOG.md b/packages/google-cloud-bigquery-reservation/docs/CHANGELOG.md
new file mode 120000
index 000000000000..04c99a55caae
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/docs/CHANGELOG.md
@@ -0,0 +1 @@
+../CHANGELOG.md
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-reservation/docs/README.rst b/packages/google-cloud-bigquery-reservation/docs/README.rst
new file mode 100644
index 000000000000..95b9a7f40ce0
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/docs/README.rst
@@ -0,0 +1,82 @@
+Python Client for Cloud BigQuery Reservation
+============================================
+
+|beta| |pypi| |versions|
+
+`BigQuery Reservation API`_: Modify BigQuery flat-rate reservations.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |beta| image:: https://img.shields.io/badge/support-beta-orange.svg
+   :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#beta-support
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-reservation.svg
+   :target: https://pypi.org/project/google-cloud-bigquery-reservation/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-reservation.svg
+   :target: https://pypi.org/project/google-cloud-bigquery-reservation/
+.. _BigQuery Reservation API: https://cloud.google.com/bigquery/docs/reference/reservations
+.. _Client Library Documentation: https://googleapis.dev/python/bigqueryreservation/latest
+.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/reservations
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Cloud BigQuery Reservation API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Cloud BigQuery Reservation API.: https://cloud.google.com/bigquery/docs/reference/reservations
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-bigquery-reservation
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-bigquery-reservation
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for the BigQuery Reservation
+  API to see other available methods on the client.
+- Read the `BigQuery Reservation API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `repository's main README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _BigQuery Reservation API Product documentation: https://cloud.google.com/bigquery/docs/reference/reservations
+.. _repository's main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-reservation/docs/_static/custom.css b/packages/google-cloud-bigquery-reservation/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+  border-color: red;
+  border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+  min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+  padding-top: 10px;
+  padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+  padding-bottom: 50px
+}
diff --git a/packages/google-cloud-bigquery-reservation/docs/_templates/layout.html b/packages/google-cloud-bigquery-reservation/docs/_templates/layout.html
new file mode 100644
index 000000000000..6316a537f72b
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+        {%- if render_sidebar %}
+        <div class="bodywrapper">
+        {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+        {%- if render_sidebar %}
+        </div>
+        {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/reservation_service.rst b/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/reservation_service.rst new file mode 100644 index 000000000000..35e6b3cfd676 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/reservation_service.rst @@ -0,0 +1,10 @@ +ReservationService +------------------------------------ + +.. automodule:: google.cloud.bigquery_reservation_v1.services.reservation_service + :members: + :inherited-members: + +.. automodule:: google.cloud.bigquery_reservation_v1.services.reservation_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/services.rst b/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/services.rst new file mode 100644 index 000000000000..3f9e9577f53c --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Bigquery Reservation v1 API +===================================================== +.. toctree:: + :maxdepth: 2 + + reservation_service diff --git a/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/types.rst b/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/types.rst new file mode 100644 index 000000000000..96a1583d3f20 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/docs/bigquery_reservation_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Bigquery Reservation v1 API +================================================== + +.. automodule:: google.cloud.bigquery_reservation_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-bigquery-reservation/docs/conf.py b/packages/google-cloud-bigquery-reservation/docs/conf.py new file mode 100644 index 000000000000..7d9adaf6cd7b --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-bigquery-reservation documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. 
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-bigquery-reservation" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. 
See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-bigquery-reservation", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. 
+# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-bigquery-reservation-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-bigquery-reservation.tex", + "google-cloud-bigquery-reservation Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-bigquery-reservation", + "google-cloud-bigquery-reservation Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-bigquery-reservation", + "google-cloud-bigquery-reservation Documentation", + author, + "google-cloud-bigquery-reservation", + "google-cloud-bigquery-reservation Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = {
+    "python": ("https://python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+    "google.api_core": (
+        "https://googleapis.dev/python/google-api-core/latest/",
+        None,
+    ),
+    "grpc": ("https://grpc.github.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/packages/google-cloud-bigquery-reservation/docs/index.rst b/packages/google-cloud-bigquery-reservation/docs/index.rst
new file mode 100644
index 000000000000..ba4f6d156681
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/docs/index.rst
@@ -0,0 +1,23 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    bigquery_reservation_v1/services
+    bigquery_reservation_v1/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-bigquery-reservation`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG
diff --git a/packages/google-cloud-bigquery-reservation/docs/multiprocessing.rst b/packages/google-cloud-bigquery-reservation/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/__init__.py
new file mode 100644
index 000000000000..6599f0f85924
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/__init__.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from google.cloud.bigquery_reservation import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.bigquery_reservation_v1.services.reservation_service.async_client import ( + ReservationServiceAsyncClient, +) +from google.cloud.bigquery_reservation_v1.services.reservation_service.client import ( + ReservationServiceClient, +) +from google.cloud.bigquery_reservation_v1.types.reservation import ( + Assignment, + BiReservation, + CapacityCommitment, + CreateAssignmentRequest, + CreateCapacityCommitmentRequest, + CreateReservationRequest, + DeleteAssignmentRequest, + DeleteCapacityCommitmentRequest, + DeleteReservationRequest, + Edition, + GetBiReservationRequest, + GetCapacityCommitmentRequest, + GetReservationRequest, + ListAssignmentsRequest, + ListAssignmentsResponse, + ListCapacityCommitmentsRequest, + ListCapacityCommitmentsResponse, + ListReservationsRequest, + ListReservationsResponse, + MergeCapacityCommitmentsRequest, + MoveAssignmentRequest, + Reservation, + SearchAllAssignmentsRequest, + SearchAllAssignmentsResponse, + SearchAssignmentsRequest, + SearchAssignmentsResponse, + SplitCapacityCommitmentRequest, + SplitCapacityCommitmentResponse, + TableReference, + UpdateAssignmentRequest, + UpdateBiReservationRequest, + UpdateCapacityCommitmentRequest, + UpdateReservationRequest, +) + +__all__ = ( + "ReservationServiceClient", + "ReservationServiceAsyncClient", + "Assignment", + "BiReservation", + "CapacityCommitment", + "CreateAssignmentRequest", + "CreateCapacityCommitmentRequest", + "CreateReservationRequest", + "DeleteAssignmentRequest", + "DeleteCapacityCommitmentRequest", + "DeleteReservationRequest", + "GetBiReservationRequest", + "GetCapacityCommitmentRequest", + "GetReservationRequest", + "ListAssignmentsRequest", + "ListAssignmentsResponse", + "ListCapacityCommitmentsRequest", + "ListCapacityCommitmentsResponse", + "ListReservationsRequest", + "ListReservationsResponse", + "MergeCapacityCommitmentsRequest", + "MoveAssignmentRequest", + "Reservation", + "SearchAllAssignmentsRequest", + "SearchAllAssignmentsResponse", + "SearchAssignmentsRequest", + "SearchAssignmentsResponse", + "SplitCapacityCommitmentRequest", + "SplitCapacityCommitmentResponse", + "TableReference", + "UpdateAssignmentRequest", + "UpdateBiReservationRequest", + "UpdateCapacityCommitmentRequest", + "UpdateReservationRequest", + "Edition", +) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py new file mode 100644 index 000000000000..75d11df9948f --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "1.11.2" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/py.typed b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/py.typed new file mode 100644 index 000000000000..ff261ee1d848 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-reservation package uses inline types. diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/__init__.py new file mode 100644 index 000000000000..c79093837782 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/__init__.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.bigquery_reservation_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.reservation_service import ( + ReservationServiceAsyncClient, + ReservationServiceClient, +) +from .types.reservation import ( + Assignment, + BiReservation, + CapacityCommitment, + CreateAssignmentRequest, + CreateCapacityCommitmentRequest, + CreateReservationRequest, + DeleteAssignmentRequest, + DeleteCapacityCommitmentRequest, + DeleteReservationRequest, + Edition, + GetBiReservationRequest, + GetCapacityCommitmentRequest, + GetReservationRequest, + ListAssignmentsRequest, + ListAssignmentsResponse, + ListCapacityCommitmentsRequest, + ListCapacityCommitmentsResponse, + ListReservationsRequest, + ListReservationsResponse, + MergeCapacityCommitmentsRequest, + MoveAssignmentRequest, + Reservation, + SearchAllAssignmentsRequest, + SearchAllAssignmentsResponse, + SearchAssignmentsRequest, + SearchAssignmentsResponse, + SplitCapacityCommitmentRequest, + SplitCapacityCommitmentResponse, + TableReference, + UpdateAssignmentRequest, + UpdateBiReservationRequest, + UpdateCapacityCommitmentRequest, + UpdateReservationRequest, +) + +__all__ = ( + "ReservationServiceAsyncClient", + "Assignment", + "BiReservation", + "CapacityCommitment", + "CreateAssignmentRequest", + "CreateCapacityCommitmentRequest", + "CreateReservationRequest", + "DeleteAssignmentRequest", + "DeleteCapacityCommitmentRequest", + "DeleteReservationRequest", + "Edition", + "GetBiReservationRequest", + "GetCapacityCommitmentRequest", + "GetReservationRequest", + "ListAssignmentsRequest", + "ListAssignmentsResponse", + "ListCapacityCommitmentsRequest", + "ListCapacityCommitmentsResponse", + "ListReservationsRequest", + "ListReservationsResponse", + "MergeCapacityCommitmentsRequest", + "MoveAssignmentRequest", + "Reservation", + "ReservationServiceClient", + "SearchAllAssignmentsRequest", + "SearchAllAssignmentsResponse", + "SearchAssignmentsRequest", + "SearchAssignmentsResponse", + 
"SplitCapacityCommitmentRequest", + "SplitCapacityCommitmentResponse", + "TableReference", + "UpdateAssignmentRequest", + "UpdateBiReservationRequest", + "UpdateCapacityCommitmentRequest", + "UpdateReservationRequest", +) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_metadata.json b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_metadata.json new file mode 100644 index 000000000000..582310e27c21 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_metadata.json @@ -0,0 +1,343 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery_reservation_v1", + "protoPackage": "google.cloud.bigquery.reservation.v1", + "schema": "1.0", + "services": { + "ReservationService": { + "clients": { + "grpc": { + "libraryClient": "ReservationServiceClient", + "rpcs": { + "CreateAssignment": { + "methods": [ + "create_assignment" + ] + }, + "CreateCapacityCommitment": { + "methods": [ + "create_capacity_commitment" + ] + }, + "CreateReservation": { + "methods": [ + "create_reservation" + ] + }, + "DeleteAssignment": { + "methods": [ + "delete_assignment" + ] + }, + "DeleteCapacityCommitment": { + "methods": [ + "delete_capacity_commitment" + ] + }, + "DeleteReservation": { + "methods": [ + "delete_reservation" + ] + }, + "GetBiReservation": { + "methods": [ + "get_bi_reservation" + ] + }, + "GetCapacityCommitment": { + "methods": [ + "get_capacity_commitment" + ] + }, + "GetReservation": { + "methods": [ + "get_reservation" + ] + }, + "ListAssignments": { + "methods": [ + "list_assignments" + ] + }, + "ListCapacityCommitments": { + "methods": [ + "list_capacity_commitments" + ] + }, + "ListReservations": { + "methods": [ + "list_reservations" + ] + }, + "MergeCapacityCommitments": { + "methods": [ + "merge_capacity_commitments" + ] + }, + "MoveAssignment": { + "methods": [ + "move_assignment" + ] + }, + "SearchAllAssignments": { + "methods": [ + "search_all_assignments" + ] + }, + "SearchAssignments": { + "methods": [ + "search_assignments" + ] + }, + "SplitCapacityCommitment": { + "methods": [ + "split_capacity_commitment" + ] + }, + "UpdateAssignment": { + "methods": [ + "update_assignment" + ] + }, + "UpdateBiReservation": { + "methods": [ + "update_bi_reservation" + ] + }, + "UpdateCapacityCommitment": { + "methods": [ + "update_capacity_commitment" + ] + }, + "UpdateReservation": { + "methods": [ + "update_reservation" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ReservationServiceAsyncClient", + "rpcs": { + "CreateAssignment": { + "methods": [ + "create_assignment" + ] + }, + "CreateCapacityCommitment": { + "methods": [ + "create_capacity_commitment" + ] + }, + "CreateReservation": { + "methods": [ + "create_reservation" + ] + }, + "DeleteAssignment": { + "methods": [ + "delete_assignment" + ] + }, + "DeleteCapacityCommitment": { + "methods": [ + "delete_capacity_commitment" + ] + }, + "DeleteReservation": { + "methods": [ + "delete_reservation" + ] + }, + "GetBiReservation": { + "methods": [ + "get_bi_reservation" + ] + }, + "GetCapacityCommitment": { + "methods": [ + "get_capacity_commitment" + ] + }, + "GetReservation": { + "methods": [ + "get_reservation" + ] + }, + "ListAssignments": { + "methods": [ + "list_assignments" + ] + }, + "ListCapacityCommitments": { + "methods": [ + "list_capacity_commitments" + ] + }, + "ListReservations": { 
+ "methods": [ + "list_reservations" + ] + }, + "MergeCapacityCommitments": { + "methods": [ + "merge_capacity_commitments" + ] + }, + "MoveAssignment": { + "methods": [ + "move_assignment" + ] + }, + "SearchAllAssignments": { + "methods": [ + "search_all_assignments" + ] + }, + "SearchAssignments": { + "methods": [ + "search_assignments" + ] + }, + "SplitCapacityCommitment": { + "methods": [ + "split_capacity_commitment" + ] + }, + "UpdateAssignment": { + "methods": [ + "update_assignment" + ] + }, + "UpdateBiReservation": { + "methods": [ + "update_bi_reservation" + ] + }, + "UpdateCapacityCommitment": { + "methods": [ + "update_capacity_commitment" + ] + }, + "UpdateReservation": { + "methods": [ + "update_reservation" + ] + } + } + }, + "rest": { + "libraryClient": "ReservationServiceClient", + "rpcs": { + "CreateAssignment": { + "methods": [ + "create_assignment" + ] + }, + "CreateCapacityCommitment": { + "methods": [ + "create_capacity_commitment" + ] + }, + "CreateReservation": { + "methods": [ + "create_reservation" + ] + }, + "DeleteAssignment": { + "methods": [ + "delete_assignment" + ] + }, + "DeleteCapacityCommitment": { + "methods": [ + "delete_capacity_commitment" + ] + }, + "DeleteReservation": { + "methods": [ + "delete_reservation" + ] + }, + "GetBiReservation": { + "methods": [ + "get_bi_reservation" + ] + }, + "GetCapacityCommitment": { + "methods": [ + "get_capacity_commitment" + ] + }, + "GetReservation": { + "methods": [ + "get_reservation" + ] + }, + "ListAssignments": { + "methods": [ + "list_assignments" + ] + }, + "ListCapacityCommitments": { + "methods": [ + "list_capacity_commitments" + ] + }, + "ListReservations": { + "methods": [ + "list_reservations" + ] + }, + "MergeCapacityCommitments": { + "methods": [ + "merge_capacity_commitments" + ] + }, + "MoveAssignment": { + "methods": [ + "move_assignment" + ] + }, + "SearchAllAssignments": { + "methods": [ + "search_all_assignments" + ] + }, + "SearchAssignments": { + "methods": [ + "search_assignments" + ] + }, + "SplitCapacityCommitment": { + "methods": [ + "split_capacity_commitment" + ] + }, + "UpdateAssignment": { + "methods": [ + "update_assignment" + ] + }, + "UpdateBiReservation": { + "methods": [ + "update_bi_reservation" + ] + }, + "UpdateCapacityCommitment": { + "methods": [ + "update_capacity_commitment" + ] + }, + "UpdateReservation": { + "methods": [ + "update_reservation" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py new file mode 100644 index 000000000000..75d11df9948f --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "1.11.2" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/py.typed b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/py.typed new file mode 100644 index 000000000000..ff261ee1d848 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-reservation package uses inline types. diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/__init__.py new file mode 100644 index 000000000000..c2a8759aa950 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ReservationServiceAsyncClient +from .client import ReservationServiceClient + +__all__ = ( + "ReservationServiceClient", + "ReservationServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py new file mode 100644 index 000000000000..129f06fc03ec --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py @@ -0,0 +1,2938 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import functools
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+)
+import warnings
+
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core.client_options import ClientOptions
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.bigquery_reservation_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+
+from google.cloud.bigquery_reservation_v1.services.reservation_service import pagers
+from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation
+from google.cloud.bigquery_reservation_v1.types import reservation
+
+from .client import ReservationServiceClient
+from .transports.base import DEFAULT_CLIENT_INFO, ReservationServiceTransport
+from .transports.grpc_asyncio import ReservationServiceGrpcAsyncIOTransport
+
+
+class ReservationServiceAsyncClient:
+    """This API allows users to manage their BigQuery reservations.
+
+    A reservation provides computational resource guarantees, in the
+    form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to
+    users. A slot is a unit of computational power in BigQuery, and
+    serves as the basic unit of parallelism. In a scan of a
+    multi-partitioned table, a single slot operates on a single
+    partition of the table. A reservation resource exists as a child
+    resource of the admin project and location, e.g.:
+    ``projects/myproject/locations/US/reservations/reservationName``.
+
+    A capacity commitment is a way to purchase compute capacity for
+    BigQuery jobs (in the form of slots) with some committed period of
+    usage. A capacity commitment resource exists as a child resource of
+    the admin project and location, e.g.:
+    ``projects/myproject/locations/US/capacityCommitments/id``.
+ """ + + _client: ReservationServiceClient + + DEFAULT_ENDPOINT = ReservationServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ReservationServiceClient.DEFAULT_MTLS_ENDPOINT + + assignment_path = staticmethod(ReservationServiceClient.assignment_path) + parse_assignment_path = staticmethod(ReservationServiceClient.parse_assignment_path) + bi_reservation_path = staticmethod(ReservationServiceClient.bi_reservation_path) + parse_bi_reservation_path = staticmethod( + ReservationServiceClient.parse_bi_reservation_path + ) + capacity_commitment_path = staticmethod( + ReservationServiceClient.capacity_commitment_path + ) + parse_capacity_commitment_path = staticmethod( + ReservationServiceClient.parse_capacity_commitment_path + ) + reservation_path = staticmethod(ReservationServiceClient.reservation_path) + parse_reservation_path = staticmethod( + ReservationServiceClient.parse_reservation_path + ) + common_billing_account_path = staticmethod( + ReservationServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ReservationServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ReservationServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ReservationServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ReservationServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ReservationServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ReservationServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ReservationServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ReservationServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ReservationServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ReservationServiceAsyncClient: The constructed client. + """ + return ReservationServiceClient.from_service_account_info.__func__(ReservationServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ReservationServiceAsyncClient: The constructed client. + """ + return ReservationServiceClient.from_service_account_file.__func__(ReservationServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return ReservationServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> ReservationServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ReservationServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(ReservationServiceClient).get_transport_class,
+        type(ReservationServiceClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, ReservationServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the reservation service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.ReservationServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
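+
+        For example, a client can be pointed at an explicit endpoint through
+        ``client_options`` (a sketch; ``bigqueryreservation.googleapis.com``
+        is the service's default endpoint, used here only to illustrate the
+        option):
+
+        .. code-block:: python
+
+            from google.api_core.client_options import ClientOptions
+
+            options = ClientOptions(api_endpoint="bigqueryreservation.googleapis.com")
+            client = ReservationServiceAsyncClient(client_options=options)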
+ """ + self._client = ReservationServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_reservation( + self, + request: Optional[ + Union[gcbr_reservation.CreateReservationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + reservation: Optional[gcbr_reservation.Reservation] = None, + reservation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbr_reservation.Reservation: + r"""Creates a new reservation resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_create_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateReservationRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.CreateReservationRequest, dict]]): + The request object. The request for + [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation]. + parent (:class:`str`): + Required. Project, location. E.g., + ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (:class:`google.cloud.bigquery_reservation_v1.types.Reservation`): + Definition of the new reservation to + create. + + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation_id (:class:`str`): + The reservation ID. It must only + contain lower case alphanumeric + characters or dashes. It must start with + a letter and must not end with a dash. + Its maximum length is 64 characters. + + This corresponds to the ``reservation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Reservation: + A reservation is a mechanism used to + guarantee slots to users. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, reservation, reservation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = gcbr_reservation.CreateReservationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if reservation is not None: + request.reservation = reservation + if reservation_id is not None: + request.reservation_id = reservation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_reservation, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_reservations( + self, + request: Optional[Union[reservation.ListReservationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReservationsAsyncPager: + r"""Lists all the reservations for the project in the + specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_list_reservations(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListReservationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_reservations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.ListReservationsRequest, dict]]): + The request object. The request for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + parent (:class:`str`): + Required. The parent resource name containing project + and location, e.g.: ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationsAsyncPager: + The response for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
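+        # (Note: a plain dict passed as ``request`` is coerced into a
+        # ListReservationsRequest by the constructor call below.)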
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.ListReservationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_reservations, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListReservationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_reservation( + self, + request: Optional[Union[reservation.GetReservationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Reservation: + r"""Returns information about the reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_get_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetReservationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.GetReservationRequest, dict]]): + The request object. The request for + [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. + name (:class:`str`): + Required. Resource name of the reservation to retrieve. + E.g., + ``projects/myproject/locations/US/reservations/team1-prod`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery_reservation_v1.types.Reservation:
+                A reservation is a mechanism used to
+                guarantee slots to users.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = reservation.GetReservationRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_reservation,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=300.0,
+            ),
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def delete_reservation(
+        self,
+        request: Optional[Union[reservation.DeleteReservationRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Deletes a reservation. Returns
+        ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has
+        assignments.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_reservation_v1
+
+            async def sample_delete_reservation():
+                # Create a client
+                client = bigquery_reservation_v1.ReservationServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = bigquery_reservation_v1.DeleteReservationRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.delete_reservation(request=request)
+
+        Args:
+            request (Optional[Union[google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest, dict]]):
+                The request object. The request for
+                [ReservationService.DeleteReservation][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation].
+            name (:class:`str`):
+                Required. Resource name of the reservation to delete.
+                E.g.,
+                ``projects/myproject/locations/US/reservations/team1-prod``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.DeleteReservationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_reservation, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_reservation( + self, + request: Optional[ + Union[gcbr_reservation.UpdateReservationRequest, dict] + ] = None, + *, + reservation: Optional[gcbr_reservation.Reservation] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbr_reservation.Reservation: + r"""Updates an existing reservation resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_update_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.UpdateReservationRequest( + ) + + # Make the request + response = await client.update_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.UpdateReservationRequest, dict]]): + The request object. The request for + [ReservationService.UpdateReservation][google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation]. + reservation (:class:`google.cloud.bigquery_reservation_v1.types.Reservation`): + Content of the reservation to update. + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Standard field mask for the set of + fields to be updated. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Reservation: + A reservation is a mechanism used to + guarantee slots to users. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([reservation, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcbr_reservation.UpdateReservationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if reservation is not None: + request.reservation = reservation + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_reservation, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("reservation.name", request.reservation.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_capacity_commitment( + self, + request: Optional[ + Union[reservation.CreateCapacityCommitmentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + capacity_commitment: Optional[reservation.CapacityCommitment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Creates a new capacity commitment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_create_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateCapacityCommitmentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_capacity_commitment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.CreateCapacityCommitmentRequest, dict]]): + The request object. The request for + [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. 
+ parent (:class:`str`): + Required. Resource name of the parent reservation. E.g., + ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + capacity_commitment (:class:`google.cloud.bigquery_reservation_v1.types.CapacityCommitment`): + Content of the capacity commitment to + create. + + This corresponds to the ``capacity_commitment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, capacity_commitment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.CreateCapacityCommitmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if capacity_commitment is not None: + request.capacity_commitment = capacity_commitment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_capacity_commitment, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_capacity_commitments( + self, + request: Optional[ + Union[reservation.ListCapacityCommitmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCapacityCommitmentsAsyncPager: + r"""Lists all the capacity commitments for the admin + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_list_capacity_commitments(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListCapacityCommitmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_capacity_commitments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest, dict]]): + The request object. The request for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + parent (:class:`str`): + Required. Resource name of the parent reservation. E.g., + ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListCapacityCommitmentsAsyncPager: + The response for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.ListCapacityCommitmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_capacity_commitments, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCapacityCommitmentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
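+        # (The async pager stores the wrapped RPC together with the original
+        # request and metadata, and re-issues the call to fetch further pages
+        # on demand during ``async for`` iteration.)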
+ return response + + async def get_capacity_commitment( + self, + request: Optional[Union[reservation.GetCapacityCommitmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Returns information about the capacity commitment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_get_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetCapacityCommitmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_capacity_commitment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest, dict]]): + The request object. The request for + [ReservationService.GetCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment]. + name (:class:`str`): + Required. Resource name of the capacity commitment to + retrieve. E.g., + ``projects/myproject/locations/US/capacityCommitments/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.GetCapacityCommitmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
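+        # (The default policy below retries DeadlineExceeded and
+        # ServiceUnavailable with exponential backoff: 0.1s initial delay,
+        # 1.3x multiplier, 60s maximum delay, and a 300s overall deadline.)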
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_capacity_commitment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_capacity_commitment( + self, + request: Optional[ + Union[reservation.DeleteCapacityCommitmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a capacity commitment. Attempting to delete capacity + commitment before its commitment_end_time will fail with the + error code ``google.rpc.Code.FAILED_PRECONDITION``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_delete_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.DeleteCapacityCommitmentRequest( + name="name_value", + ) + + # Make the request + await client.delete_capacity_commitment(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.DeleteCapacityCommitmentRequest, dict]]): + The request object. The request for + [ReservationService.DeleteCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment]. + name (:class:`str`): + Required. Resource name of the capacity commitment to + delete. E.g., + ``projects/myproject/locations/US/capacityCommitments/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.DeleteCapacityCommitmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_capacity_commitment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_capacity_commitment( + self, + request: Optional[ + Union[reservation.UpdateCapacityCommitmentRequest, dict] + ] = None, + *, + capacity_commitment: Optional[reservation.CapacityCommitment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Updates an existing capacity commitment. + + Only ``plan`` and ``renewal_plan`` fields can be updated. + + Plan can only be changed to a plan of a longer commitment + period. Attempting to change to a plan with shorter commitment + period will fail with the error code + ``google.rpc.Code.FAILED_PRECONDITION``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_update_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.UpdateCapacityCommitmentRequest( + ) + + # Make the request + response = await client.update_capacity_commitment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.UpdateCapacityCommitmentRequest, dict]]): + The request object. The request for + [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. + capacity_commitment (:class:`google.cloud.bigquery_reservation_v1.types.CapacityCommitment`): + Content of the capacity commitment to + update. + + This corresponds to the ``capacity_commitment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Standard field mask for the set of + fields to be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([capacity_commitment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.UpdateCapacityCommitmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if capacity_commitment is not None: + request.capacity_commitment = capacity_commitment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_capacity_commitment, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("capacity_commitment.name", request.capacity_commitment.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def split_capacity_commitment( + self, + request: Optional[ + Union[reservation.SplitCapacityCommitmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + slot_count: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.SplitCapacityCommitmentResponse: + r"""Splits capacity commitment to two commitments of the same plan + and ``commitment_end_time``. + + A common use case is to enable downgrading commitments. + + For example, in order to downgrade from 10000 slots to 8000, you + might split a 10000 capacity commitment into commitments of 2000 + and 8000. Then, you delete the first one after the commitment + end time passes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_split_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.SplitCapacityCommitmentRequest( + name="name_value", + ) + + # Make the request + response = await client.split_capacity_commitment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentRequest, dict]]): + The request object. The request for + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + name (:class:`str`): + Required. The resource name e.g.,: + ``projects/myproject/locations/US/capacityCommitments/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + slot_count (:class:`int`): + Number of slots in the capacity + commitment after the split. + + This corresponds to the ``slot_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentResponse: + The response for + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, slot_count]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.SplitCapacityCommitmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if slot_count is not None: + request.slot_count = slot_count + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.split_capacity_commitment, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
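+        # (The response message carries the two commitments produced by the
+        # split; see SplitCapacityCommitmentResponse for the exact fields.)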
+        return response
+
+    async def merge_capacity_commitments(
+        self,
+        request: Optional[
+            Union[reservation.MergeCapacityCommitmentsRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        capacity_commitment_ids: Optional[MutableSequence[str]] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> reservation.CapacityCommitment:
+        r"""Merges capacity commitments of the same plan into a single
+        commitment.
+
+        The resulting capacity commitment has the greater
+        commitment_end_time out of the to-be-merged capacity
+        commitments.
+
+        Attempting to merge capacity commitments of different plans will
+        fail with the error code
+        ``google.rpc.Code.FAILED_PRECONDITION``.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_reservation_v1
+
+            async def sample_merge_capacity_commitments():
+                # Create a client
+                client = bigquery_reservation_v1.ReservationServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = bigquery_reservation_v1.MergeCapacityCommitmentsRequest(
+                )
+
+                # Make the request
+                response = await client.merge_capacity_commitments(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.bigquery_reservation_v1.types.MergeCapacityCommitmentsRequest, dict]]):
+                The request object. The request for
+                [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments].
+            parent (:class:`str`):
+                Parent resource that identifies admin project and
+                location e.g., ``projects/myproject/locations/us``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            capacity_commitment_ids (:class:`MutableSequence[str]`):
+                IDs of capacity commitments to merge.
+                These capacity commitments must exist
+                under the admin project and location
+                specified in the parent.
+                ID is the last portion of the capacity
+                commitment name, e.g., 'abc' for
+                projects/myproject/locations/US/capacityCommitments/abc
+
+                This corresponds to the ``capacity_commitment_ids`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery_reservation_v1.types.CapacityCommitment:
+                Capacity commitment is a way to
+                purchase compute capacity for BigQuery
+                jobs (in the form of slots) with some
+                committed period of usage. Annual
+                commitments renew by default.
+                Commitments can be removed after their
+                commitment end time passes.
+
+                In order to remove annual commitment,
+                its plan needs to be changed to monthly
+                or flex first.
+
+                A capacity commitment resource exists as
+                a child resource of the admin project.
+
+        """
+        # Create or coerce a protobuf request object.
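+        # (capacity_commitment_ids is a repeated proto field, so it is merged
+        # into the request via ``extend`` below rather than plain assignment.)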
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, capacity_commitment_ids])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = reservation.MergeCapacityCommitmentsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if capacity_commitment_ids:
+            request.capacity_commitment_ids.extend(capacity_commitment_ids)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.merge_capacity_commitments,
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def create_assignment(
+        self,
+        request: Optional[Union[reservation.CreateAssignmentRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        assignment: Optional[reservation.Assignment] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> reservation.Assignment:
+        r"""Creates an assignment object which allows the given project to
+        submit jobs of a certain type using slots from the specified
+        reservation.
+
+        Currently a resource (project, folder, organization) can only
+        have one assignment for each (job_type, location) combination,
+        and that reservation will be used for all jobs of the matching
+        type.
+
+        Different assignments can be created on different levels of the
+        projects, folders or organization hierarchy. During query
+        execution, the assignment is looked up at the project, folder
+        and organization levels in that order. The first assignment
+        found is applied to the query.
+
+        When creating assignments, it does not matter if other
+        assignments exist at higher levels.
+
+        Example:
+
+        -  The organization ``organizationA`` contains two projects,
+           ``project1`` and ``project2``.
+        -  Assignments for all three entities (``organizationA``,
+           ``project1``, and ``project2``) could all be created and
+           mapped to the same or different reservations.
+
+        "None" assignments represent an absence of the assignment.
+        Projects assigned to None use on-demand pricing. To create a
+        "None" assignment, use "none" as a reservation_id in the parent.
+        Example parent:
+        ``projects/myproject/locations/US/reservations/none``.
+
+        Returns ``google.rpc.Code.PERMISSION_DENIED`` if user does not
+        have 'bigquery.admin' permissions on the project using the
+        reservation and the project that owns this reservation.
+
+        Returns ``google.rpc.Code.INVALID_ARGUMENT`` when location of
+        the assignment does not match location of the reservation.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_create_assignment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateAssignmentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.CreateAssignmentRequest, dict]]): + The request object. The request for + [ReservationService.CreateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment]. + Note: "bigquery.reservationAssignments.create" + permission is required on the related assignee. + parent (:class:`str`): + Required. The parent resource name of the assignment + E.g. + ``projects/myproject/locations/US/reservations/team1-prod`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + assignment (:class:`google.cloud.bigquery_reservation_v1.types.Assignment`): + Assignment resource to create. + This corresponds to the ``assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Assignment: + An assignment allows a project to + submit jobs of a certain type using + slots from the specified reservation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, assignment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.CreateAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if assignment is not None: + request.assignment = assignment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_assignment, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
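+        # (The ``name`` on the returned Assignment is an output-only field
+        # populated by the service.)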
+ return response
+
+ async def list_assignments(
+ self,
+ request: Optional[Union[reservation.ListAssignmentsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListAssignmentsAsyncPager:
+ r"""Lists assignments.
+
+ Only explicitly created assignments will be returned.
+
+ Example:
+
+ - Organization ``organizationA`` contains two projects,
+ ``project1`` and ``project2``.
+ - Reservation ``res1`` exists and was created previously.
+ - CreateAssignment was used previously to define the following
+ associations between entities and reservations:
+ ``<organizationA, res1>`` and ``<project1, res1>``
+
+ In this example, ListAssignments will just return the above two
+ assignments for reservation ``res1``, and no expansion/merge
+ will happen.
+
+ The wildcard "-" can be used for reservations in the request. In
+ that case, all assignments belonging to the specified project
+ and location will be listed.
+
+ **Note** "-" cannot be used for projects or locations.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_reservation_v1
+
+ async def sample_list_assignments():
+ # Create a client
+ client = bigquery_reservation_v1.ReservationServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = bigquery_reservation_v1.ListAssignmentsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_assignments(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest, dict]]):
+ The request object. The request for
+ [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments].
+ parent (:class:`str`):
+ Required. The parent resource name, e.g.:
+
+ ``projects/myproject/locations/US/reservations/team1-prod``
+
+ Or:
+
+ ``projects/myproject/locations/US/reservations/-``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListAssignmentsAsyncPager:
+ The response for
+ [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
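+ # Illustration (hypothetical values): the request-object and flattened
+ # calling styles are mutually exclusive --
+ #
+ #   await client.list_assignments(
+ #       parent="projects/myproject/locations/US/reservations/team1-prod"
+ #   )
+ #
+ # is equivalent to passing a populated ListAssignmentsRequest, but
+ # combining `request` with `parent` raises the ValueError below.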
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = reservation.ListAssignmentsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_assignments,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+ default_timeout=300.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListAssignmentsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_assignment(
+ self,
+ request: Optional[Union[reservation.DeleteAssignmentRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes an assignment. No expansion will happen.
+
+ Example:
+
+ - Organization ``organizationA`` contains two projects,
+ ``project1`` and ``project2``.
+ - Reservation ``res1`` exists and was created previously.
+ - CreateAssignment was used previously to define the following
+ associations between entities and reservations:
+ ``<organizationA, res1>`` and ``<project1, res1>``
+
+ In this example, deletion of the ``<organizationA, res1>``
+ assignment won't affect the other assignment
+ ``<project1, res1>``. After said deletion, queries from
+ ``project1`` will still use ``res1`` while queries from
+ ``project2`` will switch to use on-demand mode.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_reservation_v1
+
+ async def sample_delete_assignment():
+ # Create a client
+ client = bigquery_reservation_v1.ReservationServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = bigquery_reservation_v1.DeleteAssignmentRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ await client.delete_assignment(request=request)
+
+ Args:
+ request (Optional[Union[google.cloud.bigquery_reservation_v1.types.DeleteAssignmentRequest, dict]]):
+ The request object. The request for
+ [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment].
+ Note: "bigquery.reservationAssignments.delete" + permission is required on the related assignee. + name (:class:`str`): + Required. Name of the resource, e.g. + ``projects/myproject/locations/US/reservations/team1-prod/assignments/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.DeleteAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_assignment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def search_assignments( + self, + request: Optional[Union[reservation.SearchAssignmentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchAssignmentsAsyncPager: + r"""Deprecated: Looks up assignments for a specified resource for a + particular region. If the request is about a project: + + 1. Assignments created on the project will be returned if they + exist. + 2. Otherwise assignments created on the closest ancestor will be + returned. + 3. Assignments for different JobTypes will all be returned. + + The same logic applies if the request is about a folder. + + If the request is about an organization, then assignments + created on the organization will be returned (organization + doesn't have ancestors). + + Comparing to ListAssignments, there are some behavior + differences: + + 1. permission on the assignee will be verified in this API. + 2. Hierarchy lookup (project->folder->organization) happens in + this API. + 3. Parent here is ``projects/*/locations/*``, instead of + ``projects/*/locations/*reservations/*``. + + **Note** "-" cannot be used for projects nor locations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_reservation_v1
+
+ async def sample_search_assignments():
+ # Create a client
+ client = bigquery_reservation_v1.ReservationServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = bigquery_reservation_v1.SearchAssignmentsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.search_assignments(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest, dict]]):
+ The request object. The request for
+ [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
+ Note: "bigquery.reservationAssignments.search"
+ permission is required on the related assignee.
+ parent (:class:`str`):
+ Required. The resource name of the admin project
+ (containing project and location), e.g.:
+ ``projects/myproject/locations/US``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ query (:class:`str`):
+ Please specify the resource name of the assignee in the
+ query.
+
+ Examples:
+
+ - ``assignee=projects/myproject``
+ - ``assignee=folders/123``
+ - ``assignee=organizations/456``
+
+ This corresponds to the ``query`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAssignmentsAsyncPager:
+ The response for
+ [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ warnings.warn(
+ "ReservationServiceAsyncClient.search_assignments is deprecated",
+ DeprecationWarning,
+ )
+
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, query])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = reservation.SearchAssignmentsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if query is not None:
+ request.query = query
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
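+ # The default retry below uses exponential backoff: delays start at
+ # 0.1s and grow by a factor of 1.3 per attempt (0.1s, 0.13s, 0.169s,
+ # ...), capped at 60s per sleep. Only DeadlineExceeded and
+ # ServiceUnavailable errors are retried, until the 300s deadline
+ # is exhausted.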
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.search_assignments,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+ default_timeout=300.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.SearchAssignmentsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def search_all_assignments(
+ self,
+ request: Optional[Union[reservation.SearchAllAssignmentsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ query: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.SearchAllAssignmentsAsyncPager:
+ r"""Looks up assignments for a specified resource for a particular
+ region. If the request is about a project:
+
+ 1. Assignments created on the project will be returned if they
+ exist.
+ 2. Otherwise assignments created on the closest ancestor will be
+ returned.
+ 3. Assignments for different JobTypes will all be returned.
+
+ The same logic applies if the request is about a folder.
+
+ If the request is about an organization, then assignments
+ created on the organization will be returned (an organization
+ doesn't have ancestors).
+
+ Compared to ListAssignments, there are some behavioral
+ differences:
+
+ 1. Permission on the assignee will be verified in this API.
+ 2. Hierarchy lookup (project->folder->organization) happens in
+ this API.
+ 3. Parent here is ``projects/*/locations/*``, instead of
+ ``projects/*/locations/*/reservations/*``.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_reservation_v1
+
+ async def sample_search_all_assignments():
+ # Create a client
+ client = bigquery_reservation_v1.ReservationServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = bigquery_reservation_v1.SearchAllAssignmentsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.search_all_assignments(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest, dict]]):
+ The request object. The request for
+ [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments].
+ Note: "bigquery.reservationAssignments.search"
+ permission is required on the related assignee.
+ parent (:class:`str`):
+ Required. The resource name with location (project name
+ could be the wildcard '-'), e.g.:
+ ``projects/-/locations/US``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ query (:class:`str`):
+ Please specify the resource name of the assignee in the
+ query.
+
+ Examples:
+
+ - ``assignee=projects/myproject``
+ - ``assignee=folders/123``
+ - ``assignee=organizations/456``
+
+ This corresponds to the ``query`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAllAssignmentsAsyncPager:
+ The response for
+ [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, query])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = reservation.SearchAllAssignmentsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if query is not None:
+ request.query = query
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.search_all_assignments,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.SearchAllAssignmentsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def move_assignment(
+ self,
+ request: Optional[Union[reservation.MoveAssignmentRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ destination_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> reservation.Assignment:
+ r"""Moves an assignment under a new reservation.
+
+ This differs from removing an existing assignment and
+ recreating a new one by providing a transactional change
+ that ensures an assignee always has an associated
+ reservation.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_reservation_v1
+
+ async def sample_move_assignment():
+ # Create a client
+ client = bigquery_reservation_v1.ReservationServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = bigquery_reservation_v1.MoveAssignmentRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = await client.move_assignment(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.bigquery_reservation_v1.types.MoveAssignmentRequest, dict]]):
+ The request object. The request for
+ [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment].
+
+ **Note**: "bigquery.reservationAssignments.create"
+ permission is required on the destination_id.
+
+ **Note**: "bigquery.reservationAssignments.create" and
+ "bigquery.reservationAssignments.delete" permissions are
+ required on the related assignee.
+ name (:class:`str`):
+ Required. The resource name of the assignment, e.g.
+ ``projects/myproject/locations/US/reservations/team1-prod/assignments/123``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ destination_id (:class:`str`):
+ The new reservation ID, e.g.:
+ ``projects/myotherproject/locations/US/reservations/team2-prod``
+
+ This corresponds to the ``destination_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.bigquery_reservation_v1.types.Assignment:
+ An assignment allows a project to
+ submit jobs of a certain type using
+ slots from the specified reservation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name, destination_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = reservation.MoveAssignmentRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+ if destination_id is not None:
+ request.destination_id = destination_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.move_assignment,
+ default_timeout=300.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
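+ # The returned Assignment reflects its new parent reservation; as the
+ # docstring above notes, the move is transactional, so the assignee is
+ # never left without an associated reservation.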
+ return response + + async def update_assignment( + self, + request: Optional[Union[reservation.UpdateAssignmentRequest, dict]] = None, + *, + assignment: Optional[reservation.Assignment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Assignment: + r"""Updates an existing assignment. + + Only the ``priority`` field can be updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_update_assignment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.UpdateAssignmentRequest( + ) + + # Make the request + response = await client.update_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.UpdateAssignmentRequest, dict]]): + The request object. The request for + [ReservationService.UpdateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment]. + assignment (:class:`google.cloud.bigquery_reservation_v1.types.Assignment`): + Content of the assignment to update. + This corresponds to the ``assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Standard field mask for the set of + fields to be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Assignment: + An assignment allows a project to + submit jobs of a certain type using + slots from the specified reservation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([assignment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.UpdateAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if assignment is not None: + request.assignment = assignment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
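+ # Illustrative flattened call (hypothetical values): since the
+ # docstring above notes that only the ``priority`` field is mutable,
+ # a typical update would pass
+ # update_mask=field_mask_pb2.FieldMask(paths=["priority"]).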
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_assignment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("assignment.name", request.assignment.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_bi_reservation( + self, + request: Optional[Union[reservation.GetBiReservationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.BiReservation: + r"""Retrieves a BI reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_get_bi_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetBiReservationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_bi_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest, dict]]): + The request object. A request to get a singleton BI + reservation. + name (:class:`str`): + Required. Name of the requested reservation, for + example: + ``projects/{project_id}/locations/{location_id}/biReservation`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.BiReservation: + Represents a BI Reservation. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.GetBiReservationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_bi_reservation,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+ default_timeout=300.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def update_bi_reservation(
+ self,
+ request: Optional[Union[reservation.UpdateBiReservationRequest, dict]] = None,
+ *,
+ bi_reservation: Optional[reservation.BiReservation] = None,
+ update_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> reservation.BiReservation:
+ r"""Updates a BI reservation.
+
+ Only fields specified in the ``field_mask`` are updated.
+
+ A singleton BI reservation always exists with default size 0. In
+ order to reserve BI capacity, it must be updated to an amount
+ greater than 0. In order to release BI capacity, the reservation
+ size must be set to 0.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_reservation_v1
+
+ async def sample_update_bi_reservation():
+ # Create a client
+ client = bigquery_reservation_v1.ReservationServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = bigquery_reservation_v1.UpdateBiReservationRequest(
+ )
+
+ # Make the request
+ response = await client.update_bi_reservation(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.bigquery_reservation_v1.types.UpdateBiReservationRequest, dict]]):
+ The request object. A request to update a BI reservation.
+ bi_reservation (:class:`google.cloud.bigquery_reservation_v1.types.BiReservation`):
+ A reservation to update.
+ This corresponds to the ``bi_reservation`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+ A list of fields to be updated in
+ this request.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.bigquery_reservation_v1.types.BiReservation:
+ Represents a BI Reservation.
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
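+ # Illustrative flattened call (hypothetical values; assumes the
+ # BiReservation ``size`` field carries the reserved capacity):
+ #
+ #   await client.update_bi_reservation(
+ #       bi_reservation=reservation.BiReservation(name=name, size=0),
+ #       update_mask=field_mask_pb2.FieldMask(paths=["size"]),
+ #   )
+ #
+ # Setting the size to 0 releases BI capacity, per the docstring above.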
+ has_flattened_params = any([bi_reservation, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = reservation.UpdateBiReservationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if bi_reservation is not None: + request.bi_reservation = bi_reservation + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_bi_reservation, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("bi_reservation.name", request.bi_reservation.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ReservationServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ReservationServiceAsyncClient",) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py new file mode 100644 index 000000000000..a3c785374782 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py @@ -0,0 +1,3139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+ Dict,
+ Mapping,
+ MutableMapping,
+ MutableSequence,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.bigquery_reservation_v1 import gapic_version as package_version
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object] # type: ignore
+
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
+
+from google.cloud.bigquery_reservation_v1.services.reservation_service import pagers
+from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation
+from google.cloud.bigquery_reservation_v1.types import reservation
+
+from .transports.base import DEFAULT_CLIENT_INFO, ReservationServiceTransport
+from .transports.grpc import ReservationServiceGrpcTransport
+from .transports.grpc_asyncio import ReservationServiceGrpcAsyncIOTransport
+from .transports.rest import ReservationServiceRestTransport
+
+
+class ReservationServiceClientMeta(type):
+ """Metaclass for the ReservationService client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[ReservationServiceTransport]]
+ _transport_registry["grpc"] = ReservationServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = ReservationServiceGrpcAsyncIOTransport
+ _transport_registry["rest"] = ReservationServiceRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[ReservationServiceTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class ReservationServiceClient(metaclass=ReservationServiceClientMeta):
+ """This API allows users to manage their BigQuery reservations.
+
+ A reservation provides computational resource guarantees, in the
+ form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to
+ users. A slot is a unit of computational power in BigQuery, and
+ serves as the basic unit of parallelism. In a scan of a
+ multi-partitioned table, a single slot operates on a single
+ partition of the table. A reservation resource exists as a child
+ resource of the admin project and location, e.g.:
+ ``projects/myproject/locations/US/reservations/reservationName``.
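+
+ For illustration, such a resource name can be built with the
+ ``reservation_path`` helper defined below (IDs are hypothetical):
+
+ .. code-block:: python
+
+ path = ReservationServiceClient.reservation_path(
+ "myproject", "US", "reservationName"
+ )
+ # -> "projects/myproject/locations/US/reservations/reservationName"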
+
+ A capacity commitment is a way to purchase compute capacity for
+ BigQuery jobs (in the form of slots) with some committed period of
+ usage. A capacity commitment resource exists as a child resource of
+ the admin project and location, e.g.:
+ ``projects/myproject/locations/US/capacityCommitments/id``.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "bigqueryreservation.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ ReservationServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ ReservationServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> ReservationServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ ReservationServiceTransport: The transport used by the client
+ instance.
+ """ + return self._transport + + @staticmethod + def assignment_path( + project: str, + location: str, + reservation: str, + assignment: str, + ) -> str: + """Returns a fully-qualified assignment string.""" + return "projects/{project}/locations/{location}/reservations/{reservation}/assignments/{assignment}".format( + project=project, + location=location, + reservation=reservation, + assignment=assignment, + ) + + @staticmethod + def parse_assignment_path(path: str) -> Dict[str, str]: + """Parses a assignment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/reservations/(?P.+?)/assignments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def bi_reservation_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified bi_reservation string.""" + return "projects/{project}/locations/{location}/biReservation".format( + project=project, + location=location, + ) + + @staticmethod + def parse_bi_reservation_path(path: str) -> Dict[str, str]: + """Parses a bi_reservation path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/biReservation$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def capacity_commitment_path( + project: str, + location: str, + capacity_commitment: str, + ) -> str: + """Returns a fully-qualified capacity_commitment string.""" + return "projects/{project}/locations/{location}/capacityCommitments/{capacity_commitment}".format( + project=project, + location=location, + capacity_commitment=capacity_commitment, + ) + + @staticmethod + def parse_capacity_commitment_path(path: str) -> Dict[str, str]: + """Parses a capacity_commitment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/capacityCommitments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def reservation_path( + project: str, + location: str, + reservation: str, + ) -> str: + """Returns a fully-qualified reservation string.""" + return ( + "projects/{project}/locations/{location}/reservations/{reservation}".format( + project=project, + location=location, + reservation=reservation, + ) + ) + + @staticmethod + def parse_reservation_path(path: str) -> Dict[str, str]: + """Parses a reservation path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/reservations/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + 
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[client_options_lib.ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Figure out the client cert source to use.
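+ # Decision summary (see the docstring above):
+ #   GOOGLE_API_USE_CLIENT_CERTIFICATE == "false" -> no client cert;
+ #   "true" and client_options.client_cert_source set -> use that source;
+ #   "true" and a default cert is discoverable -> use the default source.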
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ReservationServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the reservation service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ReservationServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
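+ # (For example, transport="rest" selects ReservationServiceRestTransport
+ # from the registry defined on the metaclass above, while transport=None
+ # falls back to the first registered entry, "grpc".)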
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ReservationServiceTransport): + # transport is a ReservationServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_reservation( + self, + request: Optional[ + Union[gcbr_reservation.CreateReservationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + reservation: Optional[gcbr_reservation.Reservation] = None, + reservation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbr_reservation.Reservation: + r"""Creates a new reservation resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_create_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateReservationRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.CreateReservationRequest, dict]): + The request object. The request for + [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation]. + parent (str): + Required. Project, location. E.g., + ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation (google.cloud.bigquery_reservation_v1.types.Reservation): + Definition of the new reservation to + create. + + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reservation_id (str): + The reservation ID. It must only + contain lower case alphanumeric + characters or dashes. It must start with + a letter and must not end with a dash. + Its maximum length is 64 characters. 
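+ For example, ``team1-prod`` is a valid
+ reservation ID.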
+ + This corresponds to the ``reservation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Reservation: + A reservation is a mechanism used to + guarantee slots to users. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, reservation, reservation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcbr_reservation.CreateReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcbr_reservation.CreateReservationRequest): + request = gcbr_reservation.CreateReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if reservation is not None: + request.reservation = reservation + if reservation_id is not None: + request.reservation_id = reservation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_reservation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_reservations( + self, + request: Optional[Union[reservation.ListReservationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListReservationsPager: + r"""Lists all the reservations for the project in the + specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_list_reservations(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListReservationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_reservations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.ListReservationsRequest, dict]): + The request object. 
The request for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + parent (str): + Required. The parent resource name containing project + and location, e.g.: ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationsPager: + The response for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.ListReservationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.ListReservationsRequest): + request = reservation.ListReservationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_reservations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListReservationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_reservation( + self, + request: Optional[Union[reservation.GetReservationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Reservation: + r"""Returns information about the reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_get_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetReservationRequest( + name="name_value", + ) + + # Make the request + response = client.get_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.GetReservationRequest, dict]): + The request object. The request for + [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. + name (str): + Required. Resource name of the reservation to retrieve. + E.g., + ``projects/myproject/locations/US/reservations/team1-prod`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Reservation: + A reservation is a mechanism used to + guarantee slots to users. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.GetReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.GetReservationRequest): + request = reservation.GetReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_reservation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_reservation( + self, + request: Optional[Union[reservation.DeleteReservationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a reservation. Returns + ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has + assignments. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_delete_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.DeleteReservationRequest( + name="name_value", + ) + + # Make the request + client.delete_reservation(request=request) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest, dict]): + The request object. The request for + [ReservationService.DeleteReservation][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation]. + name (str): + Required. Resource name of the reservation to retrieve. + E.g., + ``projects/myproject/locations/US/reservations/team1-prod`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.DeleteReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.DeleteReservationRequest): + request = reservation.DeleteReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_reservation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_reservation( + self, + request: Optional[ + Union[gcbr_reservation.UpdateReservationRequest, dict] + ] = None, + *, + reservation: Optional[gcbr_reservation.Reservation] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbr_reservation.Reservation: + r"""Updates an existing reservation resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_update_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.UpdateReservationRequest( + ) + + # Make the request + response = client.update_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.UpdateReservationRequest, dict]): + The request object. The request for + [ReservationService.UpdateReservation][google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation]. + reservation (google.cloud.bigquery_reservation_v1.types.Reservation): + Content of the reservation to update. + This corresponds to the ``reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Standard field mask for the set of + fields to be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Reservation: + A reservation is a mechanism used to + guarantee slots to users. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([reservation, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcbr_reservation.UpdateReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcbr_reservation.UpdateReservationRequest): + request = gcbr_reservation.UpdateReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if reservation is not None: + request.reservation = reservation + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_reservation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("reservation.name", request.reservation.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
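+ # Note: only the fields named in ``update_mask`` are overwritten; all
+ # other fields keep their current server-side values. A minimal
+ # caller-side sketch (field name assumed, illustrative only):
+ #     mask = field_mask_pb2.FieldMask(paths=["slot_capacity"])
+ #     client.update_reservation(reservation=res, update_mask=mask)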
+ return response + + def create_capacity_commitment( + self, + request: Optional[ + Union[reservation.CreateCapacityCommitmentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + capacity_commitment: Optional[reservation.CapacityCommitment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Creates a new capacity commitment resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_create_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateCapacityCommitmentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_capacity_commitment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.CreateCapacityCommitmentRequest, dict]): + The request object. The request for + [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. + parent (str): + Required. Resource name of the parent reservation. E.g., + ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + capacity_commitment (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): + Content of the capacity commitment to + create. + + This corresponds to the ``capacity_commitment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, capacity_commitment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.CreateCapacityCommitmentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.CreateCapacityCommitmentRequest): + request = reservation.CreateCapacityCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if capacity_commitment is not None: + request.capacity_commitment = capacity_commitment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_capacity_commitment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_capacity_commitments( + self, + request: Optional[ + Union[reservation.ListCapacityCommitmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListCapacityCommitmentsPager: + r"""Lists all the capacity commitments for the admin + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_list_capacity_commitments(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListCapacityCommitmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_capacity_commitments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest, dict]): + The request object. The request for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + parent (str): + Required. Resource name of the parent reservation. E.g., + ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListCapacityCommitmentsPager: + The response for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
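+ # Either a request object/dict or flattened fields may be given, but
+ # not both; e.g. (illustrative values):
+ #     client.list_capacity_commitments(parent="projects/p/locations/US")
+ # is equivalent to passing
+ #     ListCapacityCommitmentsRequest(parent="projects/p/locations/US").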
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.ListCapacityCommitmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.ListCapacityCommitmentsRequest): + request = reservation.ListCapacityCommitmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_capacity_commitments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCapacityCommitmentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_capacity_commitment( + self, + request: Optional[Union[reservation.GetCapacityCommitmentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Returns information about the capacity commitment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_get_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetCapacityCommitmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_capacity_commitment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest, dict]): + The request object. The request for + [ReservationService.GetCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment]. + name (str): + Required. Resource name of the capacity commitment to + retrieve. E.g., + ``projects/myproject/locations/US/capacityCommitments/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.GetCapacityCommitmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.GetCapacityCommitmentRequest): + request = reservation.GetCapacityCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_capacity_commitment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_capacity_commitment( + self, + request: Optional[ + Union[reservation.DeleteCapacityCommitmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a capacity commitment. Attempting to delete capacity + commitment before its commitment_end_time will fail with the + error code ``google.rpc.Code.FAILED_PRECONDITION``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_delete_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.DeleteCapacityCommitmentRequest( + name="name_value", + ) + + # Make the request + client.delete_capacity_commitment(request=request) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.DeleteCapacityCommitmentRequest, dict]): + The request object. The request for + [ReservationService.DeleteCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment]. + name (str): + Required. Resource name of the capacity commitment to + delete. E.g., + ``projects/myproject/locations/US/capacityCommitments/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.DeleteCapacityCommitmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.DeleteCapacityCommitmentRequest): + request = reservation.DeleteCapacityCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_capacity_commitment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_capacity_commitment( + self, + request: Optional[ + Union[reservation.UpdateCapacityCommitmentRequest, dict] + ] = None, + *, + capacity_commitment: Optional[reservation.CapacityCommitment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Updates an existing capacity commitment. + + Only ``plan`` and ``renewal_plan`` fields can be updated. + + Plan can only be changed to a plan of a longer commitment + period. Attempting to change to a plan with shorter commitment + period will fail with the error code + ``google.rpc.Code.FAILED_PRECONDITION``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_update_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.UpdateCapacityCommitmentRequest( + ) + + # Make the request + response = client.update_capacity_commitment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.UpdateCapacityCommitmentRequest, dict]): + The request object. The request for + [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. + capacity_commitment (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): + Content of the capacity commitment to + update. + + This corresponds to the ``capacity_commitment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Standard field mask for the set of + fields to be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([capacity_commitment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.UpdateCapacityCommitmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.UpdateCapacityCommitmentRequest): + request = reservation.UpdateCapacityCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
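+ # Per the docstring above, only ``plan`` and ``renewal_plan`` are
+ # updatable; an illustrative mask (assumed values) would be
+ #     field_mask_pb2.FieldMask(paths=["plan"])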
+ if capacity_commitment is not None: + request.capacity_commitment = capacity_commitment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_capacity_commitment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("capacity_commitment.name", request.capacity_commitment.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def split_capacity_commitment( + self, + request: Optional[ + Union[reservation.SplitCapacityCommitmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + slot_count: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.SplitCapacityCommitmentResponse: + r"""Splits capacity commitment to two commitments of the same plan + and ``commitment_end_time``. + + A common use case is to enable downgrading commitments. + + For example, in order to downgrade from 10000 slots to 8000, you + might split a 10000 capacity commitment into commitments of 2000 + and 8000. Then, you delete the first one after the commitment + end time passes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_split_capacity_commitment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.SplitCapacityCommitmentRequest( + name="name_value", + ) + + # Make the request + response = client.split_capacity_commitment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentRequest, dict]): + The request object. The request for + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + name (str): + Required. The resource name e.g.,: + ``projects/myproject/locations/US/capacityCommitments/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + slot_count (int): + Number of slots in the capacity + commitment after the split. + + This corresponds to the ``slot_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentResponse: + The response for + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, slot_count]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.SplitCapacityCommitmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.SplitCapacityCommitmentRequest): + request = reservation.SplitCapacityCommitmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if slot_count is not None: + request.slot_count = slot_count + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.split_capacity_commitment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def merge_capacity_commitments( + self, + request: Optional[ + Union[reservation.MergeCapacityCommitmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + capacity_commitment_ids: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Merges capacity commitments of the same plan into a single + commitment. + + The resulting capacity commitment has the greater + commitment_end_time out of the to-be-merged capacity + commitments. + + Attempting to merge capacity commitments of different plan will + fail with the error code + ``google.rpc.Code.FAILED_PRECONDITION``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_merge_capacity_commitments(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.MergeCapacityCommitmentsRequest( + ) + + # Make the request + response = client.merge_capacity_commitments(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.MergeCapacityCommitmentsRequest, dict]): + The request object. 
The request for + [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments]. + parent (str): + Parent resource that identifies admin project and + location e.g., ``projects/myproject/locations/us`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + capacity_commitment_ids (MutableSequence[str]): + Ids of capacity commitments to merge. + These capacity commitments must exist + under admin project and location + specified in the parent. + ID is the last portion of capacity + commitment name e.g., 'abc' for + projects/myproject/locations/US/capacityCommitments/abc + + This corresponds to the ``capacity_commitment_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, capacity_commitment_ids]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.MergeCapacityCommitmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.MergeCapacityCommitmentsRequest): + request = reservation.MergeCapacityCommitmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if capacity_commitment_ids is not None: + request.capacity_commitment_ids = capacity_commitment_ids + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.merge_capacity_commitments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
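+ # A hypothetical merge mirroring the docstring (IDs assumed):
+ #     client.merge_capacity_commitments(
+ #         parent="projects/myproject/locations/US",
+ #         capacity_commitment_ids=["abc", "def"],
+ #     )
+ # The merged commitment takes the greater commitment_end_time of the
+ # inputs, as documented above.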
+ return response + + def create_assignment( + self, + request: Optional[Union[reservation.CreateAssignmentRequest, dict]] = None, + *, + parent: Optional[str] = None, + assignment: Optional[reservation.Assignment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Assignment: + r"""Creates an assignment object which allows the given project to + submit jobs of a certain type using slots from the specified + reservation. + + Currently a resource (project, folder, organization) can only + have one assignment per each (job_type, location) combination, + and that reservation will be used for all jobs of the matching + type. + + Different assignments can be created on different levels of the + projects, folders or organization hierarchy. During query + execution, the assignment is looked up at the project, folder + and organization levels in that order. The first assignment + found is applied to the query. + + When creating assignments, it does not matter if other + assignments exist at higher levels. + + Example: + + - The organization ``organizationA`` contains two projects, + ``project1`` and ``project2``. + - Assignments for all three entities (``organizationA``, + ``project1``, and ``project2``) could all be created and + mapped to the same or different reservations. + + "None" assignments represent an absence of the assignment. + Projects assigned to None use on-demand pricing. To create a + "None" assignment, use "none" as a reservation_id in the parent. + Example parent: + ``projects/myproject/locations/US/reservations/none``. + + Returns ``google.rpc.Code.PERMISSION_DENIED`` if user does not + have 'bigquery.admin' permissions on the project using the + reservation and the project that owns this reservation. + + Returns ``google.rpc.Code.INVALID_ARGUMENT`` when location of + the assignment does not match location of the reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_create_assignment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateAssignmentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.CreateAssignmentRequest, dict]): + The request object. The request for + [ReservationService.CreateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment]. + Note: "bigquery.reservationAssignments.create" + permission is required on the related assignee. + parent (str): + Required. The parent resource name of the assignment + E.g. + ``projects/myproject/locations/US/reservations/team1-prod`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ assignment (google.cloud.bigquery_reservation_v1.types.Assignment):
+ Assignment resource to create.
+ This corresponds to the ``assignment`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.bigquery_reservation_v1.types.Assignment:
+ An assignment allows a project to
+ submit jobs of a certain type using
+ slots from the specified reservation.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, assignment])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a reservation.CreateAssignmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, reservation.CreateAssignmentRequest):
+ request = reservation.CreateAssignmentRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if assignment is not None:
+ request.assignment = assignment
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_assignment]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_assignments(
+ self,
+ request: Optional[Union[reservation.ListAssignmentsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListAssignmentsPager:
+ r"""Lists assignments.
+
+ Only explicitly created assignments will be returned.
+
+ Example:
+
+ - Organization ``organizationA`` contains two projects,
+ ``project1`` and ``project2``.
+ - Reservation ``res1`` exists and was created previously.
+ - CreateAssignment was used previously to define the following
+ associations between entities and reservations:
+ ``<organizationA, res1>`` and ``<project1, res1>``
+
+ In this example, ListAssignments will just return the above two
+ assignments for reservation ``res1``, and no expansion/merge
+ will happen.
+
+ The wildcard "-" can be used for reservations in the request. In
+ that case all assignments belonging to the specified project and
+ location will be listed.
+
+ **Note** "-" cannot be used for projects or locations.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_list_assignments(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListAssignmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assignments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest, dict]): + The request object. The request for + [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. + parent (str): + Required. The parent resource name e.g.: + + ``projects/myproject/locations/US/reservations/team1-prod`` + + Or: + + ``projects/myproject/locations/US/reservations/-`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListAssignmentsPager: + The response for + [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.ListAssignmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.ListAssignmentsRequest): + request = reservation.ListAssignmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_assignments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAssignmentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
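+ # The pager fetches follow-up ListAssignments pages lazily, e.g.
+ # (illustrative):
+ #     for assignment in client.list_assignments(parent=parent):
+ #         print(assignment.name)
+ # Each new page is requested only as iteration crosses a page boundary.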
+ return response
+
+ def delete_assignment(
+ self,
+ request: Optional[Union[reservation.DeleteAssignmentRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes an assignment. No expansion will happen.
+
+ Example:
+
+ - Organization ``organizationA`` contains two projects,
+ ``project1`` and ``project2``.
+ - Reservation ``res1`` exists and was created previously.
+ - CreateAssignment was used previously to define the following
+ associations between entities and reservations:
+ ``<organizationA, res1>`` and ``<project1, res1>``
+
+ In this example, deletion of the ``<organizationA, res1>``
+ assignment won't affect the other assignment
+ ``<project1, res1>``. After said deletion, queries from
+ ``project1`` will still use ``res1`` while queries from
+ ``project2`` will switch to on-demand mode.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import bigquery_reservation_v1
+
+ def sample_delete_assignment():
+ # Create a client
+ client = bigquery_reservation_v1.ReservationServiceClient()
+
+ # Initialize request argument(s)
+ request = bigquery_reservation_v1.DeleteAssignmentRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ client.delete_assignment(request=request)
+
+ Args:
+ request (Union[google.cloud.bigquery_reservation_v1.types.DeleteAssignmentRequest, dict]):
+ The request object. The request for
+ [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment].
+ Note: "bigquery.reservationAssignments.delete"
+ permission is required on the related assignee.
+ name (str):
+ Required. Name of the resource, e.g.
+ ``projects/myproject/locations/US/reservations/team1-prod/assignments/123``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a reservation.DeleteAssignmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, reservation.DeleteAssignmentRequest):
+ request = reservation.DeleteAssignmentRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
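+ # When ``retry`` and ``timeout`` are left as gapic_v1.method.DEFAULT,
+ # the wrapped method applies this RPC's defaults from the service
+ # configuration shipped with the client.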
+        rpc = self._transport._wrapped_methods[self._transport.delete_assignment]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def search_assignments(
+        self,
+        request: Optional[Union[reservation.SearchAssignmentsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        query: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.SearchAssignmentsPager:
+        r"""Deprecated: Looks up assignments for a specified resource for a
+        particular region. If the request is about a project:
+
+        1. Assignments created on the project will be returned if they
+           exist.
+        2. Otherwise assignments created on the closest ancestor will be
+           returned.
+        3. Assignments for different JobTypes will all be returned.
+
+        The same logic applies if the request is about a folder.
+
+        If the request is about an organization, then assignments
+        created on the organization will be returned (organization
+        doesn't have ancestors).
+
+        Compared to ListAssignments, there are some behavioral
+        differences:
+
+        1. Permission on the assignee is verified in this API.
+        2. Hierarchy lookup (project->folder->organization) happens in
+           this API.
+        3. Parent here is ``projects/*/locations/*``, instead of
+           ``projects/*/locations/*/reservations/*``.
+
+        **Note** "-" cannot be used for projects or locations.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_reservation_v1
+
+            def sample_search_assignments():
+                # Create a client
+                client = bigquery_reservation_v1.ReservationServiceClient()
+
+                # Initialize request argument(s)
+                request = bigquery_reservation_v1.SearchAssignmentsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.search_assignments(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest, dict]):
+                The request object. The request for
+                [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
+                Note: "bigquery.reservationAssignments.search"
+                permission is required on the related assignee.
+            parent (str):
+                Required. The resource name of the admin
+                project (containing project and location), e.g.:
+                ``projects/myproject/locations/US``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            query (str):
+                Please specify resource name as assignee in the query.
+
+                Examples:
+
+                -  ``assignee=projects/myproject``
+                -  ``assignee=folders/123``
+                -  ``assignee=organizations/456``
+
+                This corresponds to the ``query`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAssignmentsPager:
+                The response for
+                [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        warnings.warn(
+            "ReservationServiceClient.search_assignments is deprecated",
+            DeprecationWarning,
+        )
+
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, query])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a reservation.SearchAssignmentsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, reservation.SearchAssignmentsRequest):
+            request = reservation.SearchAssignmentsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if query is not None:
+            request.query = query
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.search_assignments]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.SearchAssignmentsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def search_all_assignments(
+        self,
+        request: Optional[Union[reservation.SearchAllAssignmentsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        query: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.SearchAllAssignmentsPager:
+        r"""Looks up assignments for a specified resource for a particular
+        region. If the request is about a project:
+
+        1. Assignments created on the project will be returned if they
+           exist.
+        2. Otherwise assignments created on the closest ancestor will be
+           returned.
+        3. Assignments for different JobTypes will all be returned.
+
+        The same logic applies if the request is about a folder.
+
+        If the request is about an organization, then assignments
+        created on the organization will be returned (organization
+        doesn't have ancestors).
+
+        Compared to ListAssignments, there are some behavioral
+        differences:
+
+        1. Permission on the assignee is verified in this API.
+        2. Hierarchy lookup (project->folder->organization) happens in
+           this API.
+        3. Parent here is ``projects/*/locations/*``, instead of
+           ``projects/*/locations/*/reservations/*``.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import bigquery_reservation_v1
+
+            def sample_search_all_assignments():
+                # Create a client
+                client = bigquery_reservation_v1.ReservationServiceClient()
+
+                # Initialize request argument(s)
+                request = bigquery_reservation_v1.SearchAllAssignmentsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.search_all_assignments(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest, dict]):
+                The request object. The request for
+                [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments].
+                Note: "bigquery.reservationAssignments.search"
+                permission is required on the related assignee.
+            parent (str):
+                Required. The resource name with location (project name
+                could be the wildcard '-'), e.g.:
+                ``projects/-/locations/US``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            query (str):
+                Please specify resource name as assignee in the query.
+
+                Examples:
+
+                -  ``assignee=projects/myproject``
+                -  ``assignee=folders/123``
+                -  ``assignee=organizations/456``
+
+                This corresponds to the ``query`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAllAssignmentsPager:
+                The response for
+                [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, query])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a reservation.SearchAllAssignmentsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, reservation.SearchAllAssignmentsRequest):
+            request = reservation.SearchAllAssignmentsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
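+        # For example, a flattened call such as (hypothetical values):
+        #
+        #     client.search_all_assignments(
+        #         parent="projects/-/locations/US",
+        #         query="assignee=projects/myproject",
+        #     )
+        #
+        # is translated into `request.parent` and `request.query` below.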
+ if parent is not None: + request.parent = parent + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_all_assignments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchAllAssignmentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def move_assignment( + self, + request: Optional[Union[reservation.MoveAssignmentRequest, dict]] = None, + *, + name: Optional[str] = None, + destination_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Assignment: + r"""Moves an assignment under a new reservation. + + This differs from removing an existing assignment and + recreating a new one by providing a transactional change + that ensures an assignee always has an associated + reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_move_assignment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.MoveAssignmentRequest( + name="name_value", + ) + + # Make the request + response = client.move_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.MoveAssignmentRequest, dict]): + The request object. The request for + [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment]. + + **Note**: "bigquery.reservationAssignments.create" + permission is required on the destination_id. + + **Note**: "bigquery.reservationAssignments.create" and + "bigquery.reservationAssignments.delete" permission are + required on the related assignee. + name (str): + Required. The resource name of the assignment, e.g. + ``projects/myproject/locations/US/reservations/team1-prod/assignments/123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + destination_id (str): + The new reservation ID, e.g.: + ``projects/myotherproject/locations/US/reservations/team2-prod`` + + This corresponds to the ``destination_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Assignment: + An assignment allows a project to + submit jobs of a certain type using + slots from the specified reservation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, destination_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.MoveAssignmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.MoveAssignmentRequest): + request = reservation.MoveAssignmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if destination_id is not None: + request.destination_id = destination_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.move_assignment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_assignment( + self, + request: Optional[Union[reservation.UpdateAssignmentRequest, dict]] = None, + *, + assignment: Optional[reservation.Assignment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Assignment: + r"""Updates an existing assignment. + + Only the ``priority`` field can be updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_update_assignment(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.UpdateAssignmentRequest( + ) + + # Make the request + response = client.update_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.UpdateAssignmentRequest, dict]): + The request object. The request for + [ReservationService.UpdateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment]. + assignment (google.cloud.bigquery_reservation_v1.types.Assignment): + Content of the assignment to update. 
+ This corresponds to the ``assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Standard field mask for the set of + fields to be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.Assignment: + An assignment allows a project to + submit jobs of a certain type using + slots from the specified reservation. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([assignment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.UpdateAssignmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.UpdateAssignmentRequest): + request = reservation.UpdateAssignmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if assignment is not None: + request.assignment = assignment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_assignment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("assignment.name", request.assignment.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_bi_reservation( + self, + request: Optional[Union[reservation.GetBiReservationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.BiReservation: + r"""Retrieves a BI reservation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_get_bi_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetBiReservationRequest( + name="name_value", + ) + + # Make the request + response = client.get_bi_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest, dict]): + The request object. A request to get a singleton BI + reservation. + name (str): + Required. Name of the requested reservation, for + example: + ``projects/{project_id}/locations/{location_id}/biReservation`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.BiReservation: + Represents a BI Reservation. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.GetBiReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.GetBiReservationRequest): + request = reservation.GetBiReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_bi_reservation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_bi_reservation( + self, + request: Optional[Union[reservation.UpdateBiReservationRequest, dict]] = None, + *, + bi_reservation: Optional[reservation.BiReservation] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.BiReservation: + r"""Updates a BI reservation. + + Only fields specified in the ``field_mask`` are updated. + + A singleton BI reservation always exists with default size 0. In + order to reserve BI capacity it needs to be updated to an amount + greater than 0. In order to release BI capacity reservation size + must be set to 0. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_update_bi_reservation(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.UpdateBiReservationRequest( + ) + + # Make the request + response = client.update_bi_reservation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.UpdateBiReservationRequest, dict]): + The request object. A request to update a BI reservation. + bi_reservation (google.cloud.bigquery_reservation_v1.types.BiReservation): + A reservation to update. + This corresponds to the ``bi_reservation`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A list of fields to be updated in + this request. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.bigquery_reservation_v1.types.BiReservation: + Represents a BI Reservation. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([bi_reservation, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a reservation.UpdateBiReservationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, reservation.UpdateBiReservationRequest): + request = reservation.UpdateBiReservationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if bi_reservation is not None: + request.bi_reservation = bi_reservation + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_bi_reservation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("bi_reservation.name", request.bi_reservation.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
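+        # As the docstring notes, BI capacity is released by setting the
+        # reservation size to 0; a hedged sketch of such a call
+        # (hypothetical `name` value):
+        #
+        #     client.update_bi_reservation(
+        #         bi_reservation=reservation.BiReservation(name=name, size=0),
+        #         update_mask=field_mask_pb2.FieldMask(paths=["size"]),
+        #     )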
+ return response + + def __enter__(self) -> "ReservationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ReservationServiceClient",) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py new file mode 100644 index 000000000000..55fbc701b219 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py @@ -0,0 +1,667 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.bigquery_reservation_v1.types import reservation + + +class ListReservationsPager: + """A pager for iterating through ``list_reservations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.ListReservationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``reservations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListReservations`` requests and continue to iterate + through the ``reservations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListReservationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., reservation.ListReservationsResponse], + request: reservation.ListReservationsRequest, + response: reservation.ListReservationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.ListReservationsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.ListReservationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = reservation.ListReservationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[reservation.ListReservationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[reservation.Reservation]: + for page in self.pages: + yield from page.reservations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListReservationsAsyncPager: + """A pager for iterating through ``list_reservations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.ListReservationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``reservations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListReservations`` requests and continue to iterate + through the ``reservations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListReservationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[reservation.ListReservationsResponse]], + request: reservation.ListReservationsRequest, + response: reservation.ListReservationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.ListReservationsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.ListReservationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = reservation.ListReservationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[reservation.ListReservationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[reservation.Reservation]: + async def async_generator(): + async for page in self.pages: + for response in page.reservations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCapacityCommitmentsPager: + """A pager for iterating through ``list_capacity_commitments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``capacity_commitments`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListCapacityCommitments`` requests and continue to iterate + through the ``capacity_commitments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., reservation.ListCapacityCommitmentsResponse], + request: reservation.ListCapacityCommitmentsRequest, + response: reservation.ListCapacityCommitmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = reservation.ListCapacityCommitmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[reservation.ListCapacityCommitmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[reservation.CapacityCommitment]: + for page in self.pages: + yield from page.capacity_commitments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCapacityCommitmentsAsyncPager: + """A pager for iterating through ``list_capacity_commitments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``capacity_commitments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCapacityCommitments`` requests and continue to iterate + through the ``capacity_commitments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[reservation.ListCapacityCommitmentsResponse]], + request: reservation.ListCapacityCommitmentsRequest, + response: reservation.ListCapacityCommitmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = reservation.ListCapacityCommitmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[reservation.ListCapacityCommitmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[reservation.CapacityCommitment]: + async def async_generator(): + async for page in self.pages: + for response in page.capacity_commitments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAssignmentsPager: + """A pager for iterating through ``list_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``assignments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAssignments`` requests and continue to iterate + through the ``assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., reservation.ListAssignmentsResponse], + request: reservation.ListAssignmentsRequest, + response: reservation.ListAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = reservation.ListAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[reservation.ListAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[reservation.Assignment]: + for page in self.pages: + yield from page.assignments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAssignmentsAsyncPager: + """A pager for iterating through ``list_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``assignments`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListAssignments`` requests and continue to iterate + through the ``assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[reservation.ListAssignmentsResponse]], + request: reservation.ListAssignmentsRequest, + response: reservation.ListAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = reservation.ListAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[reservation.ListAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[reservation.Assignment]: + async def async_generator(): + async for page in self.pages: + for response in page.assignments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchAssignmentsPager: + """A pager for iterating through ``search_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``assignments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchAssignments`` requests and continue to iterate + through the ``assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., reservation.SearchAssignmentsResponse], + request: reservation.SearchAssignmentsRequest, + response: reservation.SearchAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = reservation.SearchAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[reservation.SearchAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[reservation.Assignment]: + for page in self.pages: + yield from page.assignments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchAssignmentsAsyncPager: + """A pager for iterating through ``search_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``assignments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchAssignments`` requests and continue to iterate + through the ``assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[reservation.SearchAssignmentsResponse]], + request: reservation.SearchAssignmentsRequest, + response: reservation.SearchAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = reservation.SearchAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[reservation.SearchAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[reservation.Assignment]: + async def async_generator(): + async for page in self.pages: + for response in page.assignments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchAllAssignmentsPager: + """A pager for iterating through ``search_all_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``assignments`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``SearchAllAssignments`` requests and continue to iterate + through the ``assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., reservation.SearchAllAssignmentsResponse], + request: reservation.SearchAllAssignmentsRequest, + response: reservation.SearchAllAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = reservation.SearchAllAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[reservation.SearchAllAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[reservation.Assignment]: + for page in self.pages: + yield from page.assignments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchAllAssignmentsAsyncPager: + """A pager for iterating through ``search_all_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``assignments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchAllAssignments`` requests and continue to iterate + through the ``assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[reservation.SearchAllAssignmentsResponse]], + request: reservation.SearchAllAssignmentsRequest, + response: reservation.SearchAllAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = reservation.SearchAllAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[reservation.SearchAllAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[reservation.Assignment]: + async def async_generator(): + async for page in self.pages: + for response in page.assignments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/__init__.py new file mode 100644 index 000000000000..a3bf16eac4eb --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ReservationServiceTransport +from .grpc import ReservationServiceGrpcTransport +from .grpc_asyncio import ReservationServiceGrpcAsyncIOTransport +from .rest import ReservationServiceRestInterceptor, ReservationServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[ReservationServiceTransport]] +_transport_registry["grpc"] = ReservationServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ReservationServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ReservationServiceRestTransport + +__all__ = ( + "ReservationServiceTransport", + "ReservationServiceGrpcTransport", + "ReservationServiceGrpcAsyncIOTransport", + "ReservationServiceRestTransport", + "ReservationServiceRestInterceptor", +) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py new file mode 100644 index 000000000000..9a87c95ca072 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py @@ -0,0 +1,566 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery_reservation_v1 import gapic_version as package_version +from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation +from google.cloud.bigquery_reservation_v1.types import reservation + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ReservationServiceTransport(abc.ABC): + """Abstract transport class for ReservationService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "bigqueryreservation.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
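+        # Resolution order below: an explicit `credentials` object wins,
+        # then `credentials_file` (loaded via google.auth), and finally
+        # Application Default Credentials from google.auth.default().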
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.create_reservation: gapic_v1.method.wrap_method(
+                self.create_reservation,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.list_reservations: gapic_v1.method.wrap_method(
+                self.list_reservations,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.get_reservation: gapic_v1.method.wrap_method(
+                self.get_reservation,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.delete_reservation: gapic_v1.method.wrap_method(
+                self.delete_reservation,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.update_reservation: gapic_v1.method.wrap_method(
+                self.update_reservation,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.create_capacity_commitment: gapic_v1.method.wrap_method(
+                self.create_capacity_commitment,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.list_capacity_commitments: gapic_v1.method.wrap_method(
+                self.list_capacity_commitments,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.get_capacity_commitment: gapic_v1.method.wrap_method(
+                self.get_capacity_commitment,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+
), + self.delete_capacity_commitment: gapic_v1.method.wrap_method( + self.delete_capacity_commitment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.update_capacity_commitment: gapic_v1.method.wrap_method( + self.update_capacity_commitment, + default_timeout=300.0, + client_info=client_info, + ), + self.split_capacity_commitment: gapic_v1.method.wrap_method( + self.split_capacity_commitment, + default_timeout=300.0, + client_info=client_info, + ), + self.merge_capacity_commitments: gapic_v1.method.wrap_method( + self.merge_capacity_commitments, + default_timeout=300.0, + client_info=client_info, + ), + self.create_assignment: gapic_v1.method.wrap_method( + self.create_assignment, + default_timeout=300.0, + client_info=client_info, + ), + self.list_assignments: gapic_v1.method.wrap_method( + self.list_assignments, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_assignment: gapic_v1.method.wrap_method( + self.delete_assignment, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.search_assignments: gapic_v1.method.wrap_method( + self.search_assignments, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.search_all_assignments: gapic_v1.method.wrap_method( + self.search_all_assignments, + default_timeout=None, + client_info=client_info, + ), + self.move_assignment: gapic_v1.method.wrap_method( + self.move_assignment, + default_timeout=300.0, + client_info=client_info, + ), + self.update_assignment: gapic_v1.method.wrap_method( + self.update_assignment, + default_timeout=None, + client_info=client_info, + ), + self.get_bi_reservation: gapic_v1.method.wrap_method( + self.get_bi_reservation, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.update_bi_reservation: gapic_v1.method.wrap_method( + self.update_bi_reservation, + default_timeout=300.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_reservation( + self, + ) -> Callable[ + [gcbr_reservation.CreateReservationRequest], + Union[gcbr_reservation.Reservation, Awaitable[gcbr_reservation.Reservation]], + ]: + raise NotImplementedError() + + @property + def list_reservations( + self, + ) -> Callable[ + [reservation.ListReservationsRequest], + Union[ + reservation.ListReservationsResponse, + Awaitable[reservation.ListReservationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_reservation( + self, + ) -> Callable[ + [reservation.GetReservationRequest], + Union[reservation.Reservation, Awaitable[reservation.Reservation]], + ]: + raise NotImplementedError() + + @property + def delete_reservation( + self, + ) -> Callable[ + [reservation.DeleteReservationRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_reservation( + self, + ) -> Callable[ + [gcbr_reservation.UpdateReservationRequest], + Union[gcbr_reservation.Reservation, Awaitable[gcbr_reservation.Reservation]], + ]: + raise NotImplementedError() + + @property + def create_capacity_commitment( + self, + ) -> Callable[ + [reservation.CreateCapacityCommitmentRequest], + Union[ + reservation.CapacityCommitment, Awaitable[reservation.CapacityCommitment] + ], + ]: + raise NotImplementedError() + + @property + def list_capacity_commitments( + self, + ) -> Callable[ + [reservation.ListCapacityCommitmentsRequest], + Union[ + reservation.ListCapacityCommitmentsResponse, + Awaitable[reservation.ListCapacityCommitmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_capacity_commitment( + self, + ) -> Callable[ + [reservation.GetCapacityCommitmentRequest], + Union[ + reservation.CapacityCommitment, Awaitable[reservation.CapacityCommitment] + ], + ]: + raise NotImplementedError() + + @property + def delete_capacity_commitment( + self, + ) -> Callable[ + [reservation.DeleteCapacityCommitmentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_capacity_commitment( + self, + ) -> Callable[ + [reservation.UpdateCapacityCommitmentRequest], + Union[ + reservation.CapacityCommitment, Awaitable[reservation.CapacityCommitment] + ], + ]: + raise NotImplementedError() + + @property + def split_capacity_commitment( + self, + ) -> Callable[ + [reservation.SplitCapacityCommitmentRequest], + Union[ + reservation.SplitCapacityCommitmentResponse, + Awaitable[reservation.SplitCapacityCommitmentResponse], + ], + ]: + raise NotImplementedError() + + @property + def merge_capacity_commitments( + self, + ) -> Callable[ + [reservation.MergeCapacityCommitmentsRequest], + Union[ + reservation.CapacityCommitment, Awaitable[reservation.CapacityCommitment] + ], + ]: + raise NotImplementedError() + + @property + def create_assignment( + self, + ) -> Callable[ + [reservation.CreateAssignmentRequest], + Union[reservation.Assignment, Awaitable[reservation.Assignment]], + ]: + raise NotImplementedError() + + @property + def list_assignments( + self, + ) -> Callable[ + [reservation.ListAssignmentsRequest], + Union[ + reservation.ListAssignmentsResponse, + Awaitable[reservation.ListAssignmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_assignment( + self, + ) -> Callable[ + [reservation.DeleteAssignmentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def 
search_assignments(
+        self,
+    ) -> Callable[
+        [reservation.SearchAssignmentsRequest],
+        Union[
+            reservation.SearchAssignmentsResponse,
+            Awaitable[reservation.SearchAssignmentsResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def search_all_assignments(
+        self,
+    ) -> Callable[
+        [reservation.SearchAllAssignmentsRequest],
+        Union[
+            reservation.SearchAllAssignmentsResponse,
+            Awaitable[reservation.SearchAllAssignmentsResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def move_assignment(
+        self,
+    ) -> Callable[
+        [reservation.MoveAssignmentRequest],
+        Union[reservation.Assignment, Awaitable[reservation.Assignment]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def update_assignment(
+        self,
+    ) -> Callable[
+        [reservation.UpdateAssignmentRequest],
+        Union[reservation.Assignment, Awaitable[reservation.Assignment]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def get_bi_reservation(
+        self,
+    ) -> Callable[
+        [reservation.GetBiReservationRequest],
+        Union[reservation.BiReservation, Awaitable[reservation.BiReservation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def update_bi_reservation(
+        self,
+    ) -> Callable[
+        [reservation.UpdateBiReservationRequest],
+        Union[reservation.BiReservation, Awaitable[reservation.BiReservation]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = ("ReservationServiceTransport",)
diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py
new file mode 100644
index 000000000000..ba0bccb82a19
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py
@@ -0,0 +1,992 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation
+from google.cloud.bigquery_reservation_v1.types import reservation
+
+from .base import DEFAULT_CLIENT_INFO, ReservationServiceTransport
+
+
+class ReservationServiceGrpcTransport(ReservationServiceTransport):
+    """gRPC backend transport for ReservationService.
+
+    This API allows users to manage their BigQuery reservations.
+
+    A reservation provides computational resource guarantees, in the
+    form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to
+    users. A slot is a unit of computational power in BigQuery, and
+    serves as the basic unit of parallelism. In a scan of a
+    multi-partitioned table, a single slot operates on a single
+    partition of the table. A reservation resource exists as a child
+    resource of the admin project and location, e.g.:
+    ``projects/myproject/locations/US/reservations/reservationName``.
+
+    A capacity commitment is a way to purchase compute capacity for
+    BigQuery jobs (in the form of slots) with some committed period of
+    usage. A capacity commitment resource exists as a child resource of
+    the admin project and location, e.g.:
+    ``projects/myproject/locations/US/capacityCommitments/id``.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "bigqueryreservation.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "bigqueryreservation.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def create_reservation(
+        self,
+    ) -> Callable[
+        [gcbr_reservation.CreateReservationRequest], gcbr_reservation.Reservation
+    ]:
+        r"""Return a callable for the create reservation method over gRPC.
+
+        Creates a new reservation resource.
+
+        Returns:
+            Callable[[~.CreateReservationRequest],
+                    ~.Reservation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_reservation" not in self._stubs:
+            self._stubs["create_reservation"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/CreateReservation",
+                request_serializer=gcbr_reservation.CreateReservationRequest.serialize,
+                response_deserializer=gcbr_reservation.Reservation.deserialize,
+            )
+        return self._stubs["create_reservation"]
+
+    @property
+    def list_reservations(
+        self,
+    ) -> Callable[
+        [reservation.ListReservationsRequest], reservation.ListReservationsResponse
+    ]:
+        r"""Return a callable for the list reservations method over gRPC.
+
+        Lists all the reservations for the project in the
+        specified location.
+
+        Returns:
+            Callable[[~.ListReservationsRequest],
+                    ~.ListReservationsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_reservations" not in self._stubs:
+            self._stubs["list_reservations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/ListReservations",
+                request_serializer=reservation.ListReservationsRequest.serialize,
+                response_deserializer=reservation.ListReservationsResponse.deserialize,
+            )
+        return self._stubs["list_reservations"]
+
+    @property
+    def get_reservation(
+        self,
+    ) -> Callable[[reservation.GetReservationRequest], reservation.Reservation]:
+        r"""Return a callable for the get reservation method over gRPC.
+
+        Returns information about the reservation.
+
+        Returns:
+            Callable[[~.GetReservationRequest],
+                    ~.Reservation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_reservation" not in self._stubs: + self._stubs["get_reservation"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/GetReservation", + request_serializer=reservation.GetReservationRequest.serialize, + response_deserializer=reservation.Reservation.deserialize, + ) + return self._stubs["get_reservation"] + + @property + def delete_reservation( + self, + ) -> Callable[[reservation.DeleteReservationRequest], empty_pb2.Empty]: + r"""Return a callable for the delete reservation method over gRPC. + + Deletes a reservation. Returns + ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has + assignments. + + Returns: + Callable[[~.DeleteReservationRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_reservation" not in self._stubs: + self._stubs["delete_reservation"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/DeleteReservation", + request_serializer=reservation.DeleteReservationRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_reservation"] + + @property + def update_reservation( + self, + ) -> Callable[ + [gcbr_reservation.UpdateReservationRequest], gcbr_reservation.Reservation + ]: + r"""Return a callable for the update reservation method over gRPC. + + Updates an existing reservation resource. + + Returns: + Callable[[~.UpdateReservationRequest], + ~.Reservation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_reservation" not in self._stubs: + self._stubs["update_reservation"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/UpdateReservation", + request_serializer=gcbr_reservation.UpdateReservationRequest.serialize, + response_deserializer=gcbr_reservation.Reservation.deserialize, + ) + return self._stubs["update_reservation"] + + @property + def create_capacity_commitment( + self, + ) -> Callable[ + [reservation.CreateCapacityCommitmentRequest], reservation.CapacityCommitment + ]: + r"""Return a callable for the create capacity commitment method over gRPC. + + Creates a new capacity commitment resource. + + Returns: + Callable[[~.CreateCapacityCommitmentRequest], + ~.CapacityCommitment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_capacity_commitment" not in self._stubs: + self._stubs["create_capacity_commitment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/CreateCapacityCommitment", + request_serializer=reservation.CreateCapacityCommitmentRequest.serialize, + response_deserializer=reservation.CapacityCommitment.deserialize, + ) + return self._stubs["create_capacity_commitment"] + + @property + def list_capacity_commitments( + self, + ) -> Callable[ + [reservation.ListCapacityCommitmentsRequest], + reservation.ListCapacityCommitmentsResponse, + ]: + r"""Return a callable for the list capacity commitments method over gRPC. + + Lists all the capacity commitments for the admin + project. + + Returns: + Callable[[~.ListCapacityCommitmentsRequest], + ~.ListCapacityCommitmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_capacity_commitments" not in self._stubs: + self._stubs["list_capacity_commitments"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/ListCapacityCommitments", + request_serializer=reservation.ListCapacityCommitmentsRequest.serialize, + response_deserializer=reservation.ListCapacityCommitmentsResponse.deserialize, + ) + return self._stubs["list_capacity_commitments"] + + @property + def get_capacity_commitment( + self, + ) -> Callable[ + [reservation.GetCapacityCommitmentRequest], reservation.CapacityCommitment + ]: + r"""Return a callable for the get capacity commitment method over gRPC. + + Returns information about the capacity commitment. + + Returns: + Callable[[~.GetCapacityCommitmentRequest], + ~.CapacityCommitment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_capacity_commitment" not in self._stubs: + self._stubs["get_capacity_commitment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/GetCapacityCommitment", + request_serializer=reservation.GetCapacityCommitmentRequest.serialize, + response_deserializer=reservation.CapacityCommitment.deserialize, + ) + return self._stubs["get_capacity_commitment"] + + @property + def delete_capacity_commitment( + self, + ) -> Callable[[reservation.DeleteCapacityCommitmentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete capacity commitment method over gRPC. + + Deletes a capacity commitment. Attempting to delete capacity + commitment before its commitment_end_time will fail with the + error code ``google.rpc.Code.FAILED_PRECONDITION``. + + Returns: + Callable[[~.DeleteCapacityCommitmentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_capacity_commitment" not in self._stubs: + self._stubs["delete_capacity_commitment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/DeleteCapacityCommitment", + request_serializer=reservation.DeleteCapacityCommitmentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_capacity_commitment"] + + @property + def update_capacity_commitment( + self, + ) -> Callable[ + [reservation.UpdateCapacityCommitmentRequest], reservation.CapacityCommitment + ]: + r"""Return a callable for the update capacity commitment method over gRPC. + + Updates an existing capacity commitment. + + Only ``plan`` and ``renewal_plan`` fields can be updated. + + Plan can only be changed to a plan of a longer commitment + period. Attempting to change to a plan with shorter commitment + period will fail with the error code + ``google.rpc.Code.FAILED_PRECONDITION``. + + Returns: + Callable[[~.UpdateCapacityCommitmentRequest], + ~.CapacityCommitment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_capacity_commitment" not in self._stubs: + self._stubs["update_capacity_commitment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/UpdateCapacityCommitment", + request_serializer=reservation.UpdateCapacityCommitmentRequest.serialize, + response_deserializer=reservation.CapacityCommitment.deserialize, + ) + return self._stubs["update_capacity_commitment"] + + @property + def split_capacity_commitment( + self, + ) -> Callable[ + [reservation.SplitCapacityCommitmentRequest], + reservation.SplitCapacityCommitmentResponse, + ]: + r"""Return a callable for the split capacity commitment method over gRPC. + + Splits capacity commitment to two commitments of the same plan + and ``commitment_end_time``. + + A common use case is to enable downgrading commitments. + + For example, in order to downgrade from 10000 slots to 8000, you + might split a 10000 capacity commitment into commitments of 2000 + and 8000. Then, you delete the first one after the commitment + end time passes. + + Returns: + Callable[[~.SplitCapacityCommitmentRequest], + ~.SplitCapacityCommitmentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "split_capacity_commitment" not in self._stubs: + self._stubs["split_capacity_commitment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/SplitCapacityCommitment", + request_serializer=reservation.SplitCapacityCommitmentRequest.serialize, + response_deserializer=reservation.SplitCapacityCommitmentResponse.deserialize, + ) + return self._stubs["split_capacity_commitment"] + + @property + def merge_capacity_commitments( + self, + ) -> Callable[ + [reservation.MergeCapacityCommitmentsRequest], reservation.CapacityCommitment + ]: + r"""Return a callable for the merge capacity commitments method over gRPC. + + Merges capacity commitments of the same plan into a single + commitment. 
+
+        The resulting capacity commitment has the greater
+        commitment_end_time out of the to-be-merged capacity
+        commitments.
+
+        Attempting to merge capacity commitments of different plans will
+        fail with the error code
+        ``google.rpc.Code.FAILED_PRECONDITION``.
+
+        Returns:
+            Callable[[~.MergeCapacityCommitmentsRequest],
+                    ~.CapacityCommitment]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "merge_capacity_commitments" not in self._stubs:
+            self._stubs["merge_capacity_commitments"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/MergeCapacityCommitments",
+                request_serializer=reservation.MergeCapacityCommitmentsRequest.serialize,
+                response_deserializer=reservation.CapacityCommitment.deserialize,
+            )
+        return self._stubs["merge_capacity_commitments"]
+
+    @property
+    def create_assignment(
+        self,
+    ) -> Callable[[reservation.CreateAssignmentRequest], reservation.Assignment]:
+        r"""Return a callable for the create assignment method over gRPC.
+
+        Creates an assignment object which allows the given project to
+        submit jobs of a certain type using slots from the specified
+        reservation.
+
+        Currently a resource (project, folder, organization) can only
+        have one assignment for each (job_type, location) combination,
+        and that reservation will be used for all jobs of the matching
+        type.
+
+        Different assignments can be created on different levels of the
+        projects, folders or organization hierarchy. During query
+        execution, the assignment is looked up at the project, folder
+        and organization levels in that order. The first assignment
+        found is applied to the query.
+
+        When creating assignments, it does not matter if other
+        assignments exist at higher levels.
+
+        Example:
+
+        -  The organization ``organizationA`` contains two projects,
+           ``project1`` and ``project2``.
+        -  Assignments for all three entities (``organizationA``,
+           ``project1``, and ``project2``) could all be created and
+           mapped to the same or different reservations.
+
+        "None" assignments represent an absence of the assignment.
+        Projects assigned to None use on-demand pricing. To create a
+        "None" assignment, use "none" as a reservation_id in the parent.
+        Example parent:
+        ``projects/myproject/locations/US/reservations/none``.
+
+        Returns ``google.rpc.Code.PERMISSION_DENIED`` if user does not
+        have 'bigquery.admin' permissions on the project using the
+        reservation and the project that owns this reservation.
+
+        Returns ``google.rpc.Code.INVALID_ARGUMENT`` when location of
+        the assignment does not match location of the reservation.
+
+        Returns:
+            Callable[[~.CreateAssignmentRequest],
+                    ~.Assignment]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
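+        # As the docstring above notes, a "None" assignment is created by
+        # using ``none`` as the reservation ID in the parent (illustrative
+        # only; the project ID is hypothetical):
+        #
+        #   transport.create_assignment(
+        #       reservation.CreateAssignmentRequest(
+        #           parent="projects/my-project/locations/US/reservations/none",
+        #       )
+        #   )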
+ if "create_assignment" not in self._stubs: + self._stubs["create_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/CreateAssignment", + request_serializer=reservation.CreateAssignmentRequest.serialize, + response_deserializer=reservation.Assignment.deserialize, + ) + return self._stubs["create_assignment"] + + @property + def list_assignments( + self, + ) -> Callable[ + [reservation.ListAssignmentsRequest], reservation.ListAssignmentsResponse + ]: + r"""Return a callable for the list assignments method over gRPC. + + Lists assignments. + + Only explicitly created assignments will be returned. + + Example: + + - Organization ``organizationA`` contains two projects, + ``project1`` and ``project2``. + - Reservation ``res1`` exists and was created previously. + - CreateAssignment was used previously to define the following + associations between entities and reservations: + ```` and ```` + + In this example, ListAssignments will just return the above two + assignments for reservation ``res1``, and no expansion/merge + will happen. + + The wildcard "-" can be used for reservations in the request. In + that case all assignments belongs to the specified project and + location will be listed. + + **Note** "-" cannot be used for projects nor locations. + + Returns: + Callable[[~.ListAssignmentsRequest], + ~.ListAssignmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_assignments" not in self._stubs: + self._stubs["list_assignments"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/ListAssignments", + request_serializer=reservation.ListAssignmentsRequest.serialize, + response_deserializer=reservation.ListAssignmentsResponse.deserialize, + ) + return self._stubs["list_assignments"] + + @property + def delete_assignment( + self, + ) -> Callable[[reservation.DeleteAssignmentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete assignment method over gRPC. + + Deletes a assignment. No expansion will happen. + + Example: + + - Organization ``organizationA`` contains two projects, + ``project1`` and ``project2``. + - Reservation ``res1`` exists and was created previously. + - CreateAssignment was used previously to define the following + associations between entities and reservations: + ```` and ```` + + In this example, deletion of the ```` + assignment won't affect the other assignment + ````. After said deletion, queries from + ``project1`` will still use ``res1`` while queries from + ``project2`` will switch to use on-demand mode. + + Returns: + Callable[[~.DeleteAssignmentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_assignment" not in self._stubs: + self._stubs["delete_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/DeleteAssignment", + request_serializer=reservation.DeleteAssignmentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_assignment"] + + @property + def search_assignments( + self, + ) -> Callable[ + [reservation.SearchAssignmentsRequest], reservation.SearchAssignmentsResponse + ]: + r"""Return a callable for the search assignments method over gRPC. + + Deprecated: Looks up assignments for a specified resource for a + particular region. If the request is about a project: + + 1. Assignments created on the project will be returned if they + exist. + 2. Otherwise assignments created on the closest ancestor will be + returned. + 3. Assignments for different JobTypes will all be returned. + + The same logic applies if the request is about a folder. + + If the request is about an organization, then assignments + created on the organization will be returned (organization + doesn't have ancestors). + + Comparing to ListAssignments, there are some behavior + differences: + + 1. permission on the assignee will be verified in this API. + 2. Hierarchy lookup (project->folder->organization) happens in + this API. + 3. Parent here is ``projects/*/locations/*``, instead of + ``projects/*/locations/*reservations/*``. + + **Note** "-" cannot be used for projects nor locations. + + Returns: + Callable[[~.SearchAssignmentsRequest], + ~.SearchAssignmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_assignments" not in self._stubs: + self._stubs["search_assignments"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/SearchAssignments", + request_serializer=reservation.SearchAssignmentsRequest.serialize, + response_deserializer=reservation.SearchAssignmentsResponse.deserialize, + ) + return self._stubs["search_assignments"] + + @property + def search_all_assignments( + self, + ) -> Callable[ + [reservation.SearchAllAssignmentsRequest], + reservation.SearchAllAssignmentsResponse, + ]: + r"""Return a callable for the search all assignments method over gRPC. + + Looks up assignments for a specified resource for a particular + region. If the request is about a project: + + 1. Assignments created on the project will be returned if they + exist. + 2. Otherwise assignments created on the closest ancestor will be + returned. + 3. Assignments for different JobTypes will all be returned. + + The same logic applies if the request is about a folder. + + If the request is about an organization, then assignments + created on the organization will be returned (organization + doesn't have ancestors). + + Comparing to ListAssignments, there are some behavior + differences: + + 1. permission on the assignee will be verified in this API. + 2. Hierarchy lookup (project->folder->organization) happens in + this API. + 3. Parent here is ``projects/*/locations/*``, instead of + ``projects/*/locations/*reservations/*``. + + Returns: + Callable[[~.SearchAllAssignmentsRequest], + ~.SearchAllAssignmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_all_assignments" not in self._stubs: + self._stubs["search_all_assignments"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/SearchAllAssignments", + request_serializer=reservation.SearchAllAssignmentsRequest.serialize, + response_deserializer=reservation.SearchAllAssignmentsResponse.deserialize, + ) + return self._stubs["search_all_assignments"] + + @property + def move_assignment( + self, + ) -> Callable[[reservation.MoveAssignmentRequest], reservation.Assignment]: + r"""Return a callable for the move assignment method over gRPC. + + Moves an assignment under a new reservation. + + This differs from removing an existing assignment and + recreating a new one by providing a transactional change + that ensures an assignee always has an associated + reservation. + + Returns: + Callable[[~.MoveAssignmentRequest], + ~.Assignment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "move_assignment" not in self._stubs: + self._stubs["move_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/MoveAssignment", + request_serializer=reservation.MoveAssignmentRequest.serialize, + response_deserializer=reservation.Assignment.deserialize, + ) + return self._stubs["move_assignment"] + + @property + def update_assignment( + self, + ) -> Callable[[reservation.UpdateAssignmentRequest], reservation.Assignment]: + r"""Return a callable for the update assignment method over gRPC. + + Updates an existing assignment. + + Only the ``priority`` field can be updated. + + Returns: + Callable[[~.UpdateAssignmentRequest], + ~.Assignment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_assignment" not in self._stubs: + self._stubs["update_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/UpdateAssignment", + request_serializer=reservation.UpdateAssignmentRequest.serialize, + response_deserializer=reservation.Assignment.deserialize, + ) + return self._stubs["update_assignment"] + + @property + def get_bi_reservation( + self, + ) -> Callable[[reservation.GetBiReservationRequest], reservation.BiReservation]: + r"""Return a callable for the get bi reservation method over gRPC. + + Retrieves a BI reservation. + + Returns: + Callable[[~.GetBiReservationRequest], + ~.BiReservation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_bi_reservation" not in self._stubs: + self._stubs["get_bi_reservation"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/GetBiReservation", + request_serializer=reservation.GetBiReservationRequest.serialize, + response_deserializer=reservation.BiReservation.deserialize, + ) + return self._stubs["get_bi_reservation"] + + @property + def update_bi_reservation( + self, + ) -> Callable[[reservation.UpdateBiReservationRequest], reservation.BiReservation]: + r"""Return a callable for the update bi reservation method over gRPC. + + Updates a BI reservation. + + Only fields specified in the ``field_mask`` are updated. + + A singleton BI reservation always exists with default size 0. In + order to reserve BI capacity it needs to be updated to an amount + greater than 0. In order to release BI capacity reservation size + must be set to 0. + + Returns: + Callable[[~.UpdateBiReservationRequest], + ~.BiReservation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_bi_reservation" not in self._stubs: + self._stubs["update_bi_reservation"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/UpdateBiReservation", + request_serializer=reservation.UpdateBiReservationRequest.serialize, + response_deserializer=reservation.BiReservation.deserialize, + ) + return self._stubs["update_bi_reservation"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ReservationServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..29520c7b0c77 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py @@ -0,0 +1,1014 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation
+from google.cloud.bigquery_reservation_v1.types import reservation
+
+from .base import DEFAULT_CLIENT_INFO, ReservationServiceTransport
+from .grpc import ReservationServiceGrpcTransport
+
+
+class ReservationServiceGrpcAsyncIOTransport(ReservationServiceTransport):
+    """gRPC AsyncIO backend transport for ReservationService.
+
+    This API allows users to manage their BigQuery reservations.
+
+    A reservation provides computational resource guarantees, in the
+    form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to
+    users. A slot is a unit of computational power in BigQuery, and
+    serves as the basic unit of parallelism. In a scan of a
+    multi-partitioned table, a single slot operates on a single
+    partition of the table. A reservation resource exists as a child
+    resource of the admin project and location, e.g.:
+    ``projects/myproject/locations/US/reservations/reservationName``.
+
+    A capacity commitment is a way to purchase compute capacity for
+    BigQuery jobs (in the form of slots) with some committed period of
+    usage. A capacity commitment resource exists as a child resource of
+    the admin project and location, e.g.:
+    ``projects/myproject/locations/US/capacityCommitments/id``.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "bigqueryreservation.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "bigqueryreservation.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_reservation( + self, + ) -> Callable[ + [gcbr_reservation.CreateReservationRequest], + Awaitable[gcbr_reservation.Reservation], + ]: + r"""Return a callable for the create reservation method over gRPC. + + Creates a new reservation resource. + + Returns: + Callable[[~.CreateReservationRequest], + Awaitable[~.Reservation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
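+        # The stub is created lazily on first access and cached in
+        # ``self._stubs``; subsequent accesses of this property reuse the
+        # same callable rather than rebuilding it.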
+        if "create_reservation" not in self._stubs:
+            self._stubs["create_reservation"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/CreateReservation",
+                request_serializer=gcbr_reservation.CreateReservationRequest.serialize,
+                response_deserializer=gcbr_reservation.Reservation.deserialize,
+            )
+        return self._stubs["create_reservation"]
+
+    @property
+    def list_reservations(
+        self,
+    ) -> Callable[
+        [reservation.ListReservationsRequest],
+        Awaitable[reservation.ListReservationsResponse],
+    ]:
+        r"""Return a callable for the list reservations method over gRPC.
+
+        Lists all the reservations for the project in the
+        specified location.
+
+        Returns:
+            Callable[[~.ListReservationsRequest],
+                    Awaitable[~.ListReservationsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_reservations" not in self._stubs:
+            self._stubs["list_reservations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/ListReservations",
+                request_serializer=reservation.ListReservationsRequest.serialize,
+                response_deserializer=reservation.ListReservationsResponse.deserialize,
+            )
+        return self._stubs["list_reservations"]
+
+    @property
+    def get_reservation(
+        self,
+    ) -> Callable[
+        [reservation.GetReservationRequest], Awaitable[reservation.Reservation]
+    ]:
+        r"""Return a callable for the get reservation method over gRPC.
+
+        Returns information about the reservation.
+
+        Returns:
+            Callable[[~.GetReservationRequest],
+                    Awaitable[~.Reservation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_reservation" not in self._stubs:
+            self._stubs["get_reservation"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/GetReservation",
+                request_serializer=reservation.GetReservationRequest.serialize,
+                response_deserializer=reservation.Reservation.deserialize,
+            )
+        return self._stubs["get_reservation"]
+
+    @property
+    def delete_reservation(
+        self,
+    ) -> Callable[[reservation.DeleteReservationRequest], Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete reservation method over gRPC.
+
+        Deletes a reservation. Returns
+        ``google.rpc.Code.FAILED_PRECONDITION`` when the reservation has
+        assignments.
+
+        Returns:
+            Callable[[~.DeleteReservationRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "delete_reservation" not in self._stubs: + self._stubs["delete_reservation"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/DeleteReservation", + request_serializer=reservation.DeleteReservationRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_reservation"] + + @property + def update_reservation( + self, + ) -> Callable[ + [gcbr_reservation.UpdateReservationRequest], + Awaitable[gcbr_reservation.Reservation], + ]: + r"""Return a callable for the update reservation method over gRPC. + + Updates an existing reservation resource. + + Returns: + Callable[[~.UpdateReservationRequest], + Awaitable[~.Reservation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_reservation" not in self._stubs: + self._stubs["update_reservation"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/UpdateReservation", + request_serializer=gcbr_reservation.UpdateReservationRequest.serialize, + response_deserializer=gcbr_reservation.Reservation.deserialize, + ) + return self._stubs["update_reservation"] + + @property + def create_capacity_commitment( + self, + ) -> Callable[ + [reservation.CreateCapacityCommitmentRequest], + Awaitable[reservation.CapacityCommitment], + ]: + r"""Return a callable for the create capacity commitment method over gRPC. + + Creates a new capacity commitment resource. + + Returns: + Callable[[~.CreateCapacityCommitmentRequest], + Awaitable[~.CapacityCommitment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_capacity_commitment" not in self._stubs: + self._stubs["create_capacity_commitment"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/CreateCapacityCommitment", + request_serializer=reservation.CreateCapacityCommitmentRequest.serialize, + response_deserializer=reservation.CapacityCommitment.deserialize, + ) + return self._stubs["create_capacity_commitment"] + + @property + def list_capacity_commitments( + self, + ) -> Callable[ + [reservation.ListCapacityCommitmentsRequest], + Awaitable[reservation.ListCapacityCommitmentsResponse], + ]: + r"""Return a callable for the list capacity commitments method over gRPC. + + Lists all the capacity commitments for the admin + project. + + Returns: + Callable[[~.ListCapacityCommitmentsRequest], + Awaitable[~.ListCapacityCommitmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_capacity_commitments" not in self._stubs:
+            self._stubs["list_capacity_commitments"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/ListCapacityCommitments",
+                request_serializer=reservation.ListCapacityCommitmentsRequest.serialize,
+                response_deserializer=reservation.ListCapacityCommitmentsResponse.deserialize,
+            )
+        return self._stubs["list_capacity_commitments"]
+
+    @property
+    def get_capacity_commitment(
+        self,
+    ) -> Callable[
+        [reservation.GetCapacityCommitmentRequest],
+        Awaitable[reservation.CapacityCommitment],
+    ]:
+        r"""Return a callable for the get capacity commitment method over gRPC.
+
+        Returns information about the capacity commitment.
+
+        Returns:
+            Callable[[~.GetCapacityCommitmentRequest],
+                    Awaitable[~.CapacityCommitment]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_capacity_commitment" not in self._stubs:
+            self._stubs["get_capacity_commitment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/GetCapacityCommitment",
+                request_serializer=reservation.GetCapacityCommitmentRequest.serialize,
+                response_deserializer=reservation.CapacityCommitment.deserialize,
+            )
+        return self._stubs["get_capacity_commitment"]
+
+    @property
+    def delete_capacity_commitment(
+        self,
+    ) -> Callable[
+        [reservation.DeleteCapacityCommitmentRequest], Awaitable[empty_pb2.Empty]
+    ]:
+        r"""Return a callable for the delete capacity commitment method over gRPC.
+
+        Deletes a capacity commitment. Attempting to delete a capacity
+        commitment before its commitment_end_time will fail with the
+        error code ``google.rpc.Code.FAILED_PRECONDITION``.
+
+        Returns:
+            Callable[[~.DeleteCapacityCommitmentRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_capacity_commitment" not in self._stubs:
+            self._stubs["delete_capacity_commitment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/DeleteCapacityCommitment",
+                request_serializer=reservation.DeleteCapacityCommitmentRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_capacity_commitment"]
+
+    @property
+    def update_capacity_commitment(
+        self,
+    ) -> Callable[
+        [reservation.UpdateCapacityCommitmentRequest],
+        Awaitable[reservation.CapacityCommitment],
+    ]:
+        r"""Return a callable for the update capacity commitment method over gRPC.
+
+        Updates an existing capacity commitment.
+
+        Only ``plan`` and ``renewal_plan`` fields can be updated.
+
+        The plan can only be changed to one with a longer commitment
+        period. Attempting to change to a plan with a shorter commitment
+        period will fail with the error code
+        ``google.rpc.Code.FAILED_PRECONDITION``.
+
+        Returns:
+            Callable[[~.UpdateCapacityCommitmentRequest],
+                    Awaitable[~.CapacityCommitment]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_capacity_commitment" not in self._stubs:
+            self._stubs["update_capacity_commitment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/UpdateCapacityCommitment",
+                request_serializer=reservation.UpdateCapacityCommitmentRequest.serialize,
+                response_deserializer=reservation.CapacityCommitment.deserialize,
+            )
+        return self._stubs["update_capacity_commitment"]
+
+    @property
+    def split_capacity_commitment(
+        self,
+    ) -> Callable[
+        [reservation.SplitCapacityCommitmentRequest],
+        Awaitable[reservation.SplitCapacityCommitmentResponse],
+    ]:
+        r"""Return a callable for the split capacity commitment method over gRPC.
+
+        Splits a capacity commitment into two commitments of the same
+        plan and ``commitment_end_time``.
+
+        A common use case is to enable downgrading commitments.
+
+        For example, in order to downgrade from 10000 slots to 8000, you
+        might split a 10000 capacity commitment into commitments of 2000
+        and 8000. Then, you delete the first one after the commitment
+        end time passes.
+
+        Returns:
+            Callable[[~.SplitCapacityCommitmentRequest],
+                    Awaitable[~.SplitCapacityCommitmentResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "split_capacity_commitment" not in self._stubs:
+            self._stubs["split_capacity_commitment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/SplitCapacityCommitment",
+                request_serializer=reservation.SplitCapacityCommitmentRequest.serialize,
+                response_deserializer=reservation.SplitCapacityCommitmentResponse.deserialize,
+            )
+        return self._stubs["split_capacity_commitment"]
+
+    @property
+    def merge_capacity_commitments(
+        self,
+    ) -> Callable[
+        [reservation.MergeCapacityCommitmentsRequest],
+        Awaitable[reservation.CapacityCommitment],
+    ]:
+        r"""Return a callable for the merge capacity commitments method over gRPC.
+
+        Merges capacity commitments of the same plan into a single
+        commitment.
+
+        The resulting capacity commitment has the greater
+        commitment_end_time out of the to-be-merged capacity
+        commitments.
+
+        Attempting to merge capacity commitments of different plans will
+        fail with the error code
+        ``google.rpc.Code.FAILED_PRECONDITION``.
+
+        Returns:
+            Callable[[~.MergeCapacityCommitmentsRequest],
+                    Awaitable[~.CapacityCommitment]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "merge_capacity_commitments" not in self._stubs:
+            self._stubs["merge_capacity_commitments"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/MergeCapacityCommitments",
+                request_serializer=reservation.MergeCapacityCommitmentsRequest.serialize,
+                response_deserializer=reservation.CapacityCommitment.deserialize,
+            )
+        return self._stubs["merge_capacity_commitments"]
+
+    @property
+    def create_assignment(
+        self,
+    ) -> Callable[
+        [reservation.CreateAssignmentRequest], Awaitable[reservation.Assignment]
+    ]:
+        r"""Return a callable for the create assignment method over gRPC.
+
+        Creates an assignment object which allows the given project to
+        submit jobs of a certain type using slots from the specified
+        reservation.
+
+        Currently a resource (project, folder, organization) can only
+        have one assignment for each (job_type, location) combination,
+        and that reservation will be used for all jobs of the matching
+        type.
+
+        Different assignments can be created at different levels of the
+        project, folder, or organization hierarchy. During query
+        execution, the assignment is looked up at the project, folder,
+        and organization levels in that order. The first assignment
+        found is applied to the query.
+
+        When creating assignments, it does not matter if other
+        assignments exist at higher levels.
+
+        Example:
+
+        -  The organization ``organizationA`` contains two projects,
+           ``project1`` and ``project2``.
+        -  Assignments for all three entities (``organizationA``,
+           ``project1``, and ``project2``) could all be created and
+           mapped to the same or different reservations.
+
+        "None" assignments represent an absence of the assignment.
+        Projects assigned to None use on-demand pricing. To create a
+        "None" assignment, use "none" as a reservation_id in the parent.
+        Example parent:
+        ``projects/myproject/locations/US/reservations/none``.
+
+        Returns ``google.rpc.Code.PERMISSION_DENIED`` if the user does
+        not have 'bigquery.admin' permissions on the project using the
+        reservation and the project that owns this reservation.
+
+        Returns ``google.rpc.Code.INVALID_ARGUMENT`` when the location
+        of the assignment does not match the location of the
+        reservation.
+
+        Returns:
+            Callable[[~.CreateAssignmentRequest],
+                    Awaitable[~.Assignment]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_assignment" not in self._stubs:
+            self._stubs["create_assignment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/CreateAssignment",
+                request_serializer=reservation.CreateAssignmentRequest.serialize,
+                response_deserializer=reservation.Assignment.deserialize,
+            )
+        return self._stubs["create_assignment"]
+
+    @property
+    def list_assignments(
+        self,
+    ) -> Callable[
+        [reservation.ListAssignmentsRequest],
+        Awaitable[reservation.ListAssignmentsResponse],
+    ]:
+        r"""Return a callable for the list assignments method over gRPC.
+
+        Lists assignments.
+
+        Only explicitly created assignments will be returned.
+
+        Example:
+
+        -  Organization ``organizationA`` contains two projects,
+           ``project1`` and ``project2``.
+        -  Reservation ``res1`` exists and was created previously.
+        -  CreateAssignment was used previously to define the following
+           associations between entities and reservations:
+           ``<organizationA, res1>`` and ``<project1, res1>``
+
+        In this example, ListAssignments will just return the above two
+        assignments for reservation ``res1``, and no expansion/merge
+        will happen.
+
+        The wildcard "-" can be used for reservations in the request. In
+        that case, all assignments belonging to the specified project
+        and location will be listed.
+
+        **Note** "-" cannot be used for projects or locations.
+
+        Returns:
+            Callable[[~.ListAssignmentsRequest],
+                    Awaitable[~.ListAssignmentsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_assignments" not in self._stubs:
+            self._stubs["list_assignments"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/ListAssignments",
+                request_serializer=reservation.ListAssignmentsRequest.serialize,
+                response_deserializer=reservation.ListAssignmentsResponse.deserialize,
+            )
+        return self._stubs["list_assignments"]
+
+    @property
+    def delete_assignment(
+        self,
+    ) -> Callable[[reservation.DeleteAssignmentRequest], Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete assignment method over gRPC.
+
+        Deletes an assignment. No expansion will happen.
+
+        Example:
+
+        -  Organization ``organizationA`` contains two projects,
+           ``project1`` and ``project2``.
+        -  Reservation ``res1`` exists and was created previously.
+        -  CreateAssignment was used previously to define the following
+           associations between entities and reservations:
+           ``<organizationA, res1>`` and ``<project1, res1>``
+
+        In this example, deletion of the ``<organizationA, res1>``
+        assignment won't affect the other assignment
+        ``<project1, res1>``. After said deletion, queries from
+        ``project1`` will still use ``res1`` while queries from
+        ``project2`` will switch to use on-demand mode.
+
+        Returns:
+            Callable[[~.DeleteAssignmentRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_assignment" not in self._stubs:
+            self._stubs["delete_assignment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/DeleteAssignment",
+                request_serializer=reservation.DeleteAssignmentRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_assignment"]
+
+    @property
+    def search_assignments(
+        self,
+    ) -> Callable[
+        [reservation.SearchAssignmentsRequest],
+        Awaitable[reservation.SearchAssignmentsResponse],
+    ]:
+        r"""Return a callable for the search assignments method over gRPC.
+
+        Deprecated: Looks up assignments for a specified resource for a
+        particular region. If the request is about a project:
+
+        1. Assignments created on the project will be returned if they
+           exist.
+        2. Otherwise assignments created on the closest ancestor will be
+           returned.
+        3. Assignments for different JobTypes will all be returned.
+
+        The same logic applies if the request is about a folder.
+
+        If the request is about an organization, then assignments
+        created on the organization will be returned (an organization
+        doesn't have ancestors).
+
+        Compared to ListAssignments, there are some behavioral
+        differences:
+
+        1. Permission on the assignee will be verified in this API.
+        2. Hierarchy lookup (project->folder->organization) happens in
+           this API.
+        3. Parent here is ``projects/*/locations/*``, instead of
+           ``projects/*/locations/*/reservations/*``.
+
+        **Note** "-" cannot be used for projects or locations.
+
+        Returns:
+            Callable[[~.SearchAssignmentsRequest],
+                    Awaitable[~.SearchAssignmentsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "search_assignments" not in self._stubs:
+            self._stubs["search_assignments"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/SearchAssignments",
+                request_serializer=reservation.SearchAssignmentsRequest.serialize,
+                response_deserializer=reservation.SearchAssignmentsResponse.deserialize,
+            )
+        return self._stubs["search_assignments"]
+
+    @property
+    def search_all_assignments(
+        self,
+    ) -> Callable[
+        [reservation.SearchAllAssignmentsRequest],
+        Awaitable[reservation.SearchAllAssignmentsResponse],
+    ]:
+        r"""Return a callable for the search all assignments method over gRPC.
+
+        Looks up assignments for a specified resource for a particular
+        region. If the request is about a project:
+
+        1. Assignments created on the project will be returned if they
+           exist.
+        2. Otherwise assignments created on the closest ancestor will be
+           returned.
+        3. Assignments for different JobTypes will all be returned.
+
+        The same logic applies if the request is about a folder.
+
+        If the request is about an organization, then assignments
+        created on the organization will be returned (an organization
+        doesn't have ancestors).
+
+        Compared to ListAssignments, there are some behavioral
+        differences:
+
+        1. Permission on the assignee will be verified in this API.
+        2. Hierarchy lookup (project->folder->organization) happens in
+           this API.
+        3. Parent here is ``projects/*/locations/*``, instead of
+           ``projects/*/locations/*/reservations/*``.
+
+        Returns:
+            Callable[[~.SearchAllAssignmentsRequest],
+                    Awaitable[~.SearchAllAssignmentsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "search_all_assignments" not in self._stubs:
+            self._stubs["search_all_assignments"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/SearchAllAssignments",
+                request_serializer=reservation.SearchAllAssignmentsRequest.serialize,
+                response_deserializer=reservation.SearchAllAssignmentsResponse.deserialize,
+            )
+        return self._stubs["search_all_assignments"]
+
+    @property
+    def move_assignment(
+        self,
+    ) -> Callable[
+        [reservation.MoveAssignmentRequest], Awaitable[reservation.Assignment]
+    ]:
+        r"""Return a callable for the move assignment method over gRPC.
+
+        Moves an assignment under a new reservation.
+
+        This differs from removing an existing assignment and
+        recreating a new one by providing a transactional change
+        that ensures an assignee always has an associated
+        reservation.
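+
+        A minimal calling sketch through the async client (``client`` and
+        both resource names below are hypothetical placeholders, not
+        defined in this module):
+
+        .. code-block:: python
+
+            # Reassign assignment 123 from reservation res1 to res2.
+            response = await client.move_assignment(
+                name="projects/myproject/locations/US/reservations/res1/assignments/123",
+                destination_id="projects/myproject/locations/US/reservations/res2",
+            )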
+
+        Returns:
+            Callable[[~.MoveAssignmentRequest],
+                    Awaitable[~.Assignment]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "move_assignment" not in self._stubs:
+            self._stubs["move_assignment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/MoveAssignment",
+                request_serializer=reservation.MoveAssignmentRequest.serialize,
+                response_deserializer=reservation.Assignment.deserialize,
+            )
+        return self._stubs["move_assignment"]
+
+    @property
+    def update_assignment(
+        self,
+    ) -> Callable[
+        [reservation.UpdateAssignmentRequest], Awaitable[reservation.Assignment]
+    ]:
+        r"""Return a callable for the update assignment method over gRPC.
+
+        Updates an existing assignment.
+
+        Only the ``priority`` field can be updated.
+
+        Returns:
+            Callable[[~.UpdateAssignmentRequest],
+                    Awaitable[~.Assignment]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_assignment" not in self._stubs:
+            self._stubs["update_assignment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/UpdateAssignment",
+                request_serializer=reservation.UpdateAssignmentRequest.serialize,
+                response_deserializer=reservation.Assignment.deserialize,
+            )
+        return self._stubs["update_assignment"]
+
+    @property
+    def get_bi_reservation(
+        self,
+    ) -> Callable[
+        [reservation.GetBiReservationRequest], Awaitable[reservation.BiReservation]
+    ]:
+        r"""Return a callable for the get bi reservation method over gRPC.
+
+        Retrieves a BI reservation.
+
+        Returns:
+            Callable[[~.GetBiReservationRequest],
+                    Awaitable[~.BiReservation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_bi_reservation" not in self._stubs:
+            self._stubs["get_bi_reservation"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.reservation.v1.ReservationService/GetBiReservation",
+                request_serializer=reservation.GetBiReservationRequest.serialize,
+                response_deserializer=reservation.BiReservation.deserialize,
+            )
+        return self._stubs["get_bi_reservation"]
+
+    @property
+    def update_bi_reservation(
+        self,
+    ) -> Callable[
+        [reservation.UpdateBiReservationRequest], Awaitable[reservation.BiReservation]
+    ]:
+        r"""Return a callable for the update bi reservation method over gRPC.
+
+        Updates a BI reservation.
+
+        Only fields specified in the ``field_mask`` are updated.
+
+        A singleton BI reservation always exists with default size 0. To
+        reserve BI capacity, update the reservation to a size greater
+        than 0. To release BI capacity, set the reservation size back
+        to 0.
+
+        Returns:
+            Callable[[~.UpdateBiReservationRequest],
+                    Awaitable[~.BiReservation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_bi_reservation" not in self._stubs: + self._stubs["update_bi_reservation"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/UpdateBiReservation", + request_serializer=reservation.UpdateBiReservationRequest.serialize, + response_deserializer=reservation.BiReservation.deserialize, + ) + return self._stubs["update_bi_reservation"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ReservationServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py new file mode 100644 index 000000000000..e3c3e749abdf --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py @@ -0,0 +1,2961 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation +from google.cloud.bigquery_reservation_v1.types import reservation + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import ReservationServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ReservationServiceRestInterceptor: + """Interceptor for ReservationService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ReservationServiceRestTransport. + + .. code-block:: python + class MyCustomReservationServiceInterceptor(ReservationServiceRestInterceptor): + def pre_create_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_capacity_commitment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_capacity_commitment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_reservation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_reservation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_capacity_commitment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_reservation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_bi_reservation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_bi_reservation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_capacity_commitment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_capacity_commitment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_reservation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_reservation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_assignments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_assignments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_capacity_commitments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_capacity_commitments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_reservations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_reservations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_merge_capacity_commitments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_merge_capacity_commitments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_move_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_move_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_search_all_assignments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_all_assignments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_search_assignments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_assignments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_split_capacity_commitment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_split_capacity_commitment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_bi_reservation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_bi_reservation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_capacity_commitment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_capacity_commitment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_reservation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_reservation(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ReservationServiceRestTransport(interceptor=MyCustomReservationServiceInterceptor()) + client = ReservationServiceClient(transport=transport) + + + """ + + def pre_create_assignment( + self, + request: reservation.CreateAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.CreateAssignmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_create_assignment( + self, response: reservation.Assignment + ) -> reservation.Assignment: + """Post-rpc interceptor for create_assignment + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_create_capacity_commitment( + self, + request: reservation.CreateCapacityCommitmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.CreateCapacityCommitmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_capacity_commitment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_create_capacity_commitment( + self, response: reservation.CapacityCommitment + ) -> reservation.CapacityCommitment: + """Post-rpc interceptor for create_capacity_commitment + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. 
+ """ + return response + + def pre_create_reservation( + self, + request: gcbr_reservation.CreateReservationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gcbr_reservation.CreateReservationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_reservation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_create_reservation( + self, response: gcbr_reservation.Reservation + ) -> gcbr_reservation.Reservation: + """Post-rpc interceptor for create_reservation + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_delete_assignment( + self, + request: reservation.DeleteAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.DeleteAssignmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def pre_delete_capacity_commitment( + self, + request: reservation.DeleteCapacityCommitmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.DeleteCapacityCommitmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_capacity_commitment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def pre_delete_reservation( + self, + request: reservation.DeleteReservationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.DeleteReservationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_reservation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def pre_get_bi_reservation( + self, + request: reservation.GetBiReservationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.GetBiReservationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_bi_reservation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_get_bi_reservation( + self, response: reservation.BiReservation + ) -> reservation.BiReservation: + """Post-rpc interceptor for get_bi_reservation + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_get_capacity_commitment( + self, + request: reservation.GetCapacityCommitmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.GetCapacityCommitmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_capacity_commitment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_get_capacity_commitment( + self, response: reservation.CapacityCommitment + ) -> reservation.CapacityCommitment: + """Post-rpc interceptor for get_capacity_commitment + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_reservation( + self, + request: reservation.GetReservationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.GetReservationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_reservation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_get_reservation( + self, response: reservation.Reservation + ) -> reservation.Reservation: + """Post-rpc interceptor for get_reservation + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_list_assignments( + self, + request: reservation.ListAssignmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.ListAssignmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_assignments + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_list_assignments( + self, response: reservation.ListAssignmentsResponse + ) -> reservation.ListAssignmentsResponse: + """Post-rpc interceptor for list_assignments + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_list_capacity_commitments( + self, + request: reservation.ListCapacityCommitmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.ListCapacityCommitmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_capacity_commitments + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_list_capacity_commitments( + self, response: reservation.ListCapacityCommitmentsResponse + ) -> reservation.ListCapacityCommitmentsResponse: + """Post-rpc interceptor for list_capacity_commitments + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_list_reservations( + self, + request: reservation.ListReservationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.ListReservationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_reservations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_list_reservations( + self, response: reservation.ListReservationsResponse + ) -> reservation.ListReservationsResponse: + """Post-rpc interceptor for list_reservations + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_merge_capacity_commitments( + self, + request: reservation.MergeCapacityCommitmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.MergeCapacityCommitmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for merge_capacity_commitments + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. 
+ """ + return request, metadata + + def post_merge_capacity_commitments( + self, response: reservation.CapacityCommitment + ) -> reservation.CapacityCommitment: + """Post-rpc interceptor for merge_capacity_commitments + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_move_assignment( + self, + request: reservation.MoveAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.MoveAssignmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for move_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_move_assignment( + self, response: reservation.Assignment + ) -> reservation.Assignment: + """Post-rpc interceptor for move_assignment + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_search_all_assignments( + self, + request: reservation.SearchAllAssignmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.SearchAllAssignmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_all_assignments + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_search_all_assignments( + self, response: reservation.SearchAllAssignmentsResponse + ) -> reservation.SearchAllAssignmentsResponse: + """Post-rpc interceptor for search_all_assignments + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_search_assignments( + self, + request: reservation.SearchAssignmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.SearchAssignmentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_assignments + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_search_assignments( + self, response: reservation.SearchAssignmentsResponse + ) -> reservation.SearchAssignmentsResponse: + """Post-rpc interceptor for search_assignments + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. + """ + return response + + def pre_split_capacity_commitment( + self, + request: reservation.SplitCapacityCommitmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[reservation.SplitCapacityCommitmentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for split_capacity_commitment + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_split_capacity_commitment( + self, response: reservation.SplitCapacityCommitmentResponse + ) -> reservation.SplitCapacityCommitmentResponse: + """Post-rpc interceptor for split_capacity_commitment + + Override in a subclass to manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. 
+        """
+        return response
+
+    def pre_update_assignment(
+        self,
+        request: reservation.UpdateAssignmentRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[reservation.UpdateAssignmentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_assignment
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the ReservationService server.
+        """
+        return request, metadata
+
+    def post_update_assignment(
+        self, response: reservation.Assignment
+    ) -> reservation.Assignment:
+        """Post-rpc interceptor for update_assignment
+
+        Override in a subclass to manipulate the response
+        after it is returned by the ReservationService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_update_bi_reservation(
+        self,
+        request: reservation.UpdateBiReservationRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[reservation.UpdateBiReservationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_bi_reservation
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the ReservationService server.
+        """
+        return request, metadata
+
+    def post_update_bi_reservation(
+        self, response: reservation.BiReservation
+    ) -> reservation.BiReservation:
+        """Post-rpc interceptor for update_bi_reservation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the ReservationService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_update_capacity_commitment(
+        self,
+        request: reservation.UpdateCapacityCommitmentRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[reservation.UpdateCapacityCommitmentRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_capacity_commitment
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the ReservationService server.
+        """
+        return request, metadata
+
+    def post_update_capacity_commitment(
+        self, response: reservation.CapacityCommitment
+    ) -> reservation.CapacityCommitment:
+        """Post-rpc interceptor for update_capacity_commitment
+
+        Override in a subclass to manipulate the response
+        after it is returned by the ReservationService server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_update_reservation(
+        self,
+        request: gcbr_reservation.UpdateReservationRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[gcbr_reservation.UpdateReservationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for update_reservation
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the ReservationService server.
+        """
+        return request, metadata
+
+    def post_update_reservation(
+        self, response: gcbr_reservation.Reservation
+    ) -> gcbr_reservation.Reservation:
+        """Post-rpc interceptor for update_reservation
+
+        Override in a subclass to manipulate the response
+        after it is returned by the ReservationService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class ReservationServiceRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: ReservationServiceRestInterceptor
+
+
+class ReservationServiceRestTransport(ReservationServiceTransport):
+    """REST backend transport for ReservationService.
+
+    This API allows users to manage their BigQuery reservations.
+
+    A reservation provides computational resource guarantees, in the
+    form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to
+    users. A slot is a unit of computational power in BigQuery, and
+    serves as the basic unit of parallelism. In a scan of a
+    multi-partitioned table, a single slot operates on a single
+    partition of the table. A reservation resource exists as a child
+    resource of the admin project and location, e.g.:
+    ``projects/myproject/locations/US/reservations/reservationName``.
+
+    A capacity commitment is a way to purchase compute capacity for
+    BigQuery jobs (in the form of slots) with some committed period of
+    usage. A capacity commitment resource exists as a child resource of
+    the admin project and location, e.g.:
+    ``projects/myproject/locations/US/capacityCommitments/id``.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "bigqueryreservation.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[ReservationServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
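+        # The host may be given with or without a scheme. For example
+        # (illustrative values only), "https://bigqueryreservation.googleapis.com"
+        # keeps its scheme as-is, while a bare "localhost:8080" has
+        # ``url_scheme`` prepended below.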
+
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ReservationServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateAssignment(ReservationServiceRestStub):
+        def __hash__(self):
+            return hash("CreateAssignment")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: reservation.CreateAssignmentRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> reservation.Assignment:
+            r"""Call the create assignment method over HTTP.
+
+            Args:
+                request (~.reservation.CreateAssignmentRequest):
+                    The request object. The request for
+                    [ReservationService.CreateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment].
+                    Note: "bigquery.reservationAssignments.create"
+                    permission is required on the related assignee.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.reservation.Assignment:
+                    An assignment allows a project to
+                    submit jobs of a certain type using
+                    slots from the specified reservation.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/reservations/*}/assignments", + "body": "assignment", + }, + ] + request, metadata = self._interceptor.pre_create_assignment( + request, metadata + ) + pb_request = reservation.CreateAssignmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.Assignment() + pb_resp = reservation.Assignment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_assignment(resp) + return resp + + class _CreateCapacityCommitment(ReservationServiceRestStub): + def __hash__(self): + return hash("CreateCapacityCommitment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.CreateCapacityCommitmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Call the create capacity + commitment method over HTTP. + + Args: + request (~.reservation.CreateCapacityCommitmentRequest): + The request object. The request for + [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/capacityCommitments", + "body": "capacity_commitment", + }, + ] + request, metadata = self._interceptor.pre_create_capacity_commitment( + request, metadata + ) + pb_request = reservation.CreateCapacityCommitmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.CapacityCommitment() + pb_resp = reservation.CapacityCommitment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_capacity_commitment(resp) + return resp + + class _CreateReservation(ReservationServiceRestStub): + def __hash__(self): + return hash("CreateReservation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gcbr_reservation.CreateReservationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbr_reservation.Reservation: + r"""Call the create reservation method over HTTP. + + Args: + request (~.gcbr_reservation.CreateReservationRequest): + The request object. The request for + [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcbr_reservation.Reservation: + A reservation is a mechanism used to + guarantee slots to users. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/reservations", + "body": "reservation", + }, + ] + request, metadata = self._interceptor.pre_create_reservation( + request, metadata + ) + pb_request = gcbr_reservation.CreateReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcbr_reservation.Reservation() + pb_resp = gcbr_reservation.Reservation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_reservation(resp) + return resp + + class _DeleteAssignment(ReservationServiceRestStub): + def __hash__(self): + return hash("DeleteAssignment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.DeleteAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete assignment method over HTTP. + + Args: + request (~.reservation.DeleteAssignmentRequest): + The request object. The request for + [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment]. + Note: "bigquery.reservationAssignments.delete" + permission is required on the related assignee. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/reservations/*/assignments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_assignment( + request, metadata + ) + pb_request = reservation.DeleteAssignmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteCapacityCommitment(ReservationServiceRestStub): + def __hash__(self): + return hash("DeleteCapacityCommitment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.DeleteCapacityCommitmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete capacity + commitment method over HTTP. + + Args: + request (~.reservation.DeleteCapacityCommitmentRequest): + The request object. The request for + [ReservationService.DeleteCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/capacityCommitments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_capacity_commitment( + request, metadata + ) + pb_request = reservation.DeleteCapacityCommitmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteReservation(ReservationServiceRestStub): + def __hash__(self): + return hash("DeleteReservation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.DeleteReservationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete reservation method over HTTP. + + Args: + request (~.reservation.DeleteReservationRequest): + The request object. The request for + [ReservationService.DeleteReservation][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/reservations/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_reservation( + request, metadata + ) + pb_request = reservation.DeleteReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetBiReservation(ReservationServiceRestStub): + def __hash__(self): + return hash("GetBiReservation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.GetBiReservationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.BiReservation: + r"""Call the get bi reservation method over HTTP. + + Args: + request (~.reservation.GetBiReservationRequest): + The request object. A request to get a singleton BI + reservation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.BiReservation: + Represents a BI Reservation. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/biReservation}", + }, + ] + request, metadata = self._interceptor.pre_get_bi_reservation( + request, metadata + ) + pb_request = reservation.GetBiReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.BiReservation() + pb_resp = reservation.BiReservation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_bi_reservation(resp) + return resp + + class _GetCapacityCommitment(ReservationServiceRestStub): + def __hash__(self): + return hash("GetCapacityCommitment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.GetCapacityCommitmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Call the get capacity commitment method over HTTP. + + Args: + request (~.reservation.GetCapacityCommitmentRequest): + The request object. The request for + [ReservationService.GetCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/capacityCommitments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_capacity_commitment( + request, metadata + ) + pb_request = reservation.GetCapacityCommitmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
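+            # On success, the JSON payload is parsed below with
+            # ``ignore_unknown_fields=True``, so fields added to the API
+            # after this client was generated do not break deserialization.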
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.CapacityCommitment() + pb_resp = reservation.CapacityCommitment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_capacity_commitment(resp) + return resp + + class _GetReservation(ReservationServiceRestStub): + def __hash__(self): + return hash("GetReservation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.GetReservationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Reservation: + r"""Call the get reservation method over HTTP. + + Args: + request (~.reservation.GetReservationRequest): + The request object. The request for + [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.Reservation: + A reservation is a mechanism used to + guarantee slots to users. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/reservations/*}", + }, + ] + request, metadata = self._interceptor.pre_get_reservation(request, metadata) + pb_request = reservation.GetReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
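+            # The parsed message passes through the interceptor's post hook
+            # before being returned, so a subclass can observe or replace it.
+            # A minimal sketch (``LoggingInterceptor`` is a hypothetical name):
+            #
+            #     class LoggingInterceptor(ReservationServiceRestInterceptor):
+            #         def post_get_reservation(self, response):
+            #             print(f"fetched reservation: {response.name}")
+            #             return response
+            #
+            #     transport = ReservationServiceRestTransport(
+            #         interceptor=LoggingInterceptor()
+            #     )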
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.Reservation() + pb_resp = reservation.Reservation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_reservation(resp) + return resp + + class _ListAssignments(ReservationServiceRestStub): + def __hash__(self): + return hash("ListAssignments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.ListAssignmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.ListAssignmentsResponse: + r"""Call the list assignments method over HTTP. + + Args: + request (~.reservation.ListAssignmentsRequest): + The request object. The request for + [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.ListAssignmentsResponse: + The response for + [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/reservations/*}/assignments", + }, + ] + request, metadata = self._interceptor.pre_list_assignments( + request, metadata + ) + pb_request = reservation.ListAssignmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
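+            # The raw response carries ``next_page_token``; the higher-level
+            # client is expected to wrap this stub in a pager that re-issues
+            # the request with ``page_token`` set until the token comes back
+            # empty.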
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.ListAssignmentsResponse() + pb_resp = reservation.ListAssignmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_assignments(resp) + return resp + + class _ListCapacityCommitments(ReservationServiceRestStub): + def __hash__(self): + return hash("ListCapacityCommitments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.ListCapacityCommitmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.ListCapacityCommitmentsResponse: + r"""Call the list capacity commitments method over HTTP. + + Args: + request (~.reservation.ListCapacityCommitmentsRequest): + The request object. The request for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.ListCapacityCommitmentsResponse: + The response for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/capacityCommitments", + }, + ] + request, metadata = self._interceptor.pre_list_capacity_commitments( + request, metadata + ) + pb_request = reservation.ListCapacityCommitmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
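+            # ``path_template.transcode`` matched ``parent`` into the URI
+            # above; remaining request fields such as ``page_size`` and
+            # ``page_token`` travel as query string parameters instead.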
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.ListCapacityCommitmentsResponse() + pb_resp = reservation.ListCapacityCommitmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_capacity_commitments(resp) + return resp + + class _ListReservations(ReservationServiceRestStub): + def __hash__(self): + return hash("ListReservations") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.ListReservationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.ListReservationsResponse: + r"""Call the list reservations method over HTTP. + + Args: + request (~.reservation.ListReservationsRequest): + The request object. The request for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.ListReservationsResponse: + The response for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/reservations", + }, + ] + request, metadata = self._interceptor.pre_list_reservations( + request, metadata + ) + pb_request = reservation.ListReservationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
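+            # The ``$alt=json;enum-encoding=int`` system parameter asks the
+            # server to reply with JSON in which enum values are integers,
+            # mirroring ``use_integers_for_enums=True`` on the request side.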
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.ListReservationsResponse() + pb_resp = reservation.ListReservationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_reservations(resp) + return resp + + class _MergeCapacityCommitments(ReservationServiceRestStub): + def __hash__(self): + return hash("MergeCapacityCommitments") + + def __call__( + self, + request: reservation.MergeCapacityCommitmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Call the merge capacity + commitments method over HTTP. + + Args: + request (~.reservation.MergeCapacityCommitmentsRequest): + The request object. The request for + [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/capacityCommitments:merge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_merge_capacity_commitments( + request, metadata + ) + pb_request = reservation.MergeCapacityCommitmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
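+            # Because the HTTP rule above declares ``"body": "*"``, the whole
+            # request message is serialized into the HTTP body; that is also
+            # why this stub defines no required query-parameter defaults to
+            # backfill.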
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.CapacityCommitment() + pb_resp = reservation.CapacityCommitment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_merge_capacity_commitments(resp) + return resp + + class _MoveAssignment(ReservationServiceRestStub): + def __hash__(self): + return hash("MoveAssignment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.MoveAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Assignment: + r"""Call the move assignment method over HTTP. + + Args: + request (~.reservation.MoveAssignmentRequest): + The request object. The request for + [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment]. + + **Note**: "bigquery.reservationAssignments.create" + permission is required on the destination_id. + + **Note**: "bigquery.reservationAssignments.create" and + "bigquery.reservationAssignments.delete" permission are + required on the related assignee. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.Assignment: + An assignment allows a project to + submit jobs of a certain type using + slots from the specified reservation. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/reservations/*/assignments/*}:move", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_move_assignment(request, metadata) + pb_request = reservation.MoveAssignmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
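+            # ``:move`` is a custom-method verb: if a request's ``name`` were,
+            # say, the hypothetical
+            # "projects/p/locations/US/reservations/r/assignments/a",
+            # transcoding would yield
+            # POST /v1/projects/p/locations/US/reservations/r/assignments/a:move.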
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.Assignment() + pb_resp = reservation.Assignment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_move_assignment(resp) + return resp + + class _SearchAllAssignments(ReservationServiceRestStub): + def __hash__(self): + return hash("SearchAllAssignments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.SearchAllAssignmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.SearchAllAssignmentsResponse: + r"""Call the search all assignments method over HTTP. + + Args: + request (~.reservation.SearchAllAssignmentsRequest): + The request object. The request for + [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. + Note: "bigquery.reservationAssignments.search" + permission is required on the related assignee. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.SearchAllAssignmentsResponse: + The response for + [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}:searchAllAssignments", + }, + ] + request, metadata = self._interceptor.pre_search_all_assignments( + request, metadata + ) + pb_request = reservation.SearchAllAssignmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
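+            # The caller-supplied metadata tuples were folded into ``headers``
+            # above, so per-request routing or tracing headers can be attached
+            # through the ``metadata`` argument.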
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.SearchAllAssignmentsResponse() + pb_resp = reservation.SearchAllAssignmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_all_assignments(resp) + return resp + + class _SearchAssignments(ReservationServiceRestStub): + def __hash__(self): + return hash("SearchAssignments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.SearchAssignmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.SearchAssignmentsResponse: + r"""Call the search assignments method over HTTP. + + Args: + request (~.reservation.SearchAssignmentsRequest): + The request object. The request for + [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. + Note: "bigquery.reservationAssignments.search" + permission is required on the related assignee. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.SearchAssignmentsResponse: + The response for + [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}:searchAssignments", + }, + ] + request, metadata = self._interceptor.pre_search_assignments( + request, metadata + ) + pb_request = reservation.SearchAssignmentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
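+            # SearchAssignments is deprecated upstream in favor of
+            # SearchAllAssignments; the stub remains so existing callers keep
+            # working over REST.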
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.SearchAssignmentsResponse() + pb_resp = reservation.SearchAssignmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_assignments(resp) + return resp + + class _SplitCapacityCommitment(ReservationServiceRestStub): + def __hash__(self): + return hash("SplitCapacityCommitment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: reservation.SplitCapacityCommitmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.SplitCapacityCommitmentResponse: + r"""Call the split capacity commitment method over HTTP. + + Args: + request (~.reservation.SplitCapacityCommitmentRequest): + The request object. The request for + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.SplitCapacityCommitmentResponse: + The response for + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/capacityCommitments/*}:split", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_split_capacity_commitment( + request, metadata + ) + pb_request = reservation.SplitCapacityCommitmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
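+            # ``_get_unset_required_fields`` backfills required query
+            # parameters that proto3 omits when they hold default values; the
+            # defaults map is empty here, so the ``update`` above adds nothing.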
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.SplitCapacityCommitmentResponse() + pb_resp = reservation.SplitCapacityCommitmentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_split_capacity_commitment(resp) + return resp + + class _UpdateAssignment(ReservationServiceRestStub): + def __hash__(self): + return hash("UpdateAssignment") + + def __call__( + self, + request: reservation.UpdateAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.Assignment: + r"""Call the update assignment method over HTTP. + + Args: + request (~.reservation.UpdateAssignmentRequest): + The request object. The request for + [ReservationService.UpdateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.Assignment: + An assignment allows a project to + submit jobs of a certain type using + slots from the specified reservation. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{assignment.name=projects/*/locations/*/reservations/*/assignments/*}", + "body": "assignment", + }, + ] + request, metadata = self._interceptor.pre_update_assignment( + request, metadata + ) + pb_request = reservation.UpdateAssignmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.Assignment() + pb_resp = reservation.Assignment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_assignment(resp) + return resp + + class _UpdateBiReservation(ReservationServiceRestStub): + def __hash__(self): + return hash("UpdateBiReservation") + + def __call__( + self, + request: reservation.UpdateBiReservationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.BiReservation: + r"""Call the update bi reservation method over HTTP. 
+ + Args: + request (~.reservation.UpdateBiReservationRequest): + The request object. A request to update a BI reservation. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.BiReservation: + Represents a BI Reservation. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{bi_reservation.name=projects/*/locations/*/biReservation}", + "body": "bi_reservation", + }, + ] + request, metadata = self._interceptor.pre_update_bi_reservation( + request, metadata + ) + pb_request = reservation.UpdateBiReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.BiReservation() + pb_resp = reservation.BiReservation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_bi_reservation(resp) + return resp + + class _UpdateCapacityCommitment(ReservationServiceRestStub): + def __hash__(self): + return hash("UpdateCapacityCommitment") + + def __call__( + self, + request: reservation.UpdateCapacityCommitmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> reservation.CapacityCommitment: + r"""Call the update capacity + commitment method over HTTP. + + Args: + request (~.reservation.UpdateCapacityCommitmentRequest): + The request object. The request for + [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.reservation.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. 
+ + A capacity commitment resource exists as + a child resource of the admin project. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{capacity_commitment.name=projects/*/locations/*/capacityCommitments/*}", + "body": "capacity_commitment", + }, + ] + request, metadata = self._interceptor.pre_update_capacity_commitment( + request, metadata + ) + pb_request = reservation.UpdateCapacityCommitmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.CapacityCommitment() + pb_resp = reservation.CapacityCommitment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_capacity_commitment(resp) + return resp + + class _UpdateReservation(ReservationServiceRestStub): + def __hash__(self): + return hash("UpdateReservation") + + def __call__( + self, + request: gcbr_reservation.UpdateReservationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcbr_reservation.Reservation: + r"""Call the update reservation method over HTTP. + + Args: + request (~.gcbr_reservation.UpdateReservationRequest): + The request object. The request for + [ReservationService.UpdateReservation][google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcbr_reservation.Reservation: + A reservation is a mechanism used to + guarantee slots to users. 
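(Editor's aside, not generated code: a minimal sketch of the exchange the stub below produces, assuming the default service host and hypothetical resource IDs. The PATCH URI comes from transcoding the http_options against reservation.name, int64 fields serialize as JSON strings, and the query string carries the field mask plus the $alt flag set in the code.)

    # Illustrative only; host, project, location, and reservation IDs are hypothetical.
    host = "https://bigqueryreservation.googleapis.com"  # assumed default host
    uri = "/v1/projects/my-proj/locations/US/reservations/prod"  # derived from reservation.name
    params = {"updateMask": "slotCapacity", "$alt": "json;enum-encoding=int"}
    body = '{"name": "projects/my-proj/locations/US/reservations/prod", "slotCapacity": "500"}'
    # Effectively: session.patch(host + uri, params=params, data=body,
    #                            headers={"Content-Type": "application/json"})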
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{reservation.name=projects/*/locations/*/reservations/*}", + "body": "reservation", + }, + ] + request, metadata = self._interceptor.pre_update_reservation( + request, metadata + ) + pb_request = gcbr_reservation.UpdateReservationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcbr_reservation.Reservation() + pb_resp = gcbr_reservation.Reservation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_reservation(resp) + return resp + + @property + def create_assignment( + self, + ) -> Callable[[reservation.CreateAssignmentRequest], reservation.Assignment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_capacity_commitment( + self, + ) -> Callable[ + [reservation.CreateCapacityCommitmentRequest], reservation.CapacityCommitment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_reservation( + self, + ) -> Callable[ + [gcbr_reservation.CreateReservationRequest], gcbr_reservation.Reservation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateReservation(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_assignment( + self, + ) -> Callable[[reservation.DeleteAssignmentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_capacity_commitment( + self, + ) -> Callable[[reservation.DeleteCapacityCommitmentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_reservation( + self, + ) -> Callable[[reservation.DeleteReservationRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteReservation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_bi_reservation( + self, + ) -> Callable[[reservation.GetBiReservationRequest], reservation.BiReservation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBiReservation(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_capacity_commitment( + self, + ) -> Callable[ + [reservation.GetCapacityCommitmentRequest], reservation.CapacityCommitment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_reservation( + self, + ) -> Callable[[reservation.GetReservationRequest], reservation.Reservation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetReservation(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_assignments( + self, + ) -> Callable[ + [reservation.ListAssignmentsRequest], reservation.ListAssignmentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAssignments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_capacity_commitments( + self, + ) -> Callable[ + [reservation.ListCapacityCommitmentsRequest], + reservation.ListCapacityCommitmentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCapacityCommitments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_reservations( + self, + ) -> Callable[ + [reservation.ListReservationsRequest], reservation.ListReservationsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReservations(self._session, self._host, self._interceptor) # type: ignore + + @property + def merge_capacity_commitments( + self, + ) -> Callable[ + [reservation.MergeCapacityCommitmentsRequest], reservation.CapacityCommitment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._MergeCapacityCommitments(self._session, self._host, self._interceptor) # type: ignore + + @property + def move_assignment( + self, + ) -> Callable[[reservation.MoveAssignmentRequest], reservation.Assignment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._MoveAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_all_assignments( + self, + ) -> Callable[ + [reservation.SearchAllAssignmentsRequest], + reservation.SearchAllAssignmentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchAllAssignments(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_assignments( + self, + ) -> Callable[ + [reservation.SearchAssignmentsRequest], reservation.SearchAssignmentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchAssignments(self._session, self._host, self._interceptor) # type: ignore + + @property + def split_capacity_commitment( + self, + ) -> Callable[ + [reservation.SplitCapacityCommitmentRequest], + reservation.SplitCapacityCommitmentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SplitCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_assignment( + self, + ) -> Callable[[reservation.UpdateAssignmentRequest], reservation.Assignment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_bi_reservation( + self, + ) -> Callable[[reservation.UpdateBiReservationRequest], reservation.BiReservation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBiReservation(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_capacity_commitment( + self, + ) -> Callable[ + [reservation.UpdateCapacityCommitmentRequest], reservation.CapacityCommitment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_reservation( + self, + ) -> Callable[ + [gcbr_reservation.UpdateReservationRequest], gcbr_reservation.Reservation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateReservation(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ReservationServiceRestTransport",) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/__init__.py new file mode 100644 index 000000000000..b182993183cc --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/__init__.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .reservation import ( + Assignment, + BiReservation, + CapacityCommitment, + CreateAssignmentRequest, + CreateCapacityCommitmentRequest, + CreateReservationRequest, + DeleteAssignmentRequest, + DeleteCapacityCommitmentRequest, + DeleteReservationRequest, + Edition, + GetBiReservationRequest, + GetCapacityCommitmentRequest, + GetReservationRequest, + ListAssignmentsRequest, + ListAssignmentsResponse, + ListCapacityCommitmentsRequest, + ListCapacityCommitmentsResponse, + ListReservationsRequest, + ListReservationsResponse, + MergeCapacityCommitmentsRequest, + MoveAssignmentRequest, + Reservation, + SearchAllAssignmentsRequest, + SearchAllAssignmentsResponse, + SearchAssignmentsRequest, + SearchAssignmentsResponse, + SplitCapacityCommitmentRequest, + SplitCapacityCommitmentResponse, + TableReference, + UpdateAssignmentRequest, + UpdateBiReservationRequest, + UpdateCapacityCommitmentRequest, + UpdateReservationRequest, +) + +__all__ = ( + "Assignment", + "BiReservation", + "CapacityCommitment", + "CreateAssignmentRequest", + "CreateCapacityCommitmentRequest", + "CreateReservationRequest", + "DeleteAssignmentRequest", + "DeleteCapacityCommitmentRequest", + "DeleteReservationRequest", + "GetBiReservationRequest", + "GetCapacityCommitmentRequest", + "GetReservationRequest", + "ListAssignmentsRequest", + "ListAssignmentsResponse", + "ListCapacityCommitmentsRequest", + "ListCapacityCommitmentsResponse", + "ListReservationsRequest", + "ListReservationsResponse", + "MergeCapacityCommitmentsRequest", + "MoveAssignmentRequest", + "Reservation", + "SearchAllAssignmentsRequest", + "SearchAllAssignmentsResponse", + "SearchAssignmentsRequest", + "SearchAssignmentsResponse", + "SplitCapacityCommitmentRequest", + "SplitCapacityCommitmentResponse", + "TableReference", + "UpdateAssignmentRequest", + "UpdateBiReservationRequest", + "UpdateCapacityCommitmentRequest", + "UpdateReservationRequest", + "Edition", +) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py new file mode 100644 index 000000000000..dc241664c01e --- /dev/null +++ 
b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py @@ -0,0 +1,1320 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.bigquery.reservation.v1", + manifest={ + "Edition", + "Reservation", + "CapacityCommitment", + "CreateReservationRequest", + "ListReservationsRequest", + "ListReservationsResponse", + "GetReservationRequest", + "DeleteReservationRequest", + "UpdateReservationRequest", + "CreateCapacityCommitmentRequest", + "ListCapacityCommitmentsRequest", + "ListCapacityCommitmentsResponse", + "GetCapacityCommitmentRequest", + "DeleteCapacityCommitmentRequest", + "UpdateCapacityCommitmentRequest", + "SplitCapacityCommitmentRequest", + "SplitCapacityCommitmentResponse", + "MergeCapacityCommitmentsRequest", + "Assignment", + "CreateAssignmentRequest", + "ListAssignmentsRequest", + "ListAssignmentsResponse", + "DeleteAssignmentRequest", + "SearchAssignmentsRequest", + "SearchAllAssignmentsRequest", + "SearchAssignmentsResponse", + "SearchAllAssignmentsResponse", + "MoveAssignmentRequest", + "UpdateAssignmentRequest", + "TableReference", + "BiReservation", + "GetBiReservationRequest", + "UpdateBiReservationRequest", + }, +) + + +class Edition(proto.Enum): + r"""The type of editions. + Different features and behaviors are provided to different + editions. Capacity commitments and reservations are linked to + editions. + + Values: + EDITION_UNSPECIFIED (0): + Default value, which will be treated as + ENTERPRISE. + STANDARD (1): + Standard edition. + ENTERPRISE (2): + Enterprise edition. + ENTERPRISE_PLUS (3): + Enterprise plus edition. + """ + EDITION_UNSPECIFIED = 0 + STANDARD = 1 + ENTERPRISE = 2 + ENTERPRISE_PLUS = 3 + + +class Reservation(proto.Message): + r"""A reservation is a mechanism used to guarantee slots to + users. + + Attributes: + name (str): + The resource name of the reservation, e.g., + ``projects/*/locations/*/reservations/team1-prod``. The + reservation_id must only contain lower case alphanumeric + characters or dashes. It must start with a letter and must + not end with a dash. Its maximum length is 64 characters. + slot_capacity (int): + Minimum slots available to this reservation. A slot is a + unit of computational power in BigQuery, and serves as the + unit of parallelism. + + Queries using this reservation might use more slots during + runtime if ignore_idle_slots is set to false. + + If total slot_capacity of the reservation and its siblings + exceeds the total slot_count of all capacity commitments, + the request will fail with + ``google.rpc.Code.RESOURCE_EXHAUSTED``.
+ + NOTE: for reservations in US or EU multi-regions, slot + capacity constraints are checked separately for default and + auxiliary regions. See multi_region_auxiliary flag for more + details. + ignore_idle_slots (bool): + If false, any query or pipeline job using this reservation + will use idle slots from other reservations within the same + admin project. If true, a query or pipeline job using this + reservation will execute with the slot capacity specified in + the slot_capacity field at most. + autoscale (google.cloud.bigquery_reservation_v1.types.Reservation.Autoscale): + The configuration parameters for the auto + scaling feature. Note this is an alpha feature. + concurrency (int): + Job concurrency target which sets a soft upper bound on the + number of jobs that can run concurrently in this + reservation. This is a soft target due to asynchronous + nature of the system and various optimizations for small + queries. Default value is 0 which means that concurrency + target will be automatically computed by the system. NOTE: + this field is exposed as ``target_job_concurrency`` in the + Information Schema, DDL and BQ CLI. + creation_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of the + reservation. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of the + reservation. + multi_region_auxiliary (bool): + Applicable only for reservations located + within one of the BigQuery multi-regions (US or + EU). + + If set to true, this reservation is placed in + the organization's secondary region which is + designated for disaster recovery purposes. If + false, this reservation is placed in the + organization's default region. + + NOTE: this is a preview feature. Project must be + allow-listed in order to set this field. + edition (google.cloud.bigquery_reservation_v1.types.Edition): + Edition of the reservation. + """ + + class Autoscale(proto.Message): + r"""Auto scaling settings. + + Attributes: + current_slots (int): + Output only. The slot capacity added to this reservation + when autoscale happens. Will be between [0, max_slots]. + max_slots (int): + Number of slots to be scaled when needed. + """ + + current_slots: int = proto.Field( + proto.INT64, + number=1, + ) + max_slots: int = proto.Field( + proto.INT64, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + slot_capacity: int = proto.Field( + proto.INT64, + number=2, + ) + ignore_idle_slots: bool = proto.Field( + proto.BOOL, + number=4, + ) + autoscale: Autoscale = proto.Field( + proto.MESSAGE, + number=7, + message=Autoscale, + ) + concurrency: int = proto.Field( + proto.INT64, + number=16, + ) + creation_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + multi_region_auxiliary: bool = proto.Field( + proto.BOOL, + number=14, + ) + edition: "Edition" = proto.Field( + proto.ENUM, + number=17, + enum="Edition", + ) + + +class CapacityCommitment(proto.Message): + r"""Capacity commitment is a way to purchase compute capacity for + BigQuery jobs (in the form of slots) with some committed period + of usage. Annual commitments renew by default. Commitments can + be removed after their commitment end time passes. + + In order to remove annual commitment, its plan needs to be + changed to monthly or flex first. 
+ + A capacity commitment resource exists as a child resource of the + admin project. + + Attributes: + name (str): + Output only. The resource name of the capacity commitment, + e.g., + ``projects/myproject/locations/US/capacityCommitments/123`` + The commitment_id must only contain lower case alphanumeric + characters or dashes. It must start with a letter and must + not end with a dash. Its maximum length is 64 characters. + slot_count (int): + Number of slots in this commitment. + plan (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.CommitmentPlan): + Capacity commitment plan. + state (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.State): + Output only. State of the commitment. + commitment_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start of the current + commitment period. It is applicable only for + ACTIVE capacity commitments. + commitment_end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end of the current + commitment period. It is applicable only for + ACTIVE capacity commitments. + failure_status (google.rpc.status_pb2.Status): + Output only. For a FAILED commitment plan, + provides the reason for failure. + renewal_plan (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.CommitmentPlan): + The plan this capacity commitment is converted to after + commitment_end_time passes. Once the plan is changed, + the committed period is extended according to the commitment plan. + Only applicable for ANNUAL and TRIAL commitments. + multi_region_auxiliary (bool): + Applicable only for commitments located + within one of the BigQuery multi-regions (US or + EU). + + If set to true, this commitment is placed in the + organization's secondary region which is + designated for disaster recovery purposes. If + false, this commitment is placed in the + organization's default region. + + NOTE: this is a preview feature. Project must be + allow-listed in order to set this field. + edition (google.cloud.bigquery_reservation_v1.types.Edition): + Edition of the capacity commitment. + """ + + class CommitmentPlan(proto.Enum): + r"""Commitment plan defines the current committed period. + Capacity commitment cannot be deleted during its committed + period. + + Values: + COMMITMENT_PLAN_UNSPECIFIED (0): + Invalid plan value. Requests with this value will be + rejected with error code + ``google.rpc.Code.INVALID_ARGUMENT``. + FLEX (3): + Flex commitments have a committed period of 1 + minute after becoming ACTIVE. After that, they + are not in a committed period anymore and can be + removed any time. + FLEX_FLAT_RATE (7): + Same as FLEX, should only be used if + flat-rate commitments are still available. + TRIAL (5): + Trial commitments have a committed period of 182 days after + becoming ACTIVE. After that, they are converted to a new + commitment based on the ``renewal_plan``. Default + ``renewal_plan`` for Trial commitment is Flex so that it can + be deleted right after the committed period ends. + MONTHLY (2): + Monthly commitments have a committed period + of 30 days after becoming ACTIVE. After that, + they are not in a committed period anymore and + can be removed any time. + MONTHLY_FLAT_RATE (8): + Same as MONTHLY, should only be used if + flat-rate commitments are still available. + ANNUAL (4): + Annual commitments have a committed period of 365 days after + becoming ACTIVE. After that, they are converted to a new + commitment based on the renewal_plan.
+ ANNUAL_FLAT_RATE (9): + Same as ANNUAL, should only be used if + flat-rate commitments are still available. + THREE_YEAR (10): + 3-year commitments have a committed period of 1095 (3 \* 365) + days after becoming ACTIVE. After that, they are converted to + a new commitment based on the renewal_plan. + NONE (6): + Should only be used for ``renewal_plan`` and is only + meaningful if the edition is set to a value other than + EDITION_UNSPECIFIED. Otherwise + CreateCapacityCommitmentRequest or + UpdateCapacityCommitmentRequest will be rejected with error + code ``google.rpc.Code.INVALID_ARGUMENT``. If the + renewal_plan is NONE, the capacity commitment will be removed at + the end of its commitment period. + """ + COMMITMENT_PLAN_UNSPECIFIED = 0 + FLEX = 3 + FLEX_FLAT_RATE = 7 + TRIAL = 5 + MONTHLY = 2 + MONTHLY_FLAT_RATE = 8 + ANNUAL = 4 + ANNUAL_FLAT_RATE = 9 + THREE_YEAR = 10 + NONE = 6 + + class State(proto.Enum): + r"""Capacity commitment can either become ACTIVE right away or + transition from PENDING to ACTIVE or FAILED. + + Values: + STATE_UNSPECIFIED (0): + Invalid state value. + PENDING (1): + Capacity commitment is pending provisioning. Pending + capacity commitment does not contribute to the project's + slot_capacity. + ACTIVE (2): + Once slots are provisioned, capacity commitment becomes + active. slot_count is added to the project's slot_capacity. + FAILED (3): + Capacity commitment failed to be activated + by the backend. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + ACTIVE = 2 + FAILED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + slot_count: int = proto.Field( + proto.INT64, + number=2, + ) + plan: CommitmentPlan = proto.Field( + proto.ENUM, + number=3, + enum=CommitmentPlan, + ) + state: State = proto.Field( + proto.ENUM, + number=4, + enum=State, + ) + commitment_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + commitment_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + failure_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=7, + message=status_pb2.Status, + ) + renewal_plan: CommitmentPlan = proto.Field( + proto.ENUM, + number=8, + enum=CommitmentPlan, + ) + multi_region_auxiliary: bool = proto.Field( + proto.BOOL, + number=10, + ) + edition: "Edition" = proto.Field( + proto.ENUM, + number=12, + enum="Edition", + ) + + +class CreateReservationRequest(proto.Message): + r"""The request for + [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation]. + + Attributes: + parent (str): + Required. Project, location. E.g., + ``projects/myproject/locations/US`` + reservation_id (str): + The reservation ID. It must only contain + lower case alphanumeric characters or dashes. It + must start with a letter and must not end with a + dash. Its maximum length is 64 characters. + reservation (google.cloud.bigquery_reservation_v1.types.Reservation): + Definition of the new reservation to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + reservation_id: str = proto.Field( + proto.STRING, + number=2, + ) + reservation: "Reservation" = proto.Field( + proto.MESSAGE, + number=3, + message="Reservation", + ) + + +class ListReservationsRequest(proto.Message): + r"""The request for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations].
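(Editor's aside, not part of the generated file: a minimal sketch of constructing this request with the generated types; the project and location are hypothetical.)

    from google.cloud import bigquery_reservation_v1 as bq_reservation

    request = bq_reservation.ListReservationsRequest(
        parent="projects/my-project/locations/US",  # hypothetical parent
        page_size=100,
    )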
+ + Attributes: + parent (str): + Required. The parent resource name containing project and + location, e.g.: ``projects/myproject/locations/US`` + page_size (int): + The maximum number of items to return per + page. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListReservationsResponse(proto.Message): + r"""The response for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + + Attributes: + reservations (MutableSequence[google.cloud.bigquery_reservation_v1.types.Reservation]): + List of reservations visible to the user. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + reservations: MutableSequence["Reservation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Reservation", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetReservationRequest(proto.Message): + r"""The request for + [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. + + Attributes: + name (str): + Required. Resource name of the reservation to retrieve. + E.g., + ``projects/myproject/locations/US/reservations/team1-prod`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteReservationRequest(proto.Message): + r"""The request for + [ReservationService.DeleteReservation][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation]. + + Attributes: + name (str): + Required. Resource name of the reservation to delete. + E.g., + ``projects/myproject/locations/US/reservations/team1-prod`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateReservationRequest(proto.Message): + r"""The request for + [ReservationService.UpdateReservation][google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation]. + + Attributes: + reservation (google.cloud.bigquery_reservation_v1.types.Reservation): + Content of the reservation to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Standard field mask for the set of fields to + be updated. + """ + + reservation: "Reservation" = proto.Field( + proto.MESSAGE, + number=1, + message="Reservation", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class CreateCapacityCommitmentRequest(proto.Message): + r"""The request for + [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. + + Attributes: + parent (str): + Required. Resource name of the parent reservation. E.g., + ``projects/myproject/locations/US`` + capacity_commitment (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): + Content of the capacity commitment to create. + enforce_single_admin_project_per_org (bool): + If true, fail the request if another project + in the organization has a capacity commitment. + capacity_commitment_id (str): + The optional capacity commitment ID. Capacity + commitment name will be generated automatically + if this field is empty.
This field must only + contain lower case alphanumeric characters or + dashes. The first and last character cannot be a + dash. Max length is 64 characters. NOTE: this ID + won't be kept if the capacity commitment is + split or merged. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + capacity_commitment: "CapacityCommitment" = proto.Field( + proto.MESSAGE, + number=2, + message="CapacityCommitment", + ) + enforce_single_admin_project_per_org: bool = proto.Field( + proto.BOOL, + number=4, + ) + capacity_commitment_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListCapacityCommitmentsRequest(proto.Message): + r"""The request for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + + Attributes: + parent (str): + Required. Resource name of the parent reservation. E.g., + ``projects/myproject/locations/US`` + page_size (int): + The maximum number of items to return. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListCapacityCommitmentsResponse(proto.Message): + r"""The response for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + + Attributes: + capacity_commitments (MutableSequence[google.cloud.bigquery_reservation_v1.types.CapacityCommitment]): + List of capacity commitments visible to the + user. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + capacity_commitments: MutableSequence["CapacityCommitment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CapacityCommitment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetCapacityCommitmentRequest(proto.Message): + r"""The request for + [ReservationService.GetCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment]. + + Attributes: + name (str): + Required. Resource name of the capacity commitment to + retrieve. E.g., + ``projects/myproject/locations/US/capacityCommitments/123`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteCapacityCommitmentRequest(proto.Message): + r"""The request for + [ReservationService.DeleteCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment]. + + Attributes: + name (str): + Required. Resource name of the capacity commitment to + delete. E.g., + ``projects/myproject/locations/US/capacityCommitments/123`` + force (bool): + Can be used to force delete commitments even + if assignments exist. Deleting commitments with + assignments may cause queries to fail if they no + longer have access to slots. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateCapacityCommitmentRequest(proto.Message): + r"""The request for + [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. 
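(Editor's aside, a hedged sketch: the update_mask lists the fields to change, so a partial update that only switches the renewal plan might look like the following; the resource name is hypothetical.)

    from google.protobuf import field_mask_pb2

    from google.cloud import bigquery_reservation_v1 as bq_reservation

    request = bq_reservation.UpdateCapacityCommitmentRequest(
        capacity_commitment=bq_reservation.CapacityCommitment(
            name="projects/my-project/locations/US/capacityCommitments/123",  # hypothetical
            renewal_plan=bq_reservation.CapacityCommitment.CommitmentPlan.FLEX,
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["renewal_plan"]),
    )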
+ + Attributes: + capacity_commitment (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): + Content of the capacity commitment to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Standard field mask for the set of fields to + be updated. + """ + + capacity_commitment: "CapacityCommitment" = proto.Field( + proto.MESSAGE, + number=1, + message="CapacityCommitment", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class SplitCapacityCommitmentRequest(proto.Message): + r"""The request for + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + + Attributes: + name (str): + Required. The resource name e.g.,: + ``projects/myproject/locations/US/capacityCommitments/123`` + slot_count (int): + Number of slots in the capacity commitment + after the split. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + slot_count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class SplitCapacityCommitmentResponse(proto.Message): + r"""The response for + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + + Attributes: + first (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): + First capacity commitment, result of a split. + second (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): + Second capacity commitment, result of a + split. + """ + + first: "CapacityCommitment" = proto.Field( + proto.MESSAGE, + number=1, + message="CapacityCommitment", + ) + second: "CapacityCommitment" = proto.Field( + proto.MESSAGE, + number=2, + message="CapacityCommitment", + ) + + +class MergeCapacityCommitmentsRequest(proto.Message): + r"""The request for + [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments]. + + Attributes: + parent (str): + Parent resource that identifies admin project and location + e.g., ``projects/myproject/locations/us`` + capacity_commitment_ids (MutableSequence[str]): + Ids of capacity commitments to merge. + These capacity commitments must exist under + admin project and location specified in the + parent. + ID is the last portion of capacity commitment + name e.g., 'abc' for + projects/myproject/locations/US/capacityCommitments/abc + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + capacity_commitment_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class Assignment(proto.Message): + r"""An assignment allows a project to submit jobs + of a certain type using slots from the specified reservation. + + Attributes: + name (str): + Output only. Name of the resource. E.g.: + ``projects/myproject/locations/US/reservations/team1-prod/assignments/123``. + The assignment_id must only contain lower case alphanumeric + characters or dashes and the max length is 64 characters. + assignee (str): + The resource which will use the reservation. E.g. + ``projects/myproject``, ``folders/123``, or + ``organizations/456``. + job_type (google.cloud.bigquery_reservation_v1.types.Assignment.JobType): + Which type of jobs will use the reservation. + state (google.cloud.bigquery_reservation_v1.types.Assignment.State): + Output only. State of the assignment. + """ + + class JobType(proto.Enum): + r"""Types of job, which could be specified when using the + reservation. 
+ + Values: + JOB_TYPE_UNSPECIFIED (0): + Invalid type. Requests with this value will be rejected with + error code ``google.rpc.Code.INVALID_ARGUMENT``. + PIPELINE (1): + Pipeline (load/export) jobs from the project + will use the reservation. + QUERY (2): + Query jobs from the project will use the + reservation. + ML_EXTERNAL (3): + BigQuery ML jobs that use services external + to BigQuery for model training. These jobs will + not utilize idle slots from other reservations. + BACKGROUND (4): + Background jobs that BigQuery runs for the + customers in the background. + """ + JOB_TYPE_UNSPECIFIED = 0 + PIPELINE = 1 + QUERY = 2 + ML_EXTERNAL = 3 + BACKGROUND = 4 + + class State(proto.Enum): + r"""Assignment will remain in PENDING state if no active capacity + commitment is present. It will become ACTIVE when some capacity + commitment becomes active. + + Values: + STATE_UNSPECIFIED (0): + Invalid state value. + PENDING (1): + Queries from assignee will be executed as + on-demand, if related assignment is pending. + ACTIVE (2): + Assignment is ready. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + ACTIVE = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + assignee: str = proto.Field( + proto.STRING, + number=4, + ) + job_type: JobType = proto.Field( + proto.ENUM, + number=3, + enum=JobType, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + + +class CreateAssignmentRequest(proto.Message): + r"""The request for + [ReservationService.CreateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment]. + Note: "bigquery.reservationAssignments.create" permission is + required on the related assignee. + + Attributes: + parent (str): + Required. The parent resource name of the assignment E.g. + ``projects/myproject/locations/US/reservations/team1-prod`` + assignment (google.cloud.bigquery_reservation_v1.types.Assignment): + Assignment resource to create. + assignment_id (str): + The optional assignment ID. Assignment name + will be generated automatically if this field is + empty. This field must only contain lower case + alphanumeric characters or dashes. Max length is + 64 characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + assignment: "Assignment" = proto.Field( + proto.MESSAGE, + number=2, + message="Assignment", + ) + assignment_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListAssignmentsRequest(proto.Message): + r"""The request for + [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. + + Attributes: + parent (str): + Required. The parent resource name e.g.: + + ``projects/myproject/locations/US/reservations/team1-prod`` + + Or: + + ``projects/myproject/locations/US/reservations/-`` + page_size (int): + The maximum number of items to return per + page. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAssignmentsResponse(proto.Message): + r"""The response for + [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. + + Attributes: + assignments (MutableSequence[google.cloud.bigquery_reservation_v1.types.Assignment]): + List of assignments visible to the user. 
+ next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + assignments: MutableSequence["Assignment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Assignment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteAssignmentRequest(proto.Message): + r"""The request for + [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment]. + Note: "bigquery.reservationAssignments.delete" permission is + required on the related assignee. + + Attributes: + name (str): + Required. Name of the resource, e.g. + ``projects/myproject/locations/US/reservations/team1-prod/assignments/123`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SearchAssignmentsRequest(proto.Message): + r"""The request for + [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. + Note: "bigquery.reservationAssignments.search" permission is + required on the related assignee. + + Attributes: + parent (str): + Required. The resource name of the admin project(containing + project and location), e.g.: + ``projects/myproject/locations/US``. + query (str): + Please specify resource name as assignee in the query. + + Examples: + + - ``assignee=projects/myproject`` + - ``assignee=folders/123`` + - ``assignee=organizations/456`` + page_size (int): + The maximum number of items to return per + page. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class SearchAllAssignmentsRequest(proto.Message): + r"""The request for + [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. + Note: "bigquery.reservationAssignments.search" permission is + required on the related assignee. + + Attributes: + parent (str): + Required. The resource name with location (project name + could be the wildcard '-'), e.g.: + ``projects/-/locations/US``. + query (str): + Please specify resource name as assignee in the query. + + Examples: + + - ``assignee=projects/myproject`` + - ``assignee=folders/123`` + - ``assignee=organizations/456`` + page_size (int): + The maximum number of items to return per + page. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class SearchAssignmentsResponse(proto.Message): + r"""The response for + [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. + + Attributes: + assignments (MutableSequence[google.cloud.bigquery_reservation_v1.types.Assignment]): + List of assignments visible to the user. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
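(Editor's aside: at the client layer these search responses are wrapped in a pager that follows next_page_token automatically; a hedged sketch with hypothetical names, assuming an existing ReservationServiceClient instance.)

    # `client` is assumed to be a bigquery_reservation_v1.ReservationServiceClient.
    pager = client.search_assignments(
        parent="projects/my-project/locations/US",  # hypothetical admin project
        query="assignee=projects/my-project",
    )
    for assignment in pager:  # the pager fetches further pages via next_page_token
        print(assignment.name)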
+ """ + + @property + def raw_page(self): + return self + + assignments: MutableSequence["Assignment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Assignment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchAllAssignmentsResponse(proto.Message): + r"""The response for + [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. + + Attributes: + assignments (MutableSequence[google.cloud.bigquery_reservation_v1.types.Assignment]): + List of assignments visible to the user. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + assignments: MutableSequence["Assignment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Assignment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MoveAssignmentRequest(proto.Message): + r"""The request for + [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment]. + + **Note**: "bigquery.reservationAssignments.create" permission is + required on the destination_id. + + **Note**: "bigquery.reservationAssignments.create" and + "bigquery.reservationAssignments.delete" permission are required on + the related assignee. + + Attributes: + name (str): + Required. The resource name of the assignment, e.g. + ``projects/myproject/locations/US/reservations/team1-prod/assignments/123`` + destination_id (str): + The new reservation ID, e.g.: + ``projects/myotherproject/locations/US/reservations/team2-prod`` + assignment_id (str): + The optional assignment ID. A new assignment + name is generated if this field is empty. + + This field can contain only lowercase + alphanumeric characters or dashes. Max length is + 64 characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + destination_id: str = proto.Field( + proto.STRING, + number=3, + ) + assignment_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class UpdateAssignmentRequest(proto.Message): + r"""The request for + [ReservationService.UpdateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment]. + + Attributes: + assignment (google.cloud.bigquery_reservation_v1.types.Assignment): + Content of the assignment to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Standard field mask for the set of fields to + be updated. + """ + + assignment: "Assignment" = proto.Field( + proto.MESSAGE, + number=1, + message="Assignment", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class TableReference(proto.Message): + r"""Fully qualified reference to BigQuery table. + Internally stored as google.cloud.bi.v1.BqTableReference. + + Attributes: + project_id (str): + The assigned project ID of the project. + dataset_id (str): + The ID of the dataset in the above project. + table_id (str): + The ID of the table in the above dataset. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=2, + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BiReservation(proto.Message): + r"""Represents a BI Reservation. + + Attributes: + name (str): + The resource name of the singleton BI reservation. 
+ Reservation names have the form + ``projects/{project_id}/locations/{location_id}/biReservation``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of a + reservation. + size (int): + Size of a reservation, in bytes. + preferred_tables (MutableSequence[google.cloud.bigquery_reservation_v1.types.TableReference]): + Preferred tables to use BI capacity for. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + size: int = proto.Field( + proto.INT64, + number=4, + ) + preferred_tables: MutableSequence["TableReference"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="TableReference", + ) + + +class GetBiReservationRequest(proto.Message): + r"""A request to get a singleton BI reservation. + + Attributes: + name (str): + Required. Name of the requested reservation, for example: + ``projects/{project_id}/locations/{location_id}/biReservation`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateBiReservationRequest(proto.Message): + r"""A request to update a BI reservation. + + Attributes: + bi_reservation (google.cloud.bigquery_reservation_v1.types.BiReservation): + A reservation to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A list of fields to be updated in this + request. + """ + + bi_reservation: "BiReservation" = proto.Field( + proto.MESSAGE, + number=1, + message="BiReservation", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-reservation/mypy.ini b/packages/google-cloud-bigquery-reservation/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-bigquery-reservation/noxfile.py b/packages/google-cloud-bigquery-reservation/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
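(Editor's note, not generated content: the sessions defined in this noxfile are typically run from the package directory; a hedged example of common invocations, assuming nox is installed.)

    # nox -s unit-3.11     -> one unit-test session from UNIT_TEST_PYTHON_VERSIONS
    # nox -s system-3.11   -> the session CI selects via the NOX_SESSION env var
    # nox -s lint blacken  -> style checks and formatting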
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
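(Editor's note on the two runs below: both pass the same --junitxml filename, so if tests/system.py and a tests/system/ folder both exist, the folder run's report overwrites the file run's report.)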
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
+        # See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run("py.test", "tests/unit")
diff --git a/packages/google-cloud-bigquery-reservation/renovate.json b/packages/google-cloud-bigquery-reservation/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-bigquery-reservation/samples/AUTHORING_GUIDE.md b/packages/google-cloud-bigquery-reservation/samples/AUTHORING_GUIDE.md
new file mode 100644
index 000000000000..55c97b32f4c1
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-reservation/samples/CONTRIBUTING.md b/packages/google-cloud-bigquery-reservation/samples/CONTRIBUTING.md
new file mode 100644
index 000000000000..34c882b6f1a3
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-reservation/scripts/decrypt-secrets.sh b/packages/google-cloud-bigquery-reservation/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..0018b421ddf8
--- /dev/null
+++ b/packages/google-cloud-bigquery-reservation/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2023 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
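The fetches below shell out to gcloud; the same reads can be done in-process with the Secret Manager client library. A hedged sketch, not part of this change (the secret and project names mirror the script; google-cloud-secret-manager is assumed to be installed):

from google.cloud import secretmanager

client = secretmanager.SecretManagerServiceClient()
name = (
    "projects/cloud-devrel-kokoro-resources"
    "/secrets/python-docs-samples-test-env/versions/latest"
)
# The payload of a secret version is raw bytes, same as the gcloud CLI output.
payload = client.access_secret_version(request={"name": name}).payload.data
with open("testing/test-env.sh", "wb") as fh:
    fh.write(payload)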
+if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-bigquery-reservation/scripts/fixup_bigquery_reservation_v1_keywords.py b/packages/google-cloud-bigquery-reservation/scripts/fixup_bigquery_reservation_v1_keywords.py new file mode 100644 index 000000000000..cfd2a0848ee1 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/scripts/fixup_bigquery_reservation_v1_keywords.py @@ -0,0 +1,196 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
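Before the fixup script's body, a brief look back at the noxfile's prerelease_deps session above: it harvested package names from the pinned constraints file with the lookahead regex r"^\s*(\S+)(?===\S+)". A quick standalone check of what that pattern captures (the sample text is invented):

import re

constraints_text = """\
google-api-core==1.34.0
# comment lines never match the anchored pattern
proto-plus==1.22.0
mock
"""
deps = [
    match.group(1)
    for match in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
]
# Only 'pkg==version' lines contribute; unpinned names like 'mock' are skipped.
print(deps)  # ['google-api-core', 'proto-plus']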
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class bigquery_reservationCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+        'create_assignment': ('parent', 'assignment', 'assignment_id', ),
+        'create_capacity_commitment': ('parent', 'capacity_commitment', 'enforce_single_admin_project_per_org', 'capacity_commitment_id', ),
+        'create_reservation': ('parent', 'reservation_id', 'reservation', ),
+        'delete_assignment': ('name', ),
+        'delete_capacity_commitment': ('name', 'force', ),
+        'delete_reservation': ('name', ),
+        'get_bi_reservation': ('name', ),
+        'get_capacity_commitment': ('name', ),
+        'get_reservation': ('name', ),
+        'list_assignments': ('parent', 'page_size', 'page_token', ),
+        'list_capacity_commitments': ('parent', 'page_size', 'page_token', ),
+        'list_reservations': ('parent', 'page_size', 'page_token', ),
+        'merge_capacity_commitments': ('parent', 'capacity_commitment_ids', ),
+        'move_assignment': ('name', 'destination_id', 'assignment_id', ),
+        'search_all_assignments': ('parent', 'query', 'page_size', 'page_token', ),
+        'search_assignments': ('parent', 'query', 'page_size', 'page_token', ),
+        'split_capacity_commitment': ('name', 'slot_count', ),
+        'update_assignment': ('assignment', 'update_mask', ),
+        'update_bi_reservation': ('bi_reservation', 'update_mask', ),
+        'update_capacity_commitment': ('capacity_commitment', 'update_mask', ),
+        'update_reservation': ('reservation', 'update_mask', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=bigquery_reservationCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the bigquery_reservation client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-reservation/scripts/fixup_keywords.py b/packages/google-cloud-bigquery-reservation/scripts/fixup_keywords.py new file mode 100644 index 000000000000..c56190ddd06c --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/scripts/fixup_keywords.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
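To see the transformer above in action, a hedged sketch; it assumes the scripts directory is on sys.path so the module can be imported, and the variable names in the sample source are placeholders:

import libcst as cst

from fixup_bigquery_reservation_v1_keywords import bigquery_reservationCallTransformer

SRC = 'client.create_reservation(parent, "res_id", res, timeout=30.0)\n'
updated = cst.parse_module(SRC).visit(bigquery_reservationCallTransformer())
# Positional arguments fold into a single request dict, while the control
# parameter timeout survives as a plain keyword argument, roughly:
#   client.create_reservation(request={'parent': parent,
#       'reservation_id': "res_id", 'reservation': res}, timeout=30.0)
print(updated.code)

From the command line the same rewrite runs over a whole tree via the required flags, roughly python fixup_bigquery_reservation_v1_keywords.py -d IN_DIR -o OUT_DIR, with OUT_DIR existing and empty.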
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class reservationCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_assignment': ('parent', 'assignment', ), + 'create_capacity_commitment': ('parent', 'capacity_commitment', 'enforce_single_admin_project_per_org', ), + 'create_reservation': ('parent', 'reservation_id', 'reservation', ), + 'delete_assignment': ('name', ), + 'delete_capacity_commitment': ('name', ), + 'delete_reservation': ('name', ), + 'get_bi_reservation': ('name', ), + 'get_capacity_commitment': ('name', ), + 'get_reservation': ('name', ), + 'list_assignments': ('parent', 'page_size', 'page_token', ), + 'list_capacity_commitments': ('parent', 'page_size', 'page_token', ), + 'list_reservations': ('parent', 'page_size', 'page_token', ), + 'merge_capacity_commitments': ('parent', 'capacity_commitment_ids', ), + 'move_assignment': ('name', 'destination_id', ), + 'search_assignments': ('parent', 'query', 'page_size', 'page_token', ), + 'split_capacity_commitment': ('name', 'slot_count', ), + 'update_bi_reservation': ('bi_reservation', 'update_mask', ), + 'update_capacity_commitment': ('capacity_commitment', 'update_mask', ), + 'update_reservation': ('reservation', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=reservationCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the reservation client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-reservation/scripts/readme-gen/readme_gen.py b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
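The small partition helper that both fixup scripts carry is easy to sanity-check on its own: it is stable (input order is preserved) and returns the matching items first:

def partition(predicate, iterator):
    # Same implementation as in the scripts above.
    results = ([], [])
    for i in iterator:
        results[int(predicate(i))].append(i)
    # Returns (true_list, false_list).
    return results[1], results[0]

assert partition(lambda n: n % 2 == 0, [1, 2, 3, 4, 5]) == ([2, 4], [1, 3, 5])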
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. 
You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-bigquery-reservation/setup.cfg b/packages/google-cloud-bigquery-reservation/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-bigquery-reservation/setup.py b/packages/google-cloud-bigquery-reservation/setup.py new file mode 100644 index 000000000000..c95baad8e0b8 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
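Looking back at the readme-gen machinery: readme_gen.py renders README.tmpl.rst from a YAML config whose keys the template reads (product, description, setup, samples, and so on). A hypothetical minimal config with invented values, loaded the modern way (the script's bare yaml.load(f) predates PyYAML's Loader requirement; yaml.safe_load is the current spelling):

import yaml

CONFIG = """\
product:
  name: Google BigQuery Reservation
  url: https://cloud.google.com/bigquery/docs/reservations-intro
  description: manages slot capacity and reservations.
description: Samples demonstrating basic reservation operations.
setup:
- auth
- install_deps
samples:
- name: Quickstart
  file: quickstart.py
  show_help: false
"""
config = yaml.safe_load(CONFIG)
print(config["product"]["name"])  # Google BigQuery Reservation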
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-bigquery-reservation" + + +description = "Google Cloud Bigquery Reservation API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/bigquery_reservation/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-bigquery-reservation/testing/.gitignore b/packages/google-cloud-bigquery-reservation/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.10.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
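A subtlety in the setup.py dependency list above: proto-plus appears twice, and the environment marker on the second entry raises the minimum version only on Python 3.11 and newer. The packaging library (assumed installed) can confirm how such a marker evaluates; the requirement string is copied from setup.py:

from packaging.requirements import Requirement

req = Requirement("proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'")
print(req.marker.evaluate({"python_version": "3.11"}))  # True
print(req.marker.evaluate({"python_version": "3.9"}))   # False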
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.11.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.12.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.7.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.8.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.9.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-bigquery-reservation/tests/__init__.py b/packages/google-cloud-bigquery-reservation/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
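These constraints files are consumed through pip's -c flag; the noxfile passes them to every session.install call. Only the 3.7 file pins versions, to the declared lower bounds, while the newer-Python files deliberately list bare names so current releases get tested. A hedged sketch of what the 3.7 unit session effectively runs, from the package directory:

import subprocess
import sys

# Install the package in editable mode with the lower-bound pins applied.
subprocess.run(
    [sys.executable, "-m", "pip", "install", "-e", ".",
     "-c", "testing/constraints-3.7.txt"],
    check=True,
)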
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-reservation/tests/system/__init__.py b/packages/google-cloud-bigquery-reservation/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-bigquery-reservation/tests/system/smoke_test.py b/packages/google-cloud-bigquery-reservation/tests/system/smoke_test.py new file mode 100644 index 000000000000..b655fef5b869 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/tests/system/smoke_test.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import bigquery_reservation_v1 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_reservations(project_id: str, transport: str): + client = bigquery_reservation_v1.ReservationServiceClient(transport=transport) + + parent = client.common_location_path(project_id, location="us-central1") + client.list_reservations(parent=parent) + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. + assert True diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/__init__.py b/packages/google-cloud-bigquery-reservation/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
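The smoke test above needs only PROJECT_ID and application credentials in the environment, and it runs once per transport thanks to the parametrize marker. Driven directly, it might look like this (the project id is a placeholder):

import os

import pytest

os.environ.setdefault("PROJECT_ID", "my-gcp-project")  # placeholder project
# Exercises both parametrized transports: grpc and rest.
raise SystemExit(pytest.main(["-q", "tests/system/smoke_test.py"]))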
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/__init__.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/__init__.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py new file mode 100644 index 000000000000..aa83f09ff2c0 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py @@ -0,0 +1,14016 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.bigquery_reservation_v1.services.reservation_service import ( + ReservationServiceAsyncClient, + ReservationServiceClient, + pagers, + transports, +) +from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation +from google.cloud.bigquery_reservation_v1.types import reservation + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
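For orientation before the test helpers: the endpoint logic exercised below maps a googleapis.com host to its mTLS twin by inserting an mtls label, and leaves unrelated hosts untouched. A hedged illustration against the installed client (the method is private, but it is exactly what these tests call):

from google.cloud.bigquery_reservation_v1.services.reservation_service import (
    ReservationServiceClient,
)

print(ReservationServiceClient._get_default_mtls_endpoint("example.googleapis.com"))
# example.mtls.googleapis.com
print(ReservationServiceClient._get_default_mtls_endpoint("api.example.com"))
# api.example.com  (non-googleapis hosts pass through unchanged)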
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ReservationServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ReservationServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ReservationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ReservationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ReservationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ReservationServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ReservationServiceClient, "grpc"), + (ReservationServiceAsyncClient, "grpc_asyncio"), + (ReservationServiceClient, "rest"), + ], +) +def test_reservation_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "bigqueryreservation.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigqueryreservation.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ReservationServiceGrpcTransport, "grpc"), + (transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ReservationServiceRestTransport, "rest"), + ], +) +def test_reservation_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ReservationServiceClient, "grpc"), + (ReservationServiceAsyncClient, "grpc_asyncio"), + (ReservationServiceClient, "rest"), + ], +) +def test_reservation_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, 
client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "bigqueryreservation.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigqueryreservation.googleapis.com" + ) + + +def test_reservation_service_client_get_transport_class(): + transport = ReservationServiceClient.get_transport_class() + available_transports = [ + transports.ReservationServiceGrpcTransport, + transports.ReservationServiceRestTransport, + ] + assert transport in available_transports + + transport = ReservationServiceClient.get_transport_class("grpc") + assert transport == transports.ReservationServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"), + ( + ReservationServiceAsyncClient, + transports.ReservationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ReservationServiceClient, transports.ReservationServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ReservationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ReservationServiceClient), +) +@mock.patch.object( + ReservationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ReservationServiceAsyncClient), +) +def test_reservation_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ReservationServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ReservationServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ReservationServiceClient, + transports.ReservationServiceGrpcTransport, + "grpc", + "true", + ), + ( + ReservationServiceAsyncClient, + transports.ReservationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ReservationServiceClient, + transports.ReservationServiceGrpcTransport, + "grpc", + "false", + ), + ( + ReservationServiceAsyncClient, + transports.ReservationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + ReservationServiceClient, + transports.ReservationServiceRestTransport, + "rest", + "true", + ), + ( + ReservationServiceClient, + transports.ReservationServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + ReservationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ReservationServiceClient), +) +@mock.patch.object( + ReservationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ReservationServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_reservation_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint 
autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
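Editorial note, not part of the change: these mTLS cases hinge on a `client_cert_source` callback carried by `ClientOptions` and gated by the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable (the tests' `client_cert_source_callback` plays the role `demo_cert_source` plays below). A minimal sketch, assuming only `google-api-core` is importable; the PEM byte strings are placeholders:

```python
from google.api_core import client_options


def demo_cert_source():
    # A real callback returns (cert_pem_bytes, key_pem_bytes) for mutual TLS.
    return b"-----BEGIN CERTIFICATE-----", b"-----BEGIN PRIVATE KEY-----"


# ClientOptions simply carries the callback; the transport decides whether to
# use it based on GOOGLE_API_USE_CLIENT_CERTIFICATE, as the tests verify.
options = client_options.ClientOptions(client_cert_source=demo_cert_source)
assert options.client_cert_source is demo_cert_source
```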
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ReservationServiceClient, ReservationServiceAsyncClient] +) +@mock.patch.object( + ReservationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ReservationServiceClient), +) +@mock.patch.object( + ReservationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ReservationServiceAsyncClient), +) +def test_reservation_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
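The `get_mtls_endpoint_and_cert_source` cases above resolve the same way without mocks once a callback is supplied directly. A condensed sketch, assuming `google-cloud-bigquery-reservation` is installed, and pinning both environment variables so ambient settings cannot interfere:

```python
import os
from unittest import mock

from google.api_core import client_options
from google.cloud.bigquery_reservation_v1 import ReservationServiceClient


def demo_cert_source():
    return b"cert", b"key"  # placeholder PEM bytes; never invoked here


env = {
    "GOOGLE_API_USE_CLIENT_CERTIFICATE": "true",
    "GOOGLE_API_USE_MTLS_ENDPOINT": "auto",
}
with mock.patch.dict(os.environ, env):
    endpoint, cert_source = ReservationServiceClient.get_mtls_endpoint_and_cert_source(
        client_options.ClientOptions(client_cert_source=demo_cert_source)
    )

# With certificates enabled and a cert source available, the mTLS endpoint wins.
assert endpoint == ReservationServiceClient.DEFAULT_MTLS_ENDPOINT
assert cert_source is demo_cert_source
```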
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"), + ( + ReservationServiceAsyncClient, + transports.ReservationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ReservationServiceClient, transports.ReservationServiceRestTransport, "rest"), + ], +) +def test_reservation_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ReservationServiceClient, + transports.ReservationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ReservationServiceAsyncClient, + transports.ReservationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + ReservationServiceClient, + transports.ReservationServiceRestTransport, + "rest", + None, + ), + ], +) +def test_reservation_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_reservation_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ReservationServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ReservationServiceClient, + transports.ReservationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ReservationServiceAsyncClient, + transports.ReservationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_reservation_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
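As the comment above says, the key property is that `credentials_file` routes through `google.auth.load_credentials_from_file` rather than application-default credentials. A runnable sketch of that contract; `"credentials.json"` is a hypothetical path, and the loader is patched so nothing is read from disk:

```python
from unittest import mock

import google.auth
from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud.bigquery_reservation_v1 import ReservationServiceClient

file_creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
    google.auth, "load_credentials_from_file", autospec=True
) as load_creds:
    load_creds.return_value = (file_creds, None)
    client = ReservationServiceClient(
        client_options=client_options.ClientOptions(
            credentials_file="credentials.json"  # hypothetical path
        )
    )
    # The file loader, not google.auth.default(), supplied the credentials.
    load_creds.assert_called_once()
```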
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigqueryreservation.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=None, + default_host="bigqueryreservation.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcbr_reservation.CreateReservationRequest, + dict, + ], +) +def test_create_reservation(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + ) + response = client.create_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcbr_reservation.CreateReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbr_reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == gcbr_reservation.Edition.STANDARD + + +def test_create_reservation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
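Before the per-RPC tests, a quick orientation on the message types they construct: these are proto-plus wrappers, every field is keyword-assignable, and every field is optional at the runtime level (which is why an empty `request_type()` is a valid request). Resource names below are illustrative:

```python
from google.cloud.bigquery_reservation_v1 import (
    CreateReservationRequest,
    Reservation,
)

request = CreateReservationRequest(
    parent="projects/my-project/locations/US",  # illustrative parent
    reservation_id="prod",
    reservation=Reservation(slot_capacity=100, ignore_idle_slots=True),
)
assert request.reservation.slot_capacity == 100
assert request.reservation_id == "prod"
```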
+ with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + client.create_reservation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcbr_reservation.CreateReservationRequest() + + +@pytest.mark.asyncio +async def test_create_reservation_async( + transport: str = "grpc_asyncio", + request_type=gcbr_reservation.CreateReservationRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + ) + ) + response = await client.create_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcbr_reservation.CreateReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbr_reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == gcbr_reservation.Edition.STANDARD + + +@pytest.mark.asyncio +async def test_create_reservation_async_from_dict(): + await test_create_reservation_async(request_type=dict) + + +def test_create_reservation_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcbr_reservation.CreateReservationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + call.return_value = gcbr_reservation.Reservation() + client.create_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_reservation_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcbr_reservation.CreateReservationRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
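The field-header assertions in the surrounding tests check the `x-goog-request-params` metadata entry that the client derives from URI-bound request fields. It comes from `google.api_core`'s routing-header helper, which can be exercised on its own:

```python
from google.api_core import gapic_v1

# The client builds this metadata pair from request.parent before the RPC.
md = gapic_v1.routing_header.to_grpc_metadata((("parent", "parent_value"),))
assert md == ("x-goog-request-params", "parent=parent_value")
```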
+ with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbr_reservation.Reservation() + ) + await client.create_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_reservation_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbr_reservation.Reservation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_reservation( + parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + reservation_id="reservation_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].reservation + mock_val = gcbr_reservation.Reservation(name="name_value") + assert arg == mock_val + arg = args[0].reservation_id + mock_val = "reservation_id_value" + assert arg == mock_val + + +def test_create_reservation_flattened_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_reservation( + gcbr_reservation.CreateReservationRequest(), + parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + reservation_id="reservation_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_reservation_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbr_reservation.Reservation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbr_reservation.Reservation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_reservation( + parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + reservation_id="reservation_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
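The flattened-error tests pin down a client-side rule: a request object and flattened keyword fields are mutually exclusive, and the `ValueError` is raised before any RPC is attempted. A sketch using anonymous credentials (the channel is created lazily, so no network traffic occurs):

```python
from google.auth import credentials as ga_credentials
from google.cloud.bigquery_reservation_v1 import (
    CreateReservationRequest,
    ReservationServiceClient,
)

client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials())
try:
    client.create_reservation(CreateReservationRequest(), parent="parent_value")
except ValueError:
    pass  # raised client-side: request object and flattened fields conflict
else:
    raise AssertionError("expected ValueError")
```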
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].reservation + mock_val = gcbr_reservation.Reservation(name="name_value") + assert arg == mock_val + arg = args[0].reservation_id + mock_val = "reservation_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_reservation_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_reservation( + gcbr_reservation.CreateReservationRequest(), + parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + reservation_id="reservation_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.ListReservationsRequest, + dict, + ], +) +def test_list_reservations(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ListReservationsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_reservations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListReservationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReservationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_reservations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + client.list_reservations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListReservationsRequest() + + +@pytest.mark.asyncio +async def test_list_reservations_async( + transport: str = "grpc_asyncio", request_type=reservation.ListReservationsRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListReservationsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_reservations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListReservationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReservationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_reservations_async_from_dict(): + await test_list_reservations_async(request_type=dict) + + +def test_list_reservations_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.ListReservationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + call.return_value = reservation.ListReservationsResponse() + client.list_reservations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_reservations_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.ListReservationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListReservationsResponse() + ) + await client.list_reservations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_reservations_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ListReservationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_reservations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_reservations_flattened_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_reservations( + reservation.ListReservationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_reservations_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ListReservationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListReservationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_reservations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_reservations_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_reservations( + reservation.ListReservationsRequest(), + parent="parent_value", + ) + + +def test_list_reservations_pager(transport_name: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + reservation.Reservation(), + ], + next_page_token="abc", + ), + reservation.ListReservationsResponse( + reservations=[], + next_page_token="def", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + ], + next_page_token="ghi", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_reservations(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Reservation) for i in results) + + +def test_list_reservations_pages(transport_name: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
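The pager tests around here stub `__call__` on the transport's multicallable exactly as the unary tests do; the returned pager then re-invokes that stub once per `next_page_token`. The same pattern condensed into a runnable form (two fake pages, three items):

```python
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.bigquery_reservation_v1 import (
    ListReservationsResponse,
    Reservation,
    ReservationServiceClient,
)

client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials())
with mock.patch.object(type(client.transport.list_reservations), "__call__") as call:
    call.side_effect = (
        ListReservationsResponse(
            reservations=[Reservation(), Reservation()], next_page_token="abc"
        ),
        ListReservationsResponse(reservations=[Reservation()]),  # final page
    )
    # Iterating the pager transparently fetches the second page.
    assert len(list(client.list_reservations(request={}))) == 3
```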
+ with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + reservation.Reservation(), + ], + next_page_token="abc", + ), + reservation.ListReservationsResponse( + reservations=[], + next_page_token="def", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + ], + next_page_token="ghi", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_reservations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_reservations_async_pager(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + reservation.Reservation(), + ], + next_page_token="abc", + ), + reservation.ListReservationsResponse( + reservations=[], + next_page_token="def", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + ], + next_page_token="ghi", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_reservations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, reservation.Reservation) for i in responses) + + +@pytest.mark.asyncio +async def test_list_reservations_async_pages(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + reservation.Reservation(), + ], + next_page_token="abc", + ), + reservation.ListReservationsResponse( + reservations=[], + next_page_token="def", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + ], + next_page_token="ghi", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_reservations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.GetReservationRequest, + dict, + ], +) +def test_get_reservation(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + response = client.get_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +def test_get_reservation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + client.get_reservation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetReservationRequest() + + +@pytest.mark.asyncio +async def test_get_reservation_async( + transport: str = "grpc_asyncio", request_type=reservation.GetReservationRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
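The `name` strings in the get/delete tests stand in for full resource names; the generated client also ships path helpers that build and parse them. A sketch with placeholder IDs:

```python
from google.cloud.bigquery_reservation_v1 import ReservationServiceClient

name = ReservationServiceClient.reservation_path("my-project", "US", "prod")
assert name == "projects/my-project/locations/US/reservations/prod"
assert ReservationServiceClient.parse_reservation_path(name) == {
    "project": "my-project",
    "location": "US",
    "reservation": "prod",
}
```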
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + ) + response = await client.get_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +@pytest.mark.asyncio +async def test_get_reservation_async_from_dict(): + await test_get_reservation_async(request_type=dict) + + +def test_get_reservation_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.GetReservationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + call.return_value = reservation.Reservation() + client.get_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_reservation_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.GetReservationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Reservation() + ) + await client.get_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_reservation_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = reservation.Reservation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_reservation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_reservation_flattened_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_reservation( + reservation.GetReservationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_reservation_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.Reservation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Reservation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_reservation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_reservation_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_reservation( + reservation.GetReservationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.DeleteReservationRequest, + dict, + ], +) +def test_delete_reservation(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.DeleteReservationRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_reservation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
request == None and no flattened fields passed, work.
+ client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + client.delete_reservation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.DeleteReservationRequest() + + +@pytest.mark.asyncio +async def test_delete_reservation_async( + transport: str = "grpc_asyncio", request_type=reservation.DeleteReservationRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.DeleteReservationRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_reservation_async_from_dict(): + await test_delete_reservation_async(request_type=dict) + + +def test_delete_reservation_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.DeleteReservationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + call.return_value = None + client.delete_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_reservation_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.DeleteReservationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
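`DeleteReservation` maps to `google.protobuf.Empty`, which the client surfaces as a `None` return; that is what the assertions above and below check. Condensed (transport stubbed, no network):

```python
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.bigquery_reservation_v1 import ReservationServiceClient

client = ReservationServiceClient(credentials=ga_credentials.AnonymousCredentials())
with mock.patch.object(type(client.transport.delete_reservation), "__call__") as call:
    call.return_value = None  # an Empty response surfaces as None
    response = client.delete_reservation(name="name_value")
assert response is None
```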
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_reservation_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_reservation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_reservation_flattened_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_reservation( + reservation.DeleteReservationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_reservation_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_reservation( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_reservation_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_reservation( + reservation.DeleteReservationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcbr_reservation.UpdateReservationRequest, + dict, + ], +) +def test_update_reservation(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + ) + response = client.update_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcbr_reservation.UpdateReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbr_reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == gcbr_reservation.Edition.STANDARD + + +def test_update_reservation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + client.update_reservation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcbr_reservation.UpdateReservationRequest() + + +@pytest.mark.asyncio +async def test_update_reservation_async( + transport: str = "grpc_asyncio", + request_type=gcbr_reservation.UpdateReservationRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + ) + ) + response = await client.update_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcbr_reservation.UpdateReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbr_reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == gcbr_reservation.Edition.STANDARD + + +@pytest.mark.asyncio +async def test_update_reservation_async_from_dict(): + await test_update_reservation_async(request_type=dict) + + +def test_update_reservation_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcbr_reservation.UpdateReservationRequest() + + request.reservation.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + call.return_value = gcbr_reservation.Reservation() + client.update_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "reservation.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_reservation_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcbr_reservation.UpdateReservationRequest() + + request.reservation.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbr_reservation.Reservation() + ) + await client.update_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "reservation.name=name_value", + ) in kw["metadata"] + + +def test_update_reservation_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbr_reservation.Reservation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_reservation( + reservation=gcbr_reservation.Reservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].reservation + mock_val = gcbr_reservation.Reservation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_reservation_flattened_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_reservation( + gcbr_reservation.UpdateReservationRequest(), + reservation=gcbr_reservation.Reservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_reservation_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
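In the flattened update calls being tested here, the two keyword fields assemble into an `UpdateReservationRequest`, and the `FieldMask` names which reservation fields the server should touch. Illustrative values:

```python
from google.protobuf import field_mask_pb2

from google.cloud.bigquery_reservation_v1 import (
    Reservation,
    UpdateReservationRequest,
)

request = UpdateReservationRequest(
    reservation=Reservation(name="name_value", slot_capacity=200),
    update_mask=field_mask_pb2.FieldMask(paths=["slot_capacity"]),
)
# Only the masked fields are meant to change server-side.
assert list(request.update_mask.paths) == ["slot_capacity"]
```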
+ with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcbr_reservation.Reservation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbr_reservation.Reservation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_reservation( + reservation=gcbr_reservation.Reservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].reservation + mock_val = gcbr_reservation.Reservation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_reservation_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_reservation( + gcbr_reservation.UpdateReservationRequest(), + reservation=gcbr_reservation.Reservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.CreateCapacityCommitmentRequest, + dict, + ], +) +def test_create_capacity_commitment(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + response = client.create_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.CreateCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +def test_create_capacity_commitment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_capacity_commitment), "__call__" + ) as call: + client.create_capacity_commitment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.CreateCapacityCommitmentRequest() + + +@pytest.mark.asyncio +async def test_create_capacity_commitment_async( + transport: str = "grpc_asyncio", + request_type=reservation.CreateCapacityCommitmentRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + ) + response = await client.create_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.CreateCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +@pytest.mark.asyncio +async def test_create_capacity_commitment_async_from_dict(): + await test_create_capacity_commitment_async(request_type=dict) + + +def test_create_capacity_commitment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.CreateCapacityCommitmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_capacity_commitment), "__call__" + ) as call: + call.return_value = reservation.CapacityCommitment() + client.create_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
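+    # Each mock_calls entry unpacks as (name, args, kwargs); the routing
+    # information travels in the "x-goog-request-params" entry of the
+    # `metadata` keyword argument, e.g. to_grpc_metadata((("parent", "p"),))
+    # produces roughly ("x-goog-request-params", "parent=p").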
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_capacity_commitment_field_headers_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = reservation.CreateCapacityCommitmentRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_capacity_commitment), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.CapacityCommitment()
+        )
+        await client.create_capacity_commitment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_capacity_commitment_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.CapacityCommitment()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_capacity_commitment(
+            parent="parent_value",
+            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].capacity_commitment
+        mock_val = reservation.CapacityCommitment(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_capacity_commitment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_capacity_commitment(
+            reservation.CreateCapacityCommitmentRequest(),
+            parent="parent_value",
+            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_capacity_commitment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.CapacityCommitment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.create_capacity_commitment( + parent="parent_value", + capacity_commitment=reservation.CapacityCommitment(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].capacity_commitment + mock_val = reservation.CapacityCommitment(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_capacity_commitment_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_capacity_commitment( + reservation.CreateCapacityCommitmentRequest(), + parent="parent_value", + capacity_commitment=reservation.CapacityCommitment(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.ListCapacityCommitmentsRequest, + dict, + ], +) +def test_list_capacity_commitments(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_capacity_commitments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ListCapacityCommitmentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_capacity_commitments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListCapacityCommitmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCapacityCommitmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_capacity_commitments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_capacity_commitments), "__call__" + ) as call: + client.list_capacity_commitments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListCapacityCommitmentsRequest() + + +@pytest.mark.asyncio +async def test_list_capacity_commitments_async( + transport: str = "grpc_asyncio", + request_type=reservation.ListCapacityCommitmentsRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
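+    # Patching __call__ on the type of the transport method replaces the
+    # underlying gRPC multicallable itself, letting the test capture the
+    # request without opening a real channel.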
+ with mock.patch.object( + type(client.transport.list_capacity_commitments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListCapacityCommitmentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_capacity_commitments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListCapacityCommitmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCapacityCommitmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_capacity_commitments_async_from_dict(): + await test_list_capacity_commitments_async(request_type=dict) + + +def test_list_capacity_commitments_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.ListCapacityCommitmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_capacity_commitments), "__call__" + ) as call: + call.return_value = reservation.ListCapacityCommitmentsResponse() + client.list_capacity_commitments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_capacity_commitments_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.ListCapacityCommitmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_capacity_commitments), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListCapacityCommitmentsResponse() + ) + await client.list_capacity_commitments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_capacity_commitments_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_capacity_commitments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ListCapacityCommitmentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
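+        # The keyword arguments below are coerced into a
+        # ListCapacityCommitmentsRequest by the client; combining them with an
+        # explicit request object raises ValueError (see the *_flattened_error
+        # test below).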
+        client.list_capacity_commitments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_capacity_commitments_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_capacity_commitments(
+            reservation.ListCapacityCommitmentsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_capacity_commitments_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_capacity_commitments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.ListCapacityCommitmentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_capacity_commitments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_capacity_commitments_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_capacity_commitments(
+            reservation.ListCapacityCommitmentsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_capacity_commitments_pager(transport_name: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_capacity_commitments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
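+        # side_effect yields one response per underlying RPC; the trailing
+        # RuntimeError would surface if the pager fetched past the final page,
+        # whose empty next_page_token ends iteration.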
+        call.side_effect = (
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[],
+                next_page_token="def",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_capacity_commitments(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, reservation.CapacityCommitment) for i in results)
+
+
+def test_list_capacity_commitments_pages(transport_name: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_capacity_commitments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[],
+                next_page_token="def",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_capacity_commitments(request={}).pages)
+    for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+        assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_capacity_commitments_async_pager():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_capacity_commitments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
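+        # __call__ was patched with new_callable=mock.AsyncMock above, so each
+        # awaited invocation consumes the next side_effect entry.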
+        call.side_effect = (
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[],
+                next_page_token="def",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_capacity_commitments(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, reservation.CapacityCommitment) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_capacity_commitments_async_pages():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_capacity_commitments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[],
+                next_page_token="def",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.ListCapacityCommitmentsResponse(
+                capacity_commitments=[
+                    reservation.CapacityCommitment(),
+                    reservation.CapacityCommitment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_capacity_commitments(request={})
+        ).pages:
+            pages.append(page_)
+    for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+        assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.GetCapacityCommitmentRequest,
+        dict,
+    ],
+)
+def test_get_capacity_commitment(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
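+        # The field values below are arbitrary sentinels; the test only checks
+        # that each one survives the round trip through the proto-plus wrapper.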
+ call.return_value = reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + response = client.get_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +def test_get_capacity_commitment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_capacity_commitment), "__call__" + ) as call: + client.get_capacity_commitment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetCapacityCommitmentRequest() + + +@pytest.mark.asyncio +async def test_get_capacity_commitment_async( + transport: str = "grpc_asyncio", + request_type=reservation.GetCapacityCommitmentRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + ) + response = await client.get_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. 
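+    # proto-plus messages expose scalar, bool, and enum fields as Python-native
+    # values, so plain equality and identity assertions suffice below.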
+ assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +@pytest.mark.asyncio +async def test_get_capacity_commitment_async_from_dict(): + await test_get_capacity_commitment_async(request_type=dict) + + +def test_get_capacity_commitment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.GetCapacityCommitmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_capacity_commitment), "__call__" + ) as call: + call.return_value = reservation.CapacityCommitment() + client.get_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_capacity_commitment_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.GetCapacityCommitmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_capacity_commitment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment() + ) + await client.get_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_capacity_commitment_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.CapacityCommitment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_capacity_commitment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_capacity_commitment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_capacity_commitment(
+            reservation.GetCapacityCommitmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_capacity_commitment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.CapacityCommitment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_capacity_commitment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_capacity_commitment_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_capacity_commitment(
+            reservation.GetCapacityCommitmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.DeleteCapacityCommitmentRequest,
+        dict,
+    ],
+)
+def test_delete_capacity_commitment(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_capacity_commitment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == reservation.DeleteCapacityCommitmentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_capacity_commitment_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
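+    # call.assert_called() below only proves the stub was invoked; the args[0]
+    # check confirms the client synthesized a default request message.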
+ with mock.patch.object( + type(client.transport.delete_capacity_commitment), "__call__" + ) as call: + client.delete_capacity_commitment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.DeleteCapacityCommitmentRequest() + + +@pytest.mark.asyncio +async def test_delete_capacity_commitment_async( + transport: str = "grpc_asyncio", + request_type=reservation.DeleteCapacityCommitmentRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.DeleteCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_capacity_commitment_async_from_dict(): + await test_delete_capacity_commitment_async(request_type=dict) + + +def test_delete_capacity_commitment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.DeleteCapacityCommitmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_capacity_commitment), "__call__" + ) as call: + call.return_value = None + client.delete_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_capacity_commitment_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.DeleteCapacityCommitmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_capacity_commitment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_capacity_commitment_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_capacity_commitment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_capacity_commitment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_capacity_commitment(
+            reservation.DeleteCapacityCommitmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_capacity_commitment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_capacity_commitment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_capacity_commitment_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_capacity_commitment(
+            reservation.DeleteCapacityCommitmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.UpdateCapacityCommitmentRequest,
+        dict,
+    ],
+)
+def test_update_capacity_commitment(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + response = client.update_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +def test_update_capacity_commitment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_capacity_commitment), "__call__" + ) as call: + client.update_capacity_commitment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateCapacityCommitmentRequest() + + +@pytest.mark.asyncio +async def test_update_capacity_commitment_async( + transport: str = "grpc_asyncio", + request_type=reservation.UpdateCapacityCommitmentRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + ) + response = await client.update_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +@pytest.mark.asyncio +async def test_update_capacity_commitment_async_from_dict(): + await test_update_capacity_commitment_async(request_type=dict) + + +def test_update_capacity_commitment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.UpdateCapacityCommitmentRequest() + + request.capacity_commitment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_capacity_commitment), "__call__" + ) as call: + call.return_value = reservation.CapacityCommitment() + client.update_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "capacity_commitment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_capacity_commitment_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.UpdateCapacityCommitmentRequest() + + request.capacity_commitment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_capacity_commitment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment() + ) + await client.update_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "capacity_commitment.name=name_value", + ) in kw["metadata"] + + +def test_update_capacity_commitment_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.CapacityCommitment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_capacity_commitment( + capacity_commitment=reservation.CapacityCommitment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].capacity_commitment
+        mock_val = reservation.CapacityCommitment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_capacity_commitment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_capacity_commitment(
+            reservation.UpdateCapacityCommitmentRequest(),
+            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_capacity_commitment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.CapacityCommitment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_capacity_commitment(
+            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].capacity_commitment
+        mock_val = reservation.CapacityCommitment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_capacity_commitment_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_capacity_commitment(
+            reservation.UpdateCapacityCommitmentRequest(),
+            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.SplitCapacityCommitmentRequest,
+        dict,
+    ],
+)
+def test_split_capacity_commitment(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.split_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = reservation.SplitCapacityCommitmentResponse() + response = client.split_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.SplitCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.SplitCapacityCommitmentResponse) + + +def test_split_capacity_commitment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.split_capacity_commitment), "__call__" + ) as call: + client.split_capacity_commitment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.SplitCapacityCommitmentRequest() + + +@pytest.mark.asyncio +async def test_split_capacity_commitment_async( + transport: str = "grpc_asyncio", + request_type=reservation.SplitCapacityCommitmentRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.split_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.SplitCapacityCommitmentResponse() + ) + response = await client.split_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.SplitCapacityCommitmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.SplitCapacityCommitmentResponse) + + +@pytest.mark.asyncio +async def test_split_capacity_commitment_async_from_dict(): + await test_split_capacity_commitment_async(request_type=dict) + + +def test_split_capacity_commitment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.SplitCapacityCommitmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.split_capacity_commitment), "__call__" + ) as call: + call.return_value = reservation.SplitCapacityCommitmentResponse() + client.split_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_split_capacity_commitment_field_headers_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = reservation.SplitCapacityCommitmentRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.split_capacity_commitment), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.SplitCapacityCommitmentResponse()
+        )
+        await client.split_capacity_commitment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_split_capacity_commitment_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.split_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.SplitCapacityCommitmentResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.split_capacity_commitment(
+            name="name_value",
+            slot_count=1098,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].slot_count
+        mock_val = 1098
+        assert arg == mock_val
+
+
+def test_split_capacity_commitment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.split_capacity_commitment(
+            reservation.SplitCapacityCommitmentRequest(),
+            name="name_value",
+            slot_count=1098,
+        )
+
+
+@pytest.mark.asyncio
+async def test_split_capacity_commitment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.split_capacity_commitment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.SplitCapacityCommitmentResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.split_capacity_commitment(
+            name="name_value",
+            slot_count=1098,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
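+        # The async variants assert only that at least one call was recorded
+        # (a truthy length), a looser check than the sync tests' `== 1`.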
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].slot_count + mock_val = 1098 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_split_capacity_commitment_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.split_capacity_commitment( + reservation.SplitCapacityCommitmentRequest(), + name="name_value", + slot_count=1098, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.MergeCapacityCommitmentsRequest, + dict, + ], +) +def test_merge_capacity_commitments(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.merge_capacity_commitments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + response = client.merge_capacity_commitments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.MergeCapacityCommitmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +def test_merge_capacity_commitments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.merge_capacity_commitments), "__call__" + ) as call: + client.merge_capacity_commitments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.MergeCapacityCommitmentsRequest() + + +@pytest.mark.asyncio +async def test_merge_capacity_commitments_async( + transport: str = "grpc_asyncio", + request_type=reservation.MergeCapacityCommitmentsRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.merge_capacity_commitments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + ) + response = await client.merge_capacity_commitments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.MergeCapacityCommitmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +@pytest.mark.asyncio +async def test_merge_capacity_commitments_async_from_dict(): + await test_merge_capacity_commitments_async(request_type=dict) + + +def test_merge_capacity_commitments_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.MergeCapacityCommitmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.merge_capacity_commitments), "__call__" + ) as call: + call.return_value = reservation.CapacityCommitment() + client.merge_capacity_commitments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
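+    # (The client copies request.parent into the x-goog-request-params
+    # header and sends it through the call's metadata keyword argument,
+    # which is what kw["metadata"] inspects below.)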
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_merge_capacity_commitments_field_headers_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = reservation.MergeCapacityCommitmentsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.merge_capacity_commitments), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.CapacityCommitment()
+        )
+        await client.merge_capacity_commitments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_merge_capacity_commitments_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.merge_capacity_commitments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.CapacityCommitment()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.merge_capacity_commitments(
+            parent="parent_value",
+            capacity_commitment_ids=["capacity_commitment_ids_value"],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].capacity_commitment_ids
+        mock_val = ["capacity_commitment_ids_value"]
+        assert arg == mock_val
+
+
+def test_merge_capacity_commitments_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.merge_capacity_commitments(
+            reservation.MergeCapacityCommitmentsRequest(),
+            parent="parent_value",
+            capacity_commitment_ids=["capacity_commitment_ids_value"],
+        )
+
+
+@pytest.mark.asyncio
+async def test_merge_capacity_commitments_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.merge_capacity_commitments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.CapacityCommitment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
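+        # (The flattened kwargs below are folded into a single
+        # MergeCapacityCommitmentsRequest before the stub is invoked.)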
+ response = await client.merge_capacity_commitments( + parent="parent_value", + capacity_commitment_ids=["capacity_commitment_ids_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].capacity_commitment_ids + mock_val = ["capacity_commitment_ids_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_merge_capacity_commitments_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.merge_capacity_commitments( + reservation.MergeCapacityCommitmentsRequest(), + parent="parent_value", + capacity_commitment_ids=["capacity_commitment_ids_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.CreateAssignmentRequest, + dict, + ], +) +def test_create_assignment(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + ) + response = client.create_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.CreateAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +def test_create_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_assignment), "__call__" + ) as call: + client.create_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.CreateAssignmentRequest() + + +@pytest.mark.asyncio +async def test_create_assignment_async( + transport: str = "grpc_asyncio", request_type=reservation.CreateAssignmentRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
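+    # (request_type may be the proto class or, via the _from_dict variant
+    # below, a plain dict; the GAPIC surface accepts both.)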
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + ) + ) + response = await client.create_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.CreateAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +@pytest.mark.asyncio +async def test_create_assignment_async_from_dict(): + await test_create_assignment_async(request_type=dict) + + +def test_create_assignment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.CreateAssignmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_assignment), "__call__" + ) as call: + call.return_value = reservation.Assignment() + client.create_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_assignment_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.CreateAssignmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Assignment() + ) + await client.create_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_assignment_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = reservation.Assignment()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_assignment(
+            parent="parent_value",
+            assignment=reservation.Assignment(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].assignment
+        mock_val = reservation.Assignment(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_assignment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_assignment(
+            reservation.CreateAssignmentRequest(),
+            parent="parent_value",
+            assignment=reservation.Assignment(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_assignment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.Assignment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_assignment(
+            parent="parent_value",
+            assignment=reservation.Assignment(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].assignment
+        mock_val = reservation.Assignment(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_assignment_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_assignment(
+            reservation.CreateAssignmentRequest(),
+            parent="parent_value",
+            assignment=reservation.Assignment(name="name_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.ListAssignmentsRequest,
+        dict,
+    ],
+)
+def test_list_assignments(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.ListAssignmentsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_assignments(request)
+
+        # Establish that the underlying gRPC stub method was called.
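+        # (args[0] compares equal to a fresh default request because proto
+        # messages compare by field value and no fields were set.)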
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListAssignmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_assignments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_assignments), "__call__") as call: + client.list_assignments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListAssignmentsRequest() + + +@pytest.mark.asyncio +async def test_list_assignments_async( + transport: str = "grpc_asyncio", request_type=reservation.ListAssignmentsRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_assignments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListAssignmentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListAssignmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssignmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_assignments_async_from_dict(): + await test_list_assignments_async(request_type=dict) + + +def test_list_assignments_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.ListAssignmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_assignments), "__call__") as call: + call.return_value = reservation.ListAssignmentsResponse() + client.list_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_assignments_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = reservation.ListAssignmentsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.ListAssignmentsResponse()
+        )
+        await client.list_assignments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_assignments_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.ListAssignmentsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_assignments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_assignments_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_assignments(
+            reservation.ListAssignmentsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_assignments_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.ListAssignmentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_assignments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_assignments_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
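+    # (The mutual exclusion is enforced client-side, so no RPC is
+    # attempted and nothing needs to be mocked here.)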
+    with pytest.raises(ValueError):
+        await client.list_assignments(
+            reservation.ListAssignmentsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_assignments_pager(transport_name: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_assignments(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, reservation.Assignment) for i in results)
+
+
+def test_list_assignments_pages(transport_name: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_assignments), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_assignments(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_assignments_async_pager():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_assignments), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
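+        # (side_effect returns one response per call, in order; the trailing
+        # RuntimeError would surface if the pager fetched past the last page.)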
+        call.side_effect = (
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_assignments(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, reservation.Assignment) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_assignments_async_pages():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_assignments), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.ListAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_assignments(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.DeleteAssignmentRequest,
+        dict,
+    ],
+)
+def test_delete_assignment(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_assignment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == reservation.DeleteAssignmentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_assignment_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+ client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_assignment), "__call__" + ) as call: + client.delete_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.DeleteAssignmentRequest() + + +@pytest.mark.asyncio +async def test_delete_assignment_async( + transport: str = "grpc_asyncio", request_type=reservation.DeleteAssignmentRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.DeleteAssignmentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_assignment_async_from_dict(): + await test_delete_assignment_async(request_type=dict) + + +def test_delete_assignment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.DeleteAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_assignment), "__call__" + ) as call: + call.return_value = None + client.delete_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_assignment_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.DeleteAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_assignment_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_assignment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_assignment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_assignment(
+            reservation.DeleteAssignmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_assignment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_assignment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_assignment_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_assignment(
+            reservation.DeleteAssignmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.SearchAssignmentsRequest,
+        dict,
+    ],
+)
+def test_search_assignments(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.SearchAssignmentsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.search_assignments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.SearchAssignmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_search_assignments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_assignments), "__call__" + ) as call: + client.search_assignments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.SearchAssignmentsRequest() + + +@pytest.mark.asyncio +async def test_search_assignments_async( + transport: str = "grpc_asyncio", request_type=reservation.SearchAssignmentsRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.SearchAssignmentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.search_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.SearchAssignmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAssignmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_search_assignments_async_from_dict(): + await test_search_assignments_async(request_type=dict) + + +def test_search_assignments_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.SearchAssignmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_assignments), "__call__" + ) as call: + call.return_value = reservation.SearchAssignmentsResponse() + client.search_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_assignments_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
+    # Set these to a non-empty value.
+    request = reservation.SearchAssignmentsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_assignments), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.SearchAssignmentsResponse()
+        )
+        await client.search_assignments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_search_assignments_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.SearchAssignmentsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.search_assignments(
+            parent="parent_value",
+            query="query_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].query
+        mock_val = "query_value"
+        assert arg == mock_val
+
+
+def test_search_assignments_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.search_assignments(
+            reservation.SearchAssignmentsRequest(),
+            parent="parent_value",
+            query="query_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_search_assignments_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.SearchAssignmentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.search_assignments(
+            parent="parent_value",
+            query="query_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].query
+        mock_val = "query_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_search_assignments_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.search_assignments(
+            reservation.SearchAssignmentsRequest(),
+            parent="parent_value",
+            query="query_value",
+        )
+
+
+def test_search_assignments_pager(transport_name: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_assignments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.search_assignments(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, reservation.Assignment) for i in results)
+
+
+def test_search_assignments_pages(transport_name: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_assignments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.search_assignments(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_search_assignments_async_pager():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_assignments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.search_assignments(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, reservation.Assignment) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_search_assignments_async_pages():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_assignments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.SearchAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.search_assignments(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.SearchAllAssignmentsRequest,
+        dict,
+    ],
+)
+def test_search_all_assignments(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_all_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.SearchAllAssignmentsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.search_all_assignments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == reservation.SearchAllAssignmentsRequest()
+
+    # Establish that the response is the type that we expect.
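+    # (The sync surface wraps the raw response in a pager; attribute reads
+    # such as next_page_token are proxied to the underlying response.)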
+ assert isinstance(response, pagers.SearchAllAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_search_all_assignments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_assignments), "__call__" + ) as call: + client.search_all_assignments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.SearchAllAssignmentsRequest() + + +@pytest.mark.asyncio +async def test_search_all_assignments_async( + transport: str = "grpc_asyncio", + request_type=reservation.SearchAllAssignmentsRequest, +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.SearchAllAssignmentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.search_all_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.SearchAllAssignmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAllAssignmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_search_all_assignments_async_from_dict(): + await test_search_all_assignments_async(request_type=dict) + + +def test_search_all_assignments_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.SearchAllAssignmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_assignments), "__call__" + ) as call: + call.return_value = reservation.SearchAllAssignmentsResponse() + client.search_all_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_all_assignments_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = reservation.SearchAllAssignmentsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_all_assignments), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.SearchAllAssignmentsResponse()
+        )
+        await client.search_all_assignments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_search_all_assignments_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_all_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.SearchAllAssignmentsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.search_all_assignments(
+            parent="parent_value",
+            query="query_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].query
+        mock_val = "query_value"
+        assert arg == mock_val
+
+
+def test_search_all_assignments_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.search_all_assignments(
+            reservation.SearchAllAssignmentsRequest(),
+            parent="parent_value",
+            query="query_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_search_all_assignments_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_all_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.SearchAllAssignmentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.search_all_assignments(
+            parent="parent_value",
+            query="query_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].query
+        mock_val = "query_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_search_all_assignments_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.search_all_assignments(
+            reservation.SearchAllAssignmentsRequest(),
+            parent="parent_value",
+            query="query_value",
+        )
+
+
+def test_search_all_assignments_pager(transport_name: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_all_assignments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.search_all_assignments(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, reservation.Assignment) for i in results)
+
+
+def test_search_all_assignments_pages(transport_name: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_all_assignments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.search_all_assignments(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_search_all_assignments_async_pager():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_all_assignments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
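+        # Note: ``mock`` yields one element of ``side_effect`` per underlying
+        # RPC, so the tuple below simulates four pages; the trailing
+        # ``RuntimeError`` acts as a sentinel that fails the test if the pager
+        # ever requests more pages than the fixture provides.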
+        call.side_effect = (
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.search_all_assignments(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, reservation.Assignment) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_search_all_assignments_async_pages():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.search_all_assignments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+                next_page_token="abc",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[],
+                next_page_token="def",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            reservation.SearchAllAssignmentsResponse(
+                assignments=[
+                    reservation.Assignment(),
+                    reservation.Assignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.search_all_assignments(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.MoveAssignmentRequest,
+        dict,
+    ],
+)
+def test_move_assignment(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.Assignment(
+            name="name_value",
+            assignee="assignee_value",
+            job_type=reservation.Assignment.JobType.PIPELINE,
+            state=reservation.Assignment.State.PENDING,
+        )
+        response = client.move_assignment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == reservation.MoveAssignmentRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +def test_move_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_assignment), "__call__") as call: + client.move_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.MoveAssignmentRequest() + + +@pytest.mark.asyncio +async def test_move_assignment_async( + transport: str = "grpc_asyncio", request_type=reservation.MoveAssignmentRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_assignment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + ) + ) + response = await client.move_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.MoveAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +@pytest.mark.asyncio +async def test_move_assignment_async_from_dict(): + await test_move_assignment_async(request_type=dict) + + +def test_move_assignment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.MoveAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.move_assignment), "__call__") as call: + call.return_value = reservation.Assignment() + client.move_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
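+    # Note: each ``mock_calls`` entry unpacks to ``(name, args, kwargs)``; the
+    # membership check below tolerates additional metadata entries and only
+    # requires that the routing-header tuple be present.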
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_move_assignment_field_headers_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = reservation.MoveAssignmentRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.Assignment()
+        )
+        await client.move_assignment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_move_assignment_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.Assignment()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.move_assignment(
+            name="name_value",
+            destination_id="destination_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].destination_id
+        mock_val = "destination_id_value"
+        assert arg == mock_val
+
+
+def test_move_assignment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.move_assignment(
+            reservation.MoveAssignmentRequest(),
+            name="name_value",
+            destination_id="destination_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_move_assignment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.move_assignment), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.Assignment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.move_assignment(
+            name="name_value",
+            destination_id="destination_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].destination_id + mock_val = "destination_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_move_assignment_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.move_assignment( + reservation.MoveAssignmentRequest(), + name="name_value", + destination_id="destination_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.UpdateAssignmentRequest, + dict, + ], +) +def test_update_assignment(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + ) + response = client.update_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +def test_update_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_assignment), "__call__" + ) as call: + client.update_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateAssignmentRequest() + + +@pytest.mark.asyncio +async def test_update_assignment_async( + transport: str = "grpc_asyncio", request_type=reservation.UpdateAssignmentRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
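+        # Note: ``FakeUnaryUnaryCall`` wraps the canned response in an
+        # awaitable so that ``await client.update_assignment(...)`` behaves
+        # like a real async unary-unary gRPC call.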
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + ) + ) + response = await client.update_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +@pytest.mark.asyncio +async def test_update_assignment_async_from_dict(): + await test_update_assignment_async(request_type=dict) + + +def test_update_assignment_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.UpdateAssignmentRequest() + + request.assignment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_assignment), "__call__" + ) as call: + call.return_value = reservation.Assignment() + client.update_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "assignment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_assignment_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.UpdateAssignmentRequest() + + request.assignment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Assignment() + ) + await client.update_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "assignment.name=name_value", + ) in kw["metadata"] + + +def test_update_assignment_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.Assignment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.update_assignment(
+            assignment=reservation.Assignment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].assignment
+        mock_val = reservation.Assignment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_assignment_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_assignment(
+            reservation.UpdateAssignmentRequest(),
+            assignment=reservation.Assignment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_assignment_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.Assignment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_assignment(
+            assignment=reservation.Assignment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].assignment
+        mock_val = reservation.Assignment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_assignment_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_assignment(
+            reservation.UpdateAssignmentRequest(),
+            assignment=reservation.Assignment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.GetBiReservationRequest,
+        dict,
+    ],
+)
+def test_get_bi_reservation(request_type, transport: str = "grpc"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_bi_reservation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = reservation.BiReservation( + name="name_value", + size=443, + ) + response = client.get_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetBiReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.BiReservation) + assert response.name == "name_value" + assert response.size == 443 + + +def test_get_bi_reservation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bi_reservation), "__call__" + ) as call: + client.get_bi_reservation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetBiReservationRequest() + + +@pytest.mark.asyncio +async def test_get_bi_reservation_async( + transport: str = "grpc_asyncio", request_type=reservation.GetBiReservationRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bi_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.BiReservation( + name="name_value", + size=443, + ) + ) + response = await client.get_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetBiReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.BiReservation) + assert response.name == "name_value" + assert response.size == 443 + + +@pytest.mark.asyncio +async def test_get_bi_reservation_async_from_dict(): + await test_get_bi_reservation_async(request_type=dict) + + +def test_get_bi_reservation_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.GetBiReservationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bi_reservation), "__call__" + ) as call: + call.return_value = reservation.BiReservation() + client.get_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_bi_reservation_field_headers_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = reservation.GetBiReservationRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_bi_reservation), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.BiReservation()
+        )
+        await client.get_bi_reservation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_bi_reservation_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_bi_reservation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = reservation.BiReservation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_bi_reservation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_bi_reservation_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_bi_reservation(
+            reservation.GetBiReservationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_bi_reservation_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_bi_reservation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.BiReservation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_bi_reservation(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_bi_reservation_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_bi_reservation( + reservation.GetBiReservationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.UpdateBiReservationRequest, + dict, + ], +) +def test_update_bi_reservation(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bi_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.BiReservation( + name="name_value", + size=443, + ) + response = client.update_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateBiReservationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.BiReservation) + assert response.name == "name_value" + assert response.size == 443 + + +def test_update_bi_reservation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bi_reservation), "__call__" + ) as call: + client.update_bi_reservation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateBiReservationRequest() + + +@pytest.mark.asyncio +async def test_update_bi_reservation_async( + transport: str = "grpc_asyncio", request_type=reservation.UpdateBiReservationRequest +): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bi_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.BiReservation( + name="name_value", + size=443, + ) + ) + response = await client.update_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.UpdateBiReservationRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, reservation.BiReservation) + assert response.name == "name_value" + assert response.size == 443 + + +@pytest.mark.asyncio +async def test_update_bi_reservation_async_from_dict(): + await test_update_bi_reservation_async(request_type=dict) + + +def test_update_bi_reservation_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.UpdateBiReservationRequest() + + request.bi_reservation.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bi_reservation), "__call__" + ) as call: + call.return_value = reservation.BiReservation() + client.update_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "bi_reservation.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_bi_reservation_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.UpdateBiReservationRequest() + + request.bi_reservation.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bi_reservation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.BiReservation() + ) + await client.update_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "bi_reservation.name=name_value", + ) in kw["metadata"] + + +def test_update_bi_reservation_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bi_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.BiReservation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_bi_reservation( + bi_reservation=reservation.BiReservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].bi_reservation
+        mock_val = reservation.BiReservation(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_bi_reservation_flattened_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_bi_reservation(
+            reservation.UpdateBiReservationRequest(),
+            bi_reservation=reservation.BiReservation(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_bi_reservation_flattened_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_bi_reservation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            reservation.BiReservation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_bi_reservation(
+            bi_reservation=reservation.BiReservation(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].bi_reservation
+        mock_val = reservation.BiReservation(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_bi_reservation_flattened_error_async():
+    client = ReservationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_bi_reservation(
+            reservation.UpdateBiReservationRequest(),
+            bi_reservation=reservation.BiReservation(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        gcbr_reservation.CreateReservationRequest,
+        dict,
+    ],
+)
+def test_create_reservation_rest(request_type):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["reservation"] = {
+        "name": "name_value",
+        "slot_capacity": 1391,
+        "ignore_idle_slots": True,
+        "autoscale": {"current_slots": 1431, "max_slots": 986},
+        "concurrency": 1195,
+        "creation_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "multi_region_auxiliary": True,
+        "edition": 1,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
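+        # Note: the REST tests fake the HTTP layer end to end: the expected
+        # proto below is rendered to JSON with ``json_format.MessageToJson``,
+        # stored in ``Response._content``, and parsed back into a
+        # ``Reservation`` by the transport.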
+ return_value = gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcbr_reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_reservation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbr_reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == gcbr_reservation.Edition.STANDARD + + +def test_create_reservation_rest_required_fields( + request_type=gcbr_reservation.CreateReservationRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_reservation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("reservation_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcbr_reservation.Reservation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
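+            # Note: stubbing ``path_template.transcode`` (which normally maps
+            # the request onto one of the method's HTTP rules) with a fixed
+            # uri/method/body keeps this test independent of the real URL
+            # patterns while still exercising the query-parameter handling.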
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gcbr_reservation.Reservation.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_reservation(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_reservation_rest_unset_required_fields():
+    transport = transports.ReservationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_reservation._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("reservationId",)) & set(("parent",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_reservation_rest_interceptors(null_interceptor):
+    transport = transports.ReservationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ReservationServiceRestInterceptor(),
+    )
+    client = ReservationServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ReservationServiceRestInterceptor, "post_create_reservation"
+    ) as post, mock.patch.object(
+        transports.ReservationServiceRestInterceptor, "pre_create_reservation"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = gcbr_reservation.CreateReservationRequest.pb(
+            gcbr_reservation.CreateReservationRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gcbr_reservation.Reservation.to_json(
+            gcbr_reservation.Reservation()
+        )
+
+        request = gcbr_reservation.CreateReservationRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gcbr_reservation.Reservation()
+
+        client.create_reservation(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_reservation_rest_bad_request(
+    transport: str = "rest", request_type=gcbr_reservation.CreateReservationRequest
+):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["reservation"] = {
+        "name": "name_value",
+        "slot_capacity": 1391,
+        "ignore_idle_slots": True,
+        "autoscale": {"current_slots": 1431, "max_slots": 986},
+        "concurrency": 1195,
+        "creation_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "multi_region_auxiliary": True,
+        "edition": 1,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
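+    # Note: api-core maps a 400 status on the faked response to
+    # ``core_exceptions.BadRequest``, which is the exception asserted below.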
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_reservation(request) + + +def test_create_reservation_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcbr_reservation.Reservation() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + reservation_id="reservation_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcbr_reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_reservation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/reservations" + % client.transport._host, + args[1], + ) + + +def test_create_reservation_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_reservation( + gcbr_reservation.CreateReservationRequest(), + parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + reservation_id="reservation_id_value", + ) + + +def test_create_reservation_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.ListReservationsRequest, + dict, + ], +) +def test_list_reservations_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.ListReservationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.ListReservationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_reservations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReservationsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_reservations_rest_required_fields( + request_type=reservation.ListReservationsRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_reservations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_reservations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.ListReservationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = reservation.ListReservationsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_reservations(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_reservations_rest_unset_required_fields():
+    transport = transports.ReservationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_reservations._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_reservations_rest_interceptors(null_interceptor):
+    transport = transports.ReservationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ReservationServiceRestInterceptor(),
+    )
+    client = ReservationServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ReservationServiceRestInterceptor, "post_list_reservations"
+    ) as post, mock.patch.object(
+        transports.ReservationServiceRestInterceptor, "pre_list_reservations"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = reservation.ListReservationsRequest.pb(
+            reservation.ListReservationsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = reservation.ListReservationsResponse.to_json(
+            reservation.ListReservationsResponse()
+        )
+
+        request = reservation.ListReservationsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = reservation.ListReservationsResponse()
+
+        client.list_reservations(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_reservations_rest_bad_request(
+    transport: str = "rest", request_type=reservation.ListReservationsRequest
+):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_reservations(request) + + +def test_list_reservations_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.ListReservationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.ListReservationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_reservations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/reservations" + % client.transport._host, + args[1], + ) + + +def test_list_reservations_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_reservations( + reservation.ListReservationsRequest(), + parent="parent_value", + ) + + +def test_list_reservations_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + reservation.Reservation(), + ], + next_page_token="abc", + ), + reservation.ListReservationsResponse( + reservations=[], + next_page_token="def", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + ], + next_page_token="ghi", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + reservation.ListReservationsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_reservations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Reservation) for i in results) + + pages = list(client.list_reservations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.GetReservationRequest, + dict, + ], +) +def test_get_reservation_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_reservation(request) + + # Establish that the response is the type that we expect. 
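+    # Each scalar set on the mocked Reservation above should survive the
+    # MessageToJson -> HTTP -> parse round trip into the typed response.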
+ assert isinstance(response, reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +def test_get_reservation_rest_required_fields( + request_type=reservation.GetReservationRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.Reservation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
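+            # transcode() would normally match the request against the
+            # method's http rule (for GetReservation, something like
+            # GET v1/{name=projects/*/locations/*/reservations/*});
+            # stubbing it keeps this test focused on required-field
+            # handling rather than URI matching.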
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_reservation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_reservation_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_reservation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_get_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_get_reservation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.GetReservationRequest.pb( + reservation.GetReservationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.Reservation.to_json( + reservation.Reservation() + ) + + request = reservation.GetReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.Reservation() + + client.get_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_reservation_rest_bad_request( + transport: str = "rest", request_type=reservation.GetReservationRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_reservation(request) + + +def test_get_reservation_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.Reservation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reservations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_reservation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reservations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_reservation_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_reservation( + reservation.GetReservationRequest(), + name="name_value", + ) + + +def test_get_reservation_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.DeleteReservationRequest, + dict, + ], +) +def test_delete_reservation_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_reservation(request) + + # Establish that the response is the type that we expect. 
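+    # DeleteReservation maps to google.protobuf.Empty, which the
+    # generated client returns as None (hence the empty JSON body above).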
+ assert response is None + + +def test_delete_reservation_rest_required_fields( + request_type=reservation.DeleteReservationRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
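+            # The assertion on expected_params below checks that the only
+            # query parameter added by default is "$alt", which pins the
+            # response encoding to JSON with integer enums.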
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_reservation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_reservation_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_reservation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_delete_reservation" + ) as pre: + pre.assert_not_called() + pb_message = reservation.DeleteReservationRequest.pb( + reservation.DeleteReservationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = reservation.DeleteReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_reservation_rest_bad_request( + transport: str = "rest", request_type=reservation.DeleteReservationRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_reservation(request) + + +def test_delete_reservation_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reservations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_reservation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reservations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_reservation_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_reservation( + reservation.DeleteReservationRequest(), + name="name_value", + ) + + +def test_delete_reservation_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcbr_reservation.UpdateReservationRequest, + dict, + ], +) +def test_update_reservation_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "reservation": { + "name": "projects/sample1/locations/sample2/reservations/sample3" + } + } + request_init["reservation"] = { + "name": "projects/sample1/locations/sample2/reservations/sample3", + "slot_capacity": 1391, + "ignore_idle_slots": True, + "autoscale": {"current_slots": 1431, "max_slots": 986}, + "concurrency": 1195, + "creation_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "multi_region_auxiliary": True, + "edition": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcbr_reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_reservation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcbr_reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == gcbr_reservation.Edition.STANDARD + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_update_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_update_reservation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gcbr_reservation.UpdateReservationRequest.pb( + gcbr_reservation.UpdateReservationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcbr_reservation.Reservation.to_json( + gcbr_reservation.Reservation() + ) + + request = gcbr_reservation.UpdateReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcbr_reservation.Reservation() + + client.update_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_reservation_rest_bad_request( + transport: str = "rest", request_type=gcbr_reservation.UpdateReservationRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "reservation": { + "name": "projects/sample1/locations/sample2/reservations/sample3" + } + } + request_init["reservation"] = { + "name": "projects/sample1/locations/sample2/reservations/sample3", + "slot_capacity": 1391, + "ignore_idle_slots": True, + "autoscale": {"current_slots": 1431, "max_slots": 986}, + "concurrency": 1195, + "creation_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "multi_region_auxiliary": True, + "edition": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_reservation(request) + + +def test_update_reservation_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
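+    # Flattened calls let callers pass fields as keyword arguments; the
+    # client builds the UpdateReservationRequest (reservation plus
+    # update_mask) internally before hitting the transport.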
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcbr_reservation.Reservation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "reservation": { + "name": "projects/sample1/locations/sample2/reservations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + reservation=gcbr_reservation.Reservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcbr_reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_reservation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{reservation.name=projects/*/locations/*/reservations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_reservation_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_reservation( + gcbr_reservation.UpdateReservationRequest(), + reservation=gcbr_reservation.Reservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_reservation_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.CreateCapacityCommitmentRequest, + dict, + ], +) +def test_create_capacity_commitment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["capacity_commitment"] = { + "name": "name_value", + "slot_count": 1098, + "plan": 3, + "state": 1, + "commitment_start_time": {"seconds": 751, "nanos": 543}, + "commitment_end_time": {}, + "failure_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "renewal_plan": 3, + "multi_region_auxiliary": True, + "edition": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
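+        # Enum fields (plan, state, renewal_plan, edition) are set with
+        # typed enum values here and asserted against the decoded
+        # response further down.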
+ return_value = reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.CapacityCommitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_capacity_commitment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +def test_create_capacity_commitment_rest_required_fields( + request_type=reservation.CreateCapacityCommitmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_capacity_commitment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_capacity_commitment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "capacity_commitment_id", + "enforce_single_admin_project_per_org", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.CapacityCommitment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
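+            # Unlike the GET methods above, CreateCapacityCommitment
+            # sends the request message as the HTTP body, so the stubbed
+            # transcode result below also includes a "body" entry.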
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = reservation.CapacityCommitment.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_capacity_commitment(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_capacity_commitment_rest_unset_required_fields():
+    transport = transports.ReservationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_capacity_commitment._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "capacityCommitmentId",
+                "enforceSingleAdminProjectPerOrg",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_capacity_commitment_rest_interceptors(null_interceptor):
+    transport = transports.ReservationServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.ReservationServiceRestInterceptor(),
+    )
+    client = ReservationServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.ReservationServiceRestInterceptor, "post_create_capacity_commitment"
+    ) as post, mock.patch.object(
+        transports.ReservationServiceRestInterceptor, "pre_create_capacity_commitment"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = reservation.CreateCapacityCommitmentRequest.pb(
+            reservation.CreateCapacityCommitmentRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = reservation.CapacityCommitment.to_json(
+            reservation.CapacityCommitment()
+        )
+
+        request = reservation.CreateCapacityCommitmentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = reservation.CapacityCommitment()
+
+        client.create_capacity_commitment(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_capacity_commitment_rest_bad_request(
+    transport: str = "rest", request_type=reservation.CreateCapacityCommitmentRequest
+):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["capacity_commitment"] = {
+        "name": "name_value",
+        "slot_count": 1098,
+        "plan": 3,
+        "state": 1,
+        "commitment_start_time": {"seconds": 751, "nanos": 543},
+        "commitment_end_time": {},
+        "failure_status": {
+            "code": 411,
+            "message": "message_value",
+            "details": [
+                {
+                    "type_url": "type.googleapis.com/google.protobuf.Duration",
+                    "value": b"\x08\x0c\x10\xdb\x07",
+                }
+            ],
+        },
+        "renewal_plan": 3,
+        "multi_region_auxiliary": True,
+        "edition": 1,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_capacity_commitment(request)
+
+
+def test_create_capacity_commitment_rest_flattened():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = reservation.CapacityCommitment()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = reservation.CapacityCommitment.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_capacity_commitment(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*}/capacityCommitments"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_capacity_commitment_rest_flattened_error(transport: str = "rest"):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_capacity_commitment(
+            reservation.CreateCapacityCommitmentRequest(),
+            parent="parent_value",
+            capacity_commitment=reservation.CapacityCommitment(name="name_value"),
+        )
+
+
+def test_create_capacity_commitment_rest_error():
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        reservation.ListCapacityCommitmentsRequest,
+        dict,
+    ],
+)
+def test_list_capacity_commitments_rest(request_type):
+    client = ReservationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = reservation.ListCapacityCommitmentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_capacity_commitments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCapacityCommitmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_capacity_commitments_rest_required_fields( + request_type=reservation.ListCapacityCommitmentsRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_capacity_commitments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_capacity_commitments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.ListCapacityCommitmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.ListCapacityCommitmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_capacity_commitments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_capacity_commitments_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_capacity_commitments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_capacity_commitments_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_list_capacity_commitments" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_list_capacity_commitments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.ListCapacityCommitmentsRequest.pb( + reservation.ListCapacityCommitmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.ListCapacityCommitmentsResponse.to_json( + reservation.ListCapacityCommitmentsResponse() + ) + + request = reservation.ListCapacityCommitmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.ListCapacityCommitmentsResponse() + + client.list_capacity_commitments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_capacity_commitments_rest_bad_request( + transport: str = "rest", request_type=reservation.ListCapacityCommitmentsRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_capacity_commitments(request) + + +def test_list_capacity_commitments_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.ListCapacityCommitmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_capacity_commitments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/capacityCommitments" + % client.transport._host, + args[1], + ) + + +def test_list_capacity_commitments_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_capacity_commitments( + reservation.ListCapacityCommitmentsRequest(), + parent="parent_value", + ) + + +def test_list_capacity_commitments_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
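+        # Same pagination plumbing as the reservations pager test above:
+        # serialized pages are queued on req.side_effect and consumed one
+        # per HTTP round trip.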
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.ListCapacityCommitmentsResponse( + capacity_commitments=[ + reservation.CapacityCommitment(), + reservation.CapacityCommitment(), + reservation.CapacityCommitment(), + ], + next_page_token="abc", + ), + reservation.ListCapacityCommitmentsResponse( + capacity_commitments=[], + next_page_token="def", + ), + reservation.ListCapacityCommitmentsResponse( + capacity_commitments=[ + reservation.CapacityCommitment(), + ], + next_page_token="ghi", + ), + reservation.ListCapacityCommitmentsResponse( + capacity_commitments=[ + reservation.CapacityCommitment(), + reservation.CapacityCommitment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + reservation.ListCapacityCommitmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_capacity_commitments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.CapacityCommitment) for i in results) + + pages = list(client.list_capacity_commitments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.GetCapacityCommitmentRequest, + dict, + ], +) +def test_get_capacity_commitment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.CapacityCommitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_capacity_commitment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +def test_get_capacity_commitment_rest_required_fields( + request_type=reservation.GetCapacityCommitmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_capacity_commitment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_capacity_commitment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.CapacityCommitment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.CapacityCommitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_capacity_commitment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_capacity_commitment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_capacity_commitment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_capacity_commitment_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_get_capacity_commitment" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_get_capacity_commitment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.GetCapacityCommitmentRequest.pb( + reservation.GetCapacityCommitmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.CapacityCommitment.to_json( + reservation.CapacityCommitment() + ) + + request = reservation.GetCapacityCommitmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.CapacityCommitment() + + client.get_capacity_commitment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_capacity_commitment_rest_bad_request( + transport: str = "rest", request_type=reservation.GetCapacityCommitmentRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_capacity_commitment(request) + + +def test_get_capacity_commitment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.CapacityCommitment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.CapacityCommitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_capacity_commitment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_capacity_commitment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_capacity_commitment( + reservation.GetCapacityCommitmentRequest(), + name="name_value", + ) + + +def test_get_capacity_commitment_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.DeleteCapacityCommitmentRequest, + dict, + ], +) +def test_delete_capacity_commitment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_capacity_commitment(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_capacity_commitment_rest_required_fields( + request_type=reservation.DeleteCapacityCommitmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_capacity_commitment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_capacity_commitment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_capacity_commitment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_capacity_commitment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_capacity_commitment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_capacity_commitment_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_delete_capacity_commitment" + ) as pre: + pre.assert_not_called() + pb_message = reservation.DeleteCapacityCommitmentRequest.pb( + reservation.DeleteCapacityCommitmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = reservation.DeleteCapacityCommitmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_capacity_commitment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_capacity_commitment_rest_bad_request( + transport: str = "rest", request_type=reservation.DeleteCapacityCommitmentRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_capacity_commitment(request) + + +def test_delete_capacity_commitment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_capacity_commitment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_capacity_commitment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_capacity_commitment( + reservation.DeleteCapacityCommitmentRequest(), + name="name_value", + ) + + +def test_delete_capacity_commitment_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.UpdateCapacityCommitmentRequest, + dict, + ], +) +def test_update_capacity_commitment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "capacity_commitment": { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + } + request_init["capacity_commitment"] = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3", + "slot_count": 1098, + "plan": 3, + "state": 1, + "commitment_start_time": {"seconds": 751, "nanos": 543}, + "commitment_end_time": {}, + "failure_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "renewal_plan": 3, + "multi_region_auxiliary": True, + "edition": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.CapacityCommitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_capacity_commitment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_capacity_commitment_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_update_capacity_commitment" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_update_capacity_commitment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.UpdateCapacityCommitmentRequest.pb( + reservation.UpdateCapacityCommitmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.CapacityCommitment.to_json( + reservation.CapacityCommitment() + ) + + request = reservation.UpdateCapacityCommitmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.CapacityCommitment() + + client.update_capacity_commitment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_capacity_commitment_rest_bad_request( + transport: str = "rest", request_type=reservation.UpdateCapacityCommitmentRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "capacity_commitment": { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + } + request_init["capacity_commitment"] = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3", + 
"slot_count": 1098, + "plan": 3, + "state": 1, + "commitment_start_time": {"seconds": 751, "nanos": 543}, + "commitment_end_time": {}, + "failure_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "renewal_plan": 3, + "multi_region_auxiliary": True, + "edition": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_capacity_commitment(request) + + +def test_update_capacity_commitment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.CapacityCommitment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "capacity_commitment": { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + capacity_commitment=reservation.CapacityCommitment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.CapacityCommitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_capacity_commitment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{capacity_commitment.name=projects/*/locations/*/capacityCommitments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_capacity_commitment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_capacity_commitment( + reservation.UpdateCapacityCommitmentRequest(), + capacity_commitment=reservation.CapacityCommitment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_capacity_commitment_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.SplitCapacityCommitmentRequest, + dict, + ], +) +def test_split_capacity_commitment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.SplitCapacityCommitmentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.split_capacity_commitment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.SplitCapacityCommitmentResponse) + + +def test_split_capacity_commitment_rest_required_fields( + request_type=reservation.SplitCapacityCommitmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).split_capacity_commitment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).split_capacity_commitment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.SplitCapacityCommitmentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
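+        # (The real transcode() raises ValueError when a request does not
+        # match any of the method's URI patterns.)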
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.SplitCapacityCommitmentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.split_capacity_commitment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_split_capacity_commitment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.split_capacity_commitment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_split_capacity_commitment_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_split_capacity_commitment" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_split_capacity_commitment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.SplitCapacityCommitmentRequest.pb( + reservation.SplitCapacityCommitmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.SplitCapacityCommitmentResponse.to_json( + reservation.SplitCapacityCommitmentResponse() + ) + + request = reservation.SplitCapacityCommitmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.SplitCapacityCommitmentResponse() + + client.split_capacity_commitment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_split_capacity_commitment_rest_bad_request( + transport: str = "rest", request_type=reservation.SplitCapacityCommitmentRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.split_capacity_commitment(request) + + +def test_split_capacity_commitment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.SplitCapacityCommitmentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + slot_count=1098, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.split_capacity_commitment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}:split" + % client.transport._host, + args[1], + ) + + +def test_split_capacity_commitment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.split_capacity_commitment( + reservation.SplitCapacityCommitmentRequest(), + name="name_value", + slot_count=1098, + ) + + +def test_split_capacity_commitment_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.MergeCapacityCommitmentsRequest, + dict, + ], +) +def test_merge_capacity_commitments_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
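+        # MergeCapacityCommitments returns the single commitment produced by
+        # the merge, hence the CapacityCommitment designated here.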
+ return_value = reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.CapacityCommitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.merge_capacity_commitments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.CapacityCommitment) + assert response.name == "name_value" + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_merge_capacity_commitments_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_merge_capacity_commitments" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_merge_capacity_commitments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.MergeCapacityCommitmentsRequest.pb( + reservation.MergeCapacityCommitmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.CapacityCommitment.to_json( + reservation.CapacityCommitment() + ) + + request = reservation.MergeCapacityCommitmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.CapacityCommitment() + + client.merge_capacity_commitments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_merge_capacity_commitments_rest_bad_request( + transport: str = "rest", request_type=reservation.MergeCapacityCommitmentsRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.merge_capacity_commitments(request) + + +def test_merge_capacity_commitments_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.CapacityCommitment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + capacity_commitment_ids=["capacity_commitment_ids_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.CapacityCommitment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.merge_capacity_commitments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/capacityCommitments:merge" + % client.transport._host, + args[1], + ) + + +def test_merge_capacity_commitments_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.merge_capacity_commitments( + reservation.MergeCapacityCommitmentsRequest(), + parent="parent_value", + capacity_commitment_ids=["capacity_commitment_ids_value"], + ) + + +def test_merge_capacity_commitments_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.CreateAssignmentRequest, + dict, + ], +) +def test_create_assignment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request_init["assignment"] = { + "name": "name_value", + "assignee": "assignee_value", + "job_type": 1, + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_assignment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +def test_create_assignment_rest_required_fields( + request_type=reservation.CreateAssignmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_assignment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("assignment_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
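+            # Unlike the GET variants above, this POST stand-in also carries
+            # the request as "body" (added just below).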
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_assignment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assignmentId",)) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_assignment_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_create_assignment" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_create_assignment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.CreateAssignmentRequest.pb( + reservation.CreateAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.Assignment.to_json( + reservation.Assignment() + ) + + request = reservation.CreateAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.Assignment() + + client.create_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_assignment_rest_bad_request( + transport: str = "rest", request_type=reservation.CreateAssignmentRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request_init["assignment"] = { + "name": "name_value", + "assignee": "assignee_value", + "job_type": 1, + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_assignment(request) + + +def test_create_assignment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/reservations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + assignment=reservation.Assignment(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/reservations/*}/assignments" + % client.transport._host, + args[1], + ) + + +def test_create_assignment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_assignment( + reservation.CreateAssignmentRequest(), + parent="parent_value", + assignment=reservation.Assignment(name="name_value"), + ) + + +def test_create_assignment_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.ListAssignmentsRequest, + dict, + ], +) +def test_list_assignments_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
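+        # The client wraps this response in a ListAssignmentsPager; the
+        # assertions below check the pager, not the raw proto.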
+ return_value = reservation.ListAssignmentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.ListAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_assignments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_assignments_rest_required_fields( + request_type=reservation.ListAssignmentsRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_assignments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_assignments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.ListAssignmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.ListAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_assignments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_assignments_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_assignments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_assignments_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_list_assignments" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_list_assignments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.ListAssignmentsRequest.pb( + reservation.ListAssignmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.ListAssignmentsResponse.to_json( + reservation.ListAssignmentsResponse() + ) + + request = reservation.ListAssignmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.ListAssignmentsResponse() + + client.list_assignments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_assignments_rest_bad_request( + transport: str = "rest", request_type=reservation.ListAssignmentsRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_assignments(request) + + +def test_list_assignments_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.ListAssignmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/reservations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.ListAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_assignments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/reservations/*}/assignments" + % client.transport._host, + args[1], + ) + + +def test_list_assignments_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_assignments( + reservation.ListAssignmentsRequest(), + parent="parent_value", + ) + + +def test_list_assignments_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
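+        # (Pagination below is driven entirely through req.side_effect, so
+        # the transcode mock that follows remains commented out.)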
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.ListAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + reservation.Assignment(), + ], + next_page_token="abc", + ), + reservation.ListAssignmentsResponse( + assignments=[], + next_page_token="def", + ), + reservation.ListAssignmentsResponse( + assignments=[ + reservation.Assignment(), + ], + next_page_token="ghi", + ), + reservation.ListAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + reservation.ListAssignmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/reservations/sample3" + } + + pager = client.list_assignments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Assignment) for i in results) + + pages = list(client.list_assignments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.DeleteAssignmentRequest, + dict, + ], +) +def test_delete_assignment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_assignment(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_assignment_rest_required_fields( + request_type=reservation.DeleteAssignmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_assignment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_assignment_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_delete_assignment" + ) as pre: + pre.assert_not_called() + pb_message = reservation.DeleteAssignmentRequest.pb( + reservation.DeleteAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = reservation.DeleteAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_assignment_rest_bad_request( + transport: str = "rest", request_type=reservation.DeleteAssignmentRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_assignment(request) + + +def test_delete_assignment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reservations/*/assignments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_assignment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_assignment( + reservation.DeleteAssignmentRequest(), + name="name_value", + ) + + +def test_delete_assignment_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.SearchAssignmentsRequest, + dict, + ], +) +def test_search_assignments_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAssignmentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.SearchAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_assignments(request) + + # Establish that the response is the type that we expect. 
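+    # The raw SearchAssignmentsResponse is wrapped in a pager, which fetches
+    # any follow-up pages on demand using next_page_token.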
+ assert isinstance(response, pagers.SearchAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_search_assignments_rest_required_fields( + request_type=reservation.SearchAssignmentsRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_assignments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_assignments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "query", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAssignmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
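+            # search_assignments transcodes to a GET, so the whole request
+            # message rides in query_params and no body is attached.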
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.SearchAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_assignments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_assignments_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_assignments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "query", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_assignments_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_search_assignments" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_search_assignments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.SearchAssignmentsRequest.pb( + reservation.SearchAssignmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.SearchAssignmentsResponse.to_json( + reservation.SearchAssignmentsResponse() + ) + + request = reservation.SearchAssignmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.SearchAssignmentsResponse() + + client.search_assignments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_assignments_rest_bad_request( + transport: str = "rest", request_type=reservation.SearchAssignmentsRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
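+    # A 400 status from the mocked session should surface through the
+    # transport as core_exceptions.BadRequest.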
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_assignments(request) + + +def test_search_assignments_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAssignmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + query="query_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.SearchAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.search_assignments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}:searchAssignments" + % client.transport._host, + args[1], + ) + + +def test_search_assignments_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_assignments( + reservation.SearchAssignmentsRequest(), + parent="parent_value", + query="query_value", + ) + + +def test_search_assignments_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
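+        # (The page responses defined below are wired into the session mock
+        # via side_effect, so each HTTP call the pager makes pops the next
+        # page in order.)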
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.SearchAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + reservation.Assignment(), + ], + next_page_token="abc", + ), + reservation.SearchAssignmentsResponse( + assignments=[], + next_page_token="def", + ), + reservation.SearchAssignmentsResponse( + assignments=[ + reservation.Assignment(), + ], + next_page_token="ghi", + ), + reservation.SearchAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + reservation.SearchAssignmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.search_assignments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Assignment) for i in results) + + pages = list(client.search_assignments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.SearchAllAssignmentsRequest, + dict, + ], +) +def test_search_all_assignments_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAllAssignmentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.search_all_assignments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchAllAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_search_all_assignments_rest_required_fields( + request_type=reservation.SearchAllAssignmentsRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_assignments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_assignments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "query", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAllAssignmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.search_all_assignments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_all_assignments_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_all_assignments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "query", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_assignments_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_search_all_assignments" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_search_all_assignments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.SearchAllAssignmentsRequest.pb( + reservation.SearchAllAssignmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.SearchAllAssignmentsResponse.to_json( + reservation.SearchAllAssignmentsResponse() + ) + + request = reservation.SearchAllAssignmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.SearchAllAssignmentsResponse() + + client.search_all_assignments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_all_assignments_rest_bad_request( + transport: str = "rest", request_type=reservation.SearchAllAssignmentsRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_all_assignments(request) + + +def test_search_all_assignments_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAllAssignmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + query="query_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.search_all_assignments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}:searchAllAssignments" + % client.transport._host, + args[1], + ) + + +def test_search_all_assignments_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_assignments( + reservation.SearchAllAssignmentsRequest(), + parent="parent_value", + query="query_value", + ) + + +def test_search_all_assignments_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.SearchAllAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + reservation.Assignment(), + ], + next_page_token="abc", + ), + reservation.SearchAllAssignmentsResponse( + assignments=[], + next_page_token="def", + ), + reservation.SearchAllAssignmentsResponse( + assignments=[ + reservation.Assignment(), + ], + next_page_token="ghi", + ), + reservation.SearchAllAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + reservation.SearchAllAssignmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.search_all_assignments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Assignment) for i in results) + + pages = list(client.search_all_assignments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.MoveAssignmentRequest, + dict, + ], +) +def test_move_assignment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.move_assignment(request) + + # Establish that the response is the type that we expect. 
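+    # Enum fields should come back as typed enum members (JobType.PIPELINE,
+    # State.PENDING) rather than their raw wire representations.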
+ assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +def test_move_assignment_rest_required_fields( + request_type=reservation.MoveAssignmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).move_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).move_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
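+            # MoveAssignment is a POST, so unlike the GET methods above the
+            # transcode stub also attaches the request message as the body.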
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.move_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_move_assignment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.move_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_move_assignment_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_move_assignment" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_move_assignment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.MoveAssignmentRequest.pb( + reservation.MoveAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.Assignment.to_json( + reservation.Assignment() + ) + + request = reservation.MoveAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.Assignment() + + client.move_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_move_assignment_rest_bad_request( + transport: str = "rest", request_type=reservation.MoveAssignmentRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_assignment(request) + + +def test_move_assignment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + destination_id="destination_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.move_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reservations/*/assignments/*}:move" + % client.transport._host, + args[1], + ) + + +def test_move_assignment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.move_assignment( + reservation.MoveAssignmentRequest(), + name="name_value", + destination_id="destination_id_value", + ) + + +def test_move_assignment_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.UpdateAssignmentRequest, + dict, + ], +) +def test_update_assignment_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "assignment": { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + } + request_init["assignment"] = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4", + "assignee": "assignee_value", + "job_type": 1, + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
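+        # The service echoes the mutated resource, so the fake returns a
+        # populated Assignment for the field-level assertions that follow.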
+ return_value = reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_assignment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_assignment_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_update_assignment" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_update_assignment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.UpdateAssignmentRequest.pb( + reservation.UpdateAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.Assignment.to_json( + reservation.Assignment() + ) + + request = reservation.UpdateAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.Assignment() + + client.update_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_assignment_rest_bad_request( + transport: str = "rest", request_type=reservation.UpdateAssignmentRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "assignment": { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + } + request_init["assignment"] = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4", + "assignee": "assignee_value", + "job_type": 1, + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_assignment(request) + + +def test_update_assignment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "assignment": { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + assignment=reservation.Assignment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{assignment.name=projects/*/locations/*/reservations/*/assignments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_assignment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_assignment( + reservation.UpdateAssignmentRequest(), + assignment=reservation.Assignment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_assignment_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.GetBiReservationRequest, + dict, + ], +) +def test_get_bi_reservation_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/biReservation"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
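+        # BiReservation.size is the reservation's size in bytes, hence the
+        # plain integer used in the mocked response.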
+ return_value = reservation.BiReservation( + name="name_value", + size=443, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_bi_reservation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.BiReservation) + assert response.name == "name_value" + assert response.size == 443 + + +def test_get_bi_reservation_rest_required_fields( + request_type=reservation.GetBiReservationRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_bi_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_bi_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.BiReservation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_bi_reservation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_bi_reservation_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_bi_reservation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_bi_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_get_bi_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_get_bi_reservation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.GetBiReservationRequest.pb( + reservation.GetBiReservationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.BiReservation.to_json( + reservation.BiReservation() + ) + + request = reservation.GetBiReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.BiReservation() + + client.get_bi_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_bi_reservation_rest_bad_request( + transport: str = "rest", request_type=reservation.GetBiReservationRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/biReservation"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_bi_reservation(request) + + +def test_get_bi_reservation_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.BiReservation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/biReservation"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_bi_reservation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/biReservation}" + % client.transport._host, + args[1], + ) + + +def test_get_bi_reservation_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_bi_reservation( + reservation.GetBiReservationRequest(), + name="name_value", + ) + + +def test_get_bi_reservation_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.UpdateBiReservationRequest, + dict, + ], +) +def test_update_bi_reservation_rest(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "bi_reservation": {"name": "projects/sample1/locations/sample2/biReservation"} + } + request_init["bi_reservation"] = { + "name": "projects/sample1/locations/sample2/biReservation", + "update_time": {"seconds": 751, "nanos": 543}, + "size": 443, + "preferred_tables": [ + { + "project_id": "project_id_value", + "dataset_id": "dataset_id_value", + "table_id": "table_id_value", + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.BiReservation( + name="name_value", + size=443, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_bi_reservation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.BiReservation) + assert response.name == "name_value" + assert response.size == 443 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_bi_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_update_bi_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_update_bi_reservation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = reservation.UpdateBiReservationRequest.pb( + reservation.UpdateBiReservationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = reservation.BiReservation.to_json( + reservation.BiReservation() + ) + + request = reservation.UpdateBiReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.BiReservation() + + client.update_bi_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_bi_reservation_rest_bad_request( + transport: str = "rest", request_type=reservation.UpdateBiReservationRequest +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "bi_reservation": {"name": "projects/sample1/locations/sample2/biReservation"} + } + request_init["bi_reservation"] = { + "name": "projects/sample1/locations/sample2/biReservation", + "update_time": {"seconds": 751, "nanos": 543}, + "size": 443, + "preferred_tables": [ + { + "project_id": "project_id_value", + "dataset_id": "dataset_id_value", + "table_id": "table_id_value", + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_bi_reservation(request) + + +def test_update_bi_reservation_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.BiReservation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "bi_reservation": { + "name": "projects/sample1/locations/sample2/biReservation" + } + } + + # get truthy value for each flattened field + mock_args = dict( + bi_reservation=reservation.BiReservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_bi_reservation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{bi_reservation.name=projects/*/locations/*/biReservation}" + % client.transport._host, + args[1], + ) + + +def test_update_bi_reservation_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_bi_reservation( + reservation.UpdateBiReservationRequest(), + bi_reservation=reservation.BiReservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_bi_reservation_rest_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
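+    # An api_key travels in ClientOptions; pairing it with an explicit
+    # transport (which already owns credentials) is ambiguous and must raise.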
+ transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ReservationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ReservationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ReservationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ReservationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ReservationServiceGrpcTransport, + transports.ReservationServiceGrpcAsyncIOTransport, + transports.ReservationServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ReservationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ReservationServiceGrpcTransport, + ) + + +def test_reservation_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ReservationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_reservation_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ReservationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
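+    # The base class only defines the method surface; concrete gRPC and REST
+    # transports are expected to override every stub listed here.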
+ methods = ( + "create_reservation", + "list_reservations", + "get_reservation", + "delete_reservation", + "update_reservation", + "create_capacity_commitment", + "list_capacity_commitments", + "get_capacity_commitment", + "delete_capacity_commitment", + "update_capacity_commitment", + "split_capacity_commitment", + "merge_capacity_commitments", + "create_assignment", + "list_assignments", + "delete_assignment", + "search_assignments", + "search_all_assignments", + "move_assignment", + "update_assignment", + "get_bi_reservation", + "update_bi_reservation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_reservation_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ReservationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_reservation_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ReservationServiceTransport() + adc.assert_called_once() + + +def test_reservation_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ReservationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ReservationServiceGrpcTransport, + transports.ReservationServiceGrpcAsyncIOTransport, + ], +) +def test_reservation_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ReservationServiceGrpcTransport, + transports.ReservationServiceGrpcAsyncIOTransport, + transports.ReservationServiceRestTransport, + ], +) +def test_reservation_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ReservationServiceGrpcTransport, grpc_helpers), + (transports.ReservationServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_reservation_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "bigqueryreservation.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=["1", "2"], + default_host="bigqueryreservation.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ReservationServiceGrpcTransport, + transports.ReservationServiceGrpcAsyncIOTransport, + ], +) +def test_reservation_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_reservation_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ReservationServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_reservation_service_host_no_port(transport_name): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigqueryreservation.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "bigqueryreservation.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigqueryreservation.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_reservation_service_host_with_port(transport_name): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigqueryreservation.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "bigqueryreservation.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigqueryreservation.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_reservation_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ReservationServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ReservationServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_reservation._session + session2 = client2.transport.create_reservation._session + assert session1 != session2 + session1 = client1.transport.list_reservations._session + session2 = client2.transport.list_reservations._session + assert session1 != session2 + session1 = client1.transport.get_reservation._session + session2 = client2.transport.get_reservation._session + assert session1 != session2 + session1 = client1.transport.delete_reservation._session + session2 = client2.transport.delete_reservation._session + assert session1 != session2 + session1 = client1.transport.update_reservation._session + session2 = client2.transport.update_reservation._session + assert session1 != session2 + session1 = client1.transport.create_capacity_commitment._session + session2 = client2.transport.create_capacity_commitment._session + assert session1 != session2 + session1 = client1.transport.list_capacity_commitments._session + session2 = client2.transport.list_capacity_commitments._session + assert session1 != session2 + session1 = 
client1.transport.get_capacity_commitment._session + session2 = client2.transport.get_capacity_commitment._session + assert session1 != session2 + session1 = client1.transport.delete_capacity_commitment._session + session2 = client2.transport.delete_capacity_commitment._session + assert session1 != session2 + session1 = client1.transport.update_capacity_commitment._session + session2 = client2.transport.update_capacity_commitment._session + assert session1 != session2 + session1 = client1.transport.split_capacity_commitment._session + session2 = client2.transport.split_capacity_commitment._session + assert session1 != session2 + session1 = client1.transport.merge_capacity_commitments._session + session2 = client2.transport.merge_capacity_commitments._session + assert session1 != session2 + session1 = client1.transport.create_assignment._session + session2 = client2.transport.create_assignment._session + assert session1 != session2 + session1 = client1.transport.list_assignments._session + session2 = client2.transport.list_assignments._session + assert session1 != session2 + session1 = client1.transport.delete_assignment._session + session2 = client2.transport.delete_assignment._session + assert session1 != session2 + session1 = client1.transport.search_assignments._session + session2 = client2.transport.search_assignments._session + assert session1 != session2 + session1 = client1.transport.search_all_assignments._session + session2 = client2.transport.search_all_assignments._session + assert session1 != session2 + session1 = client1.transport.move_assignment._session + session2 = client2.transport.move_assignment._session + assert session1 != session2 + session1 = client1.transport.update_assignment._session + session2 = client2.transport.update_assignment._session + assert session1 != session2 + session1 = client1.transport.get_bi_reservation._session + session2 = client2.transport.get_bi_reservation._session + assert session1 != session2 + session1 = client1.transport.update_bi_reservation._session + session2 = client2.transport.update_bi_reservation._session + assert session1 != session2 + + +def test_reservation_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ReservationServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_reservation_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ReservationServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ReservationServiceGrpcTransport, + transports.ReservationServiceGrpcAsyncIOTransport, + ], +) +def test_reservation_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ReservationServiceGrpcTransport, + transports.ReservationServiceGrpcAsyncIOTransport, + ], +) +def test_reservation_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_assignment_path(): + project = "squid" + location = "clam" + reservation = "whelk" + assignment = "octopus" + expected = "projects/{project}/locations/{location}/reservations/{reservation}/assignments/{assignment}".format( + project=project, + location=location, + reservation=reservation, + assignment=assignment, + ) + actual = ReservationServiceClient.assignment_path( + project, location, reservation, assignment + ) + assert expected == actual + + +def test_parse_assignment_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "reservation": "cuttlefish", + "assignment": "mussel", + } + path = 
ReservationServiceClient.assignment_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_assignment_path(path) + assert expected == actual + + +def test_bi_reservation_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}/biReservation".format( + project=project, + location=location, + ) + actual = ReservationServiceClient.bi_reservation_path(project, location) + assert expected == actual + + +def test_parse_bi_reservation_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ReservationServiceClient.bi_reservation_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_bi_reservation_path(path) + assert expected == actual + + +def test_capacity_commitment_path(): + project = "squid" + location = "clam" + capacity_commitment = "whelk" + expected = "projects/{project}/locations/{location}/capacityCommitments/{capacity_commitment}".format( + project=project, + location=location, + capacity_commitment=capacity_commitment, + ) + actual = ReservationServiceClient.capacity_commitment_path( + project, location, capacity_commitment + ) + assert expected == actual + + +def test_parse_capacity_commitment_path(): + expected = { + "project": "octopus", + "location": "oyster", + "capacity_commitment": "nudibranch", + } + path = ReservationServiceClient.capacity_commitment_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_capacity_commitment_path(path) + assert expected == actual + + +def test_reservation_path(): + project = "cuttlefish" + location = "mussel" + reservation = "winkle" + expected = ( + "projects/{project}/locations/{location}/reservations/{reservation}".format( + project=project, + location=location, + reservation=reservation, + ) + ) + actual = ReservationServiceClient.reservation_path(project, location, reservation) + assert expected == actual + + +def test_parse_reservation_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "reservation": "abalone", + } + path = ReservationServiceClient.reservation_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_reservation_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ReservationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ReservationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ReservationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ReservationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ReservationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ReservationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ReservationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = ReservationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ReservationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ReservationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ReservationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ReservationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ReservationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ReservationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ReservationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # 
Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ReservationServiceClient, transports.ReservationServiceGrpcTransport), + ( + ReservationServiceAsyncClient, + transports.ReservationServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-contact-center-insights/CONTRIBUTING.rst b/packages/google-cloud-contact-center-insights/CONTRIBUTING.rst index aa972d9de3fb..4be792105a32 100644 --- a/packages/google-cloud-contact-center-insights/CONTRIBUTING.rst +++ b/packages/google-cloud-contact-center-insights/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system- -- -k <name of test> + $ nox -s system-3.11 -- -k <name of test> .. note:: - System tests are only configured to run under Python. + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py index 116bd5746629..64cabec4b64a 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/__init__.py @@ -130,6 +130,7 @@ SilenceData, SmartComposeSuggestionData, SmartReplyData, + SpeechConfig, View, ) @@ -239,5 +240,6 @@ "SilenceData", "SmartComposeSuggestionData", "SmartReplyData", + "SpeechConfig", "View", ) diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py index ac3bc60c3fb3..360a0d13ebdd 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License.
# -__version__ = "1.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py index 5f3ada6f8f36..4b68bced5210 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/__init__.py @@ -128,6 +128,7 @@ SilenceData, SmartComposeSuggestionData, SmartReplyData, + SpeechConfig, View, ) @@ -226,6 +227,7 @@ "SilenceData", "SmartComposeSuggestionData", "SmartReplyData", + "SpeechConfig", "UndeployIssueModelMetadata", "UndeployIssueModelRequest", "UndeployIssueModelResponse", diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py index ac3bc60c3fb3..360a0d13ebdd 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py index 4cb3e4748b15..4586b6a2f19a 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/async_client.py @@ -99,6 +99,10 @@ class ContactCenterInsightsAsyncClient: parse_phrase_matcher_path = staticmethod( ContactCenterInsightsClient.parse_phrase_matcher_path ) + recognizer_path = staticmethod(ContactCenterInsightsClient.recognizer_path) + parse_recognizer_path = staticmethod( + ContactCenterInsightsClient.parse_recognizer_path + ) settings_path = staticmethod(ContactCenterInsightsClient.settings_path) parse_settings_path = staticmethod(ContactCenterInsightsClient.parse_settings_path) view_path = staticmethod(ContactCenterInsightsClient.view_path) diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py index a5dafefc6ed7..7c0528617444 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py @@ -352,6 +352,30 @@ def parse_phrase_matcher_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def recognizer_path( + project: str, + location: str, + recognizer: str, + ) -> str: + """Returns a 
fully-qualified recognizer string.""" + return ( + "projects/{project}/locations/{location}/recognizers/{recognizer}".format( + project=project, + location=location, + recognizer=recognizer, + ) + ) + + @staticmethod + def parse_recognizer_path(path: str) -> Dict[str, str]: + """Parses a recognizer path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/recognizers/(?P<recognizer>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def settings_path( project: str, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py index b17db47501d1..ecbc4bcf9db9 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/__init__.py @@ -119,6 +119,7 @@ SilenceData, SmartComposeSuggestionData, SmartReplyData, + SpeechConfig, View, ) @@ -226,5 +227,6 @@ "SilenceData", "SmartComposeSuggestionData", "SmartReplyData", + "SpeechConfig", "View", ) diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py index e5953d126498..464613619ccc 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/contact_center_insights.py @@ -360,6 +360,10 @@ class UploadConversationRequest(proto.Message): Optional. DLP settings for transcript redaction. Optional, will default to the config specified in Settings. + speech_config (google.cloud.contact_center_insights_v1.types.SpeechConfig): + Optional. Default Speech-to-Text + configuration. Optional, will default to the + config specified in Settings. """ parent: str = proto.Field( @@ -380,6 +384,11 @@ number=4, message=resources.RedactionConfig, ) + speech_config: resources.SpeechConfig = proto.Field( + proto.MESSAGE, + number=11, + message=resources.SpeechConfig, + ) class UploadConversationMetadata(proto.Message): @@ -584,7 +593,9 @@ class IngestConversationsRequest(proto.Message): Attributes: gcs_source (google.cloud.contact_center_insights_v1.types.IngestConversationsRequest.GcsSource): - A cloud storage bucket source. + A cloud storage bucket source. Note that any + previously ingested objects from the source will + be skipped to avoid duplication. This field is a member of `oneof`_ ``source``.
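
For orientation, a minimal round-trip through the recognizer path helpers added above might look like the following sketch. The project, location, and recognizer IDs are hypothetical; it assumes only that the google-cloud-contact-center-insights package is installed (the helpers are static methods, so no credentials are needed).

from google.cloud import contact_center_insights_v1

client_cls = contact_center_insights_v1.ContactCenterInsightsClient

# Build a fully-qualified recognizer resource name from its segments.
path = client_cls.recognizer_path("my-project", "us-central1", "my-recognizer")
assert path == "projects/my-project/locations/us-central1/recognizers/my-recognizer"

# parse_recognizer_path is the inverse; a non-matching path yields {}.
assert client_cls.parse_recognizer_path(path) == {
    "project": "my-project",
    "location": "us-central1",
    "recognizer": "my-recognizer",
}
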
transcript_object_config (google.cloud.contact_center_insights_v1.types.IngestConversationsRequest.TranscriptObjectConfig): diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py index 39b9ceb9fd7d..55e1f740d185 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py @@ -56,6 +56,7 @@ "ExactMatchConfig", "Settings", "RedactionConfig", + "SpeechConfig", "RuntimeAnnotation", "AnswerFeedback", "ArticleSuggestionData", @@ -1735,6 +1736,11 @@ class Settings(proto.Message): redaction_config (google.cloud.contact_center_insights_v1.types.RedactionConfig): Default DLP redaction resources to be applied while ingesting conversations. + speech_config (google.cloud.contact_center_insights_v1.types.SpeechConfig): + Optional. Default Speech-to-Text resources to + be used while ingesting audio files. Optional, + CCAI Insights will create a default if not + provided. """ class AnalysisConfig(proto.Message): @@ -1806,6 +1812,11 @@ class AnalysisConfig(proto.Message): number=10, message="RedactionConfig", ) + speech_config: "SpeechConfig" = proto.Field( + proto.MESSAGE, + number=11, + message="SpeechConfig", + ) class RedactionConfig(proto.Message): @@ -1833,6 +1844,21 @@ class RedactionConfig(proto.Message): ) +class SpeechConfig(proto.Message): + r"""Speech-to-Text configuration. + + Attributes: + speech_recognizer (str): + The fully-qualified Speech Recognizer resource name. Format: + ``projects/{project_id}/locations/{location}/recognizer/{recognizer}`` + """ + + speech_recognizer: str = proto.Field( + proto.STRING, + number=1, + ) + + class RuntimeAnnotation(proto.Message): r"""An annotation that was generated during the customer and agent interaction. diff --git a/packages/google-cloud-contact-center-insights/noxfile.py b/packages/google-cloud-contact-center-insights/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-contact-center-insights/noxfile.py +++ b/packages/google-cloud-contact-center-insights/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. 
- if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json index d94d393ac183..c3e9d3df64ac 100644 --- a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contact-center-insights", - "version": "1.12.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py b/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py index 63cb6b1c4c2b..6c0a0dd4ff19 100644 --- a/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py +++ b/packages/google-cloud-contact-center-insights/scripts/fixup_contact_center_insights_v1_keywords.py @@ -76,7 +76,7 @@ class contact_center_insightsCallTransformer(cst.CSTTransformer): 'update_phrase_matcher': ('phrase_matcher', 'update_mask', ), 'update_settings': ('settings', 'update_mask', ), 'update_view': ('view', 'update_mask', ), - 'upload_conversation': ('parent', 'conversation', 'conversation_id', 'redaction_config', ), + 'upload_conversation': ('parent', 'conversation', 'conversation_id', 'redaction_config', 'speech_config', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index da906d57ce8f..720618882f47 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -20902,6 +20902,7 @@ def test_update_settings_rest(request_type): "deidentify_template": "deidentify_template_value", "inspect_template": "inspect_template_value", }, + "speech_config": {"speech_recognizer": "speech_recognizer_value"}, } request = request_type(**request_init) @@ -21117,6 +21118,7 @@ def test_update_settings_rest_bad_request( "deidentify_template": "deidentify_template_value", "inspect_template": "inspect_template_value", }, + "speech_config": {"speech_recognizer": "speech_recognizer_value"}, } request = request_type(**request_init) @@ -23587,9 +23589,37 @@ def test_parse_phrase_matcher_path(): assert expected == actual -def test_settings_path(): +def test_recognizer_path(): project = "scallop" location = "abalone" + recognizer = "squid" + expected = ( + "projects/{project}/locations/{location}/recognizers/{recognizer}".format( + project=project, + 
location=location, + recognizer=recognizer, + ) + ) + actual = ContactCenterInsightsClient.recognizer_path(project, location, recognizer) + assert expected == actual + + +def test_parse_recognizer_path(): + expected = { + "project": "clam", + "location": "whelk", + "recognizer": "octopus", + } + path = ContactCenterInsightsClient.recognizer_path(**expected) + + # Check that the path construction is reversible. + actual = ContactCenterInsightsClient.parse_recognizer_path(path) + assert expected == actual + + +def test_settings_path(): + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}/settings".format( project=project, location=location, @@ -23600,8 +23630,8 @@ def test_settings_path(): def test_parse_settings_path(): expected = { - "project": "squid", - "location": "clam", + "project": "cuttlefish", + "location": "mussel", } path = ContactCenterInsightsClient.settings_path(**expected) @@ -23611,9 +23641,9 @@ def test_parse_settings_path(): def test_view_path(): - project = "whelk" - location = "octopus" - view = "oyster" + project = "winkle" + location = "nautilus" + view = "scallop" expected = "projects/{project}/locations/{location}/views/{view}".format( project=project, location=location, @@ -23625,9 +23655,9 @@ def test_view_path(): def test_parse_view_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "view": "mussel", + "project": "abalone", + "location": "squid", + "view": "clam", } path = ContactCenterInsightsClient.view_path(**expected) @@ -23637,7 +23667,7 @@ def test_parse_view_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -23647,7 +23677,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "octopus", } path = ContactCenterInsightsClient.common_billing_account_path(**expected) @@ -23657,7 +23687,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -23667,7 +23697,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "nudibranch", } path = ContactCenterInsightsClient.common_folder_path(**expected) @@ -23677,7 +23707,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -23687,7 +23717,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "mussel", } path = ContactCenterInsightsClient.common_organization_path(**expected) @@ -23697,7 +23727,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -23707,7 +23737,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "nautilus", } path = ContactCenterInsightsClient.common_project_path(**expected) @@ -23717,8 +23747,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "scallop" + location = 
"abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -23729,8 +23759,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "squid", + "location": "clam", } path = ContactCenterInsightsClient.common_location_path(**expected) diff --git a/packages/google-cloud-container/.OwlBot.yaml b/packages/google-cloud-container/.OwlBot.yaml new file mode 100644 index 000000000000..353f25f0a477 --- /dev/null +++ b/packages/google-cloud-container/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/container/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-container/$1 + +begin-after-commit-hash: 130ce904e5d546c312943d10f48799590f9c0f66 + diff --git a/packages/google-cloud-container/.coveragerc b/packages/google-cloud-container/.coveragerc new file mode 100644 index 000000000000..113f6dbccc9f --- /dev/null +++ b/packages/google-cloud-container/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/container/__init__.py + google/cloud/container/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-container/.flake8 b/packages/google-cloud-container/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-container/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-container/.gitignore b/packages/google-cloud-container/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-container/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-container/.repo-metadata.json b/packages/google-cloud-container/.repo-metadata.json new file mode 100644 index 000000000000..e49ed77eb111 --- /dev/null +++ b/packages/google-cloud-container/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "container", + "name_pretty": "Kubernetes Engine", + "product_documentation": "https://cloud.google.com/kubernetes-engine/", + "client_documentation": "https://cloud.google.com/python/docs/reference/container/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559746", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-container", + "api_id": "container.googleapis.com", + "requires_billing": true, + "default_version": "v1", + "api_shortname": "container", + "api_description": "The Google Kubernetes Engine API is used for building and managing container based applications, powered by the open source Kubernetes technology." 
+} diff --git a/packages/google-cloud-container/CHANGELOG.md b/packages/google-cloud-container/CHANGELOG.md new file mode 100644 index 000000000000..3098a21baae1 --- /dev/null +++ b/packages/google-cloud-container/CHANGELOG.md @@ -0,0 +1,704 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-container/#history + +## [2.31.0](https://github.com/googleapis/python-container/compare/v2.30.0...v2.31.0) (2023-08-28) + + +### Features + +* Add `machine_type`, `disk_type`, `disk_size_gb` fields to `UpdateNodePoolRequest` ([a1e508c](https://github.com/googleapis/python-container/commit/a1e508c53415ea816ed2649a46b791947bf87705)) +* Add support for NodeConfig Update ([a1e508c](https://github.com/googleapis/python-container/commit/a1e508c53415ea816ed2649a46b791947bf87705)) +* Publicize tpu topology in v1 API ([a1e508c](https://github.com/googleapis/python-container/commit/a1e508c53415ea816ed2649a46b791947bf87705)) + +## [2.30.0](https://github.com/googleapis/python-container/compare/v2.29.0...v2.30.0) (2023-08-11) + + +### Features + +* **v1beta1:** Add preview support for monitoring a cluster's pods for compliance with a provided Binary Authorization platform policy via Binary Authorization Continuous Validation ([#405](https://github.com/googleapis/python-container/issues/405)) ([c5806de](https://github.com/googleapis/python-container/commit/c5806de9c61b1c9855f1c9a5c3fac1ea343e1acc)) + +## [2.29.0](https://github.com/googleapis/python-container/compare/v2.28.0...v2.29.0) (2023-08-09) + + +### Features + +* Add APIs for GKE OOTB metrics packages ([#403](https://github.com/googleapis/python-container/issues/403)) ([834872a](https://github.com/googleapis/python-container/commit/834872af197b113183024f6711e36e8a88aad47e)) + +## [2.28.0](https://github.com/googleapis/python-container/compare/v2.27.0...v2.28.0) (2023-07-24) + + +### Features + +* **v1beta1:** Add enable_multi_networking to NetworkConfig ([02c497e](https://github.com/googleapis/python-container/commit/02c497ec42f3d51a28e31b3cca940e5a147960bd)) +* **v1beta1:** Add policy_name to PlacementPolicy message within a node pool ([02c497e](https://github.com/googleapis/python-container/commit/02c497ec42f3d51a28e31b3cca940e5a147960bd)) +* **v1beta1:** Add support for AdditionalPodNetworkConfig and AdditionalNodeNetworkConfig ([02c497e](https://github.com/googleapis/python-container/commit/02c497ec42f3d51a28e31b3cca940e5a147960bd)) +* **v1beta1:** Add support for HostMaintenancePolicy ([02c497e](https://github.com/googleapis/python-container/commit/02c497ec42f3d51a28e31b3cca940e5a147960bd)) + +## [2.27.0](https://github.com/googleapis/python-container/compare/v2.26.0...v2.27.0) (2023-07-19) + + +### Features + +* Add a Pod IP Utilization API ([b5dfb67](https://github.com/googleapis/python-container/commit/b5dfb6725c1a9097254959908af739cd3f7a20ea)) +* Add advanced_datapath_observability_config to monitoring_config ([b5dfb67](https://github.com/googleapis/python-container/commit/b5dfb6725c1a9097254959908af739cd3f7a20ea)) +* Add Multi-networking API ([5c5cc4f](https://github.com/googleapis/python-container/commit/5c5cc4fbe54affde3744da3203c6bd2aeb733d61)) +* Add policy_name to PlacementPolicy message within a node pool ([5c5cc4f](https://github.com/googleapis/python-container/commit/5c5cc4fbe54affde3744da3203c6bd2aeb733d61)) + +## 
[2.26.0](https://github.com/googleapis/python-container/compare/v2.25.0...v2.26.0) (2023-07-04) + + +### Features + +* **v1beta1:** Add `InsecureKubeletReadonlyPortEnabled` in `NodeKubeletConfig` and `AutoProvisioningNodePoolDefaults` ([be6b0ab](https://github.com/googleapis/python-container/commit/be6b0ab88931a3c6a64cc92b8b0dbd33b82be255)) +* **v1beta1:** Add `KUBE_DNS` option to `DNSConfig.cluster_dns` ([be6b0ab](https://github.com/googleapis/python-container/commit/be6b0ab88931a3c6a64cc92b8b0dbd33b82be255)) +* **v1beta1:** Add a Pod IP Utilization API ([be6b0ab](https://github.com/googleapis/python-container/commit/be6b0ab88931a3c6a64cc92b8b0dbd33b82be255)) +* **v1beta1:** Add Tier 1 cluster-level API network_performance_config ([be6b0ab](https://github.com/googleapis/python-container/commit/be6b0ab88931a3c6a64cc92b8b0dbd33b82be255)) +* **v1beta1:** Publicize tpu topology ([be6b0ab](https://github.com/googleapis/python-container/commit/be6b0ab88931a3c6a64cc92b8b0dbd33b82be255)) + + +### Bug Fixes + +* Add async context manager return types ([#385](https://github.com/googleapis/python-container/issues/385)) ([074edd4](https://github.com/googleapis/python-container/commit/074edd4c4f8cd10d8cd2afa5d8fb09f1c951c292)) + +## [2.25.0](https://github.com/googleapis/python-container/compare/v2.24.0...v2.25.0) (2023-06-29) + + +### Features + +* Add `KUBE_DNS` option to `DNSConfig.cluster_dns` ([91b001a](https://github.com/googleapis/python-container/commit/91b001a4f89557e8a503c57de3e45d6bf399ea90)) +* Add Tier 1 cluster-level API network_performance_config ([91b001a](https://github.com/googleapis/python-container/commit/91b001a4f89557e8a503c57de3e45d6bf399ea90)) + +## [2.24.0](https://github.com/googleapis/python-container/compare/v2.23.0...v2.24.0) (2023-06-13) + + +### Features + +* Add API for GPU driver installation config ([d6da309](https://github.com/googleapis/python-container/commit/d6da30922e76d8c56b8ec3c78b0cb8ffbb5bd82a)) +* Add SecurityPostureConfig API field to allow customers to enable GKE Security Posture capabilities for their clusters ([d6da309](https://github.com/googleapis/python-container/commit/d6da30922e76d8c56b8ec3c78b0cb8ffbb5bd82a)) +* Add workloadPolicyConfig API field to allow customers to enable the NET_ADMIN capability for their autopilot clusters ([d6da309](https://github.com/googleapis/python-container/commit/d6da30922e76d8c56b8ec3c78b0cb8ffbb5bd82a)) + +## [2.23.0](https://github.com/googleapis/python-container/compare/v2.22.0...v2.23.0) (2023-06-06) + + +### Features + +* Add an API field to enable FQDN Network Policy on clusters ([52d1480](https://github.com/googleapis/python-container/commit/52d14803c65c06f65c1dcf7679faa9c6a0cca784)) +* Add CheckAutopilotCompatibility API to get autopilot compatibility issues for a given standard cluster ([52d1480](https://github.com/googleapis/python-container/commit/52d14803c65c06f65c1dcf7679faa9c6a0cca784)) +* Turn on public visibility for best effort provision ([52d1480](https://github.com/googleapis/python-container/commit/52d14803c65c06f65c1dcf7679faa9c6a0cca784)) + +## [2.22.0](https://github.com/googleapis/python-container/compare/v2.21.0...v2.22.0) (2023-06-01) + + +### Features + +* Add SoleTenantConfig API ([f3126af](https://github.com/googleapis/python-container/commit/f3126afbc21296e5f3e0608276bc7903ae969fe3)) +* Cluster resizes will now have their own
operation type (RESIZE_CLUSTER) instead of reusing REPAIR_CLUSTER; they will start using this in the near future ([f3126af](https://github.com/googleapis/python-container/commit/f3126afbc21296e5f3e0608276bc7903ae969fe3)) +* Support fleet registration via cluster update ([f3126af](https://github.com/googleapis/python-container/commit/f3126afbc21296e5f3e0608276bc7903ae969fe3)) + + +### Documentation + +* Clarified release channel defaulting behavior for create cluster requests when release channel is unspecified ([f3126af](https://github.com/googleapis/python-container/commit/f3126afbc21296e5f3e0608276bc7903ae969fe3)) +* Operation.self_link and Operation.target_link given examples ([f3126af](https://github.com/googleapis/python-container/commit/f3126afbc21296e5f3e0608276bc7903ae969fe3)) +* Operation.Type is now documented in detail ([f3126af](https://github.com/googleapis/python-container/commit/f3126afbc21296e5f3e0608276bc7903ae969fe3)) + +## [2.21.0](https://github.com/googleapis/python-container/compare/v2.20.0...v2.21.0) (2023-04-15) + + +### Features + +* Add support for updating additional pod IPv4 ranges for Standard and Autopilot clusters ([#367](https://github.com/googleapis/python-container/issues/367)) ([533b4f9](https://github.com/googleapis/python-container/commit/533b4f91a9175cbd9bf892b93a38b283316f272d)) + +## [2.20.0](https://github.com/googleapis/python-container/compare/v2.19.0...v2.20.0) (2023-04-11) + + +### Features + +* Add support for updating additional pod IPv4 ranges for Standard and Autopilot clusters ([#365](https://github.com/googleapis/python-container/issues/365)) ([c9c29c4](https://github.com/googleapis/python-container/commit/c9c29c46f3c6a8d4dfebec3ec36e5c7fc6052bb7)) + +## [2.19.0](https://github.com/googleapis/python-container/compare/v2.18.0...v2.19.0) (2023-04-06) + + +### Features + +* Add support for disabling pod IP cidr overprovision. This feature requires special allowlisting for the projects.
([56f65fa](https://github.com/googleapis/python-container/commit/56f65fa23de31317e4cf39e50351ad1e1bb04b57)) +* Add update support for accelerator config ([56f65fa](https://github.com/googleapis/python-container/commit/56f65fa23de31317e4cf39e50351ad1e1bb04b57)) + +## [2.18.0](https://github.com/googleapis/python-container/compare/v2.17.4...v2.18.0) (2023-03-28) + + +### Features + +* Add a new fleet registration feature to v1beta1, v1 ([#360](https://github.com/googleapis/python-container/issues/360)) ([0bfdffe](https://github.com/googleapis/python-container/commit/0bfdffe31e660c3914081181c611efd5582a05d3)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#359](https://github.com/googleapis/python-container/issues/359)) ([d10ac7e](https://github.com/googleapis/python-container/commit/d10ac7e8db4d79011f1b5fa1bb508a8829a683ac)) +* Minor typo fix ([#356](https://github.com/googleapis/python-container/issues/356)) ([f41b699](https://github.com/googleapis/python-container/commit/f41b699907991248b0d14bd9d5ae3ab4a8e1aff7)) + +## [2.17.4](https://github.com/googleapis/python-container/compare/v2.17.3...v2.17.4) (2023-02-28) + + +### Documentation + +* Minor grammar improvements ([#351](https://github.com/googleapis/python-container/issues/351)) ([2a0eeae](https://github.com/googleapis/python-container/commit/2a0eeae897e7f2312690eed1e52119a6a572c667)) + +## [2.17.3](https://github.com/googleapis/python-container/compare/v2.17.2...v2.17.3) (2023-02-03) + + +### Documentation + +* Add clarification on whether `NodePool.version` is a required field ([#344](https://github.com/googleapis/python-container/issues/344)) ([071c147](https://github.com/googleapis/python-container/commit/071c147df8e6edd72ff66b1997f21e881acd9b32)) + +## [2.17.2](https://github.com/googleapis/python-container/compare/v2.17.1...v2.17.2) (2023-01-30) + + +### Documentation + +* Add references for available node image types ([76cfff8](https://github.com/googleapis/python-container/commit/76cfff85542aa1a8326efbfbc6d1c95b663e1452)) +* Clarified wording around the NodePoolUpdateStrategy default behavior ([76cfff8](https://github.com/googleapis/python-container/commit/76cfff85542aa1a8326efbfbc6d1c95b663e1452)) + +## [2.17.1](https://github.com/googleapis/python-container/compare/v2.17.0...v2.17.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([cc5ebab](https://github.com/googleapis/python-container/commit/cc5ebabb1d584b137c42f45f55e6b803db72bf6d)) + + +### Documentation + +* Add documentation for enums ([cc5ebab](https://github.com/googleapis/python-container/commit/cc5ebabb1d584b137c42f45f55e6b803db72bf6d)) + +## [2.17.0](https://github.com/googleapis/python-container/compare/v2.16.0...v2.17.0) (2023-01-17) + + +### Features + +* Add support for viewing the subnet IPv6 CIDR and services IPv6 CIDR assigned to dual stack clusters ([#333](https://github.com/googleapis/python-container/issues/333)) ([4d0a583](https://github.com/googleapis/python-container/commit/4d0a583f4ff23dca157b7835d9e59ec7ca9d16da)) + +## [2.16.0](https://github.com/googleapis/python-container/compare/v2.15.0...v2.16.0) (2023-01-10) + + +### Features + +* Add etags for cluster and node pool update operations ([8eeee3b](https://github.com/googleapis/python-container/commit/8eeee3bda1e0eaae6fb375b47cc68b959f9b9feb)) 
+* Add support for python 3.11 ([8eeee3b](https://github.com/googleapis/python-container/commit/8eeee3bda1e0eaae6fb375b47cc68b959f9b9feb)) + +## [2.15.0](https://github.com/googleapis/python-container/compare/v2.14.0...v2.15.0) (2023-01-09) + + +### Features + +* Add EphemeralStorageLocalSsdConfig and LocalNvmeSsdBlockConfig APIs ([403c1ad](https://github.com/googleapis/python-container/commit/403c1ad328e6d052d9e6aab667bb74b8b6a559b7)) +* Add etags for cluster and node pool update operations ([403c1ad](https://github.com/googleapis/python-container/commit/403c1ad328e6d052d9e6aab667bb74b8b6a559b7)) +* Add support for specifying stack type for clusters ([403c1ad](https://github.com/googleapis/python-container/commit/403c1ad328e6d052d9e6aab667bb74b8b6a559b7)) +* Add WindowsNodeConfig field ([403c1ad](https://github.com/googleapis/python-container/commit/403c1ad328e6d052d9e6aab667bb74b8b6a559b7)) +* CLUSTER_SCOPE option now available in DNSScope ([403c1ad](https://github.com/googleapis/python-container/commit/403c1ad328e6d052d9e6aab667bb74b8b6a559b7)) +* Release GKE CloudDNS Cluster Scope ([403c1ad](https://github.com/googleapis/python-container/commit/403c1ad328e6d052d9e6aab667bb74b8b6a559b7)) + +## [2.14.0](https://github.com/googleapis/python-container/compare/v2.13.0...v2.14.0) (2022-12-15) + + +### Features + +* Add API to enable GKE Gateway controller ([944001d](https://github.com/googleapis/python-container/commit/944001d24215b0757da36898c26b8e22ca3f7a12)) +* Add compact placement feature for node pools ([944001d](https://github.com/googleapis/python-container/commit/944001d24215b0757da36898c26b8e22ca3f7a12)) +* Add nodeconfig resource_labels api ([944001d](https://github.com/googleapis/python-container/commit/944001d24215b0757da36898c26b8e22ca3f7a12)) +* Add support for `google.cloud.container.__version__` ([944001d](https://github.com/googleapis/python-container/commit/944001d24215b0757da36898c26b8e22ca3f7a12)) +* Add support for specifying stack type for clusters. 
This will allow clusters to be created as dual stack or toggled between IPv4 and dual stack ([#323](https://github.com/googleapis/python-container/issues/323)) ([5c1d04f](https://github.com/googleapis/python-container/commit/5c1d04f874b64aabc378aa18370e0b6be503a886))
+* Add typing to proto.Message based class attributes ([944001d](https://github.com/googleapis/python-container/commit/944001d24215b0757da36898c26b8e22ca3f7a12))
+* GKE cluster's control plane/node-pool network isolation ([944001d](https://github.com/googleapis/python-container/commit/944001d24215b0757da36898c26b8e22ca3f7a12))
+* **v1:** Add a FastSocket API ([4d61084](https://github.com/googleapis/python-container/commit/4d61084846ae9583140b04a4c68da070479d79b9))
+* **v1beta1:** Add a FastSocket API ([#319](https://github.com/googleapis/python-container/issues/319)) ([5072864](https://github.com/googleapis/python-container/commit/50728649c915df27e8876af572ef824a26a660b7))
+
+
+### Bug Fixes
+
+* Add dict typing for client_options ([944001d](https://github.com/googleapis/python-container/commit/944001d24215b0757da36898c26b8e22ca3f7a12))
+* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([25c47a8](https://github.com/googleapis/python-container/commit/25c47a8433050c1cfcdf033dff16b3dcaedb1a9b))
+* Drop usage of pkg_resources ([25c47a8](https://github.com/googleapis/python-container/commit/25c47a8433050c1cfcdf033dff16b3dcaedb1a9b))
+* Fix timeout default values ([25c47a8](https://github.com/googleapis/python-container/commit/25c47a8433050c1cfcdf033dff16b3dcaedb1a9b))
+
+
+### Documentation
+
+* **samples:** Snippetgen handling of repeated enum field ([944001d](https://github.com/googleapis/python-container/commit/944001d24215b0757da36898c26b8e22ca3f7a12))
+* **samples:** Snippetgen should call await on the operation coroutine before calling result ([25c47a8](https://github.com/googleapis/python-container/commit/25c47a8433050c1cfcdf033dff16b3dcaedb1a9b))
+
+## [2.13.0](https://github.com/googleapis/python-container/compare/v2.12.2...v2.13.0) (2022-10-26)
+
+
+### Features
+
+* launch GKE Cost Allocations configuration to the v1 GKE API ([d625e34](https://github.com/googleapis/python-container/commit/d625e3456bd37aa6cca4b0cf9de44c9ddb69ec21))
+* vulnerability scanning exposed to public ([d625e34](https://github.com/googleapis/python-container/commit/d625e3456bd37aa6cca4b0cf9de44c9ddb69ec21))
+
+## [2.12.2](https://github.com/googleapis/python-container/compare/v2.12.1...v2.12.2) (2022-10-07)
+
+
+### Bug Fixes
+
+* **deps:** Allow protobuf 3.19.5 ([#305](https://github.com/googleapis/python-container/issues/305)) ([728fc48](https://github.com/googleapis/python-container/commit/728fc485d91c113a151dea2641ccfe163a5accaf))
+
+## [2.12.1](https://github.com/googleapis/python-container/compare/v2.12.0...v2.12.1) (2022-10-03)
+
+
+### Bug Fixes
+
+* **deps:** Require protobuf >= 3.20.2 ([#301](https://github.com/googleapis/python-container/issues/301)) ([a4d1351](https://github.com/googleapis/python-container/commit/a4d1351c67659624d373f1fe4e3f5c61e99fa074))
+
+## [2.12.0](https://github.com/googleapis/python-container/compare/v2.11.2...v2.12.0) (2022-09-16)
+
+
+### Features
+
+* Added High Throughput Logging API for Google Kubernetes Engine ([#297](https://github.com/googleapis/python-container/issues/297))
([f774719](https://github.com/googleapis/python-container/commit/f7747196207f8487a4d50c93d76f8ea6e02f3f7c)) + + +### Documentation + +* missing period in description for min CPU platform ([f774719](https://github.com/googleapis/python-container/commit/f7747196207f8487a4d50c93d76f8ea6e02f3f7c)) +* ReservationAffinity key field docs incorrect ([f774719](https://github.com/googleapis/python-container/commit/f7747196207f8487a4d50c93d76f8ea6e02f3f7c)) + +## [2.11.2](https://github.com/googleapis/python-container/compare/v2.11.1...v2.11.2) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([d68c842](https://github.com/googleapis/python-container/commit/d68c842bddad08c52ff0e4f1b34b70b4db667f8e)) +* **deps:** require proto-plus >= 1.22.0 ([d68c842](https://github.com/googleapis/python-container/commit/d68c842bddad08c52ff0e4f1b34b70b4db667f8e)) + +## [2.11.1](https://github.com/googleapis/python-container/compare/v2.11.0...v2.11.1) (2022-08-02) + + +### Documentation + +* **v1beta1:** BinaryAuthorization.enabled field is marked as deprecated ([0088035](https://github.com/googleapis/python-container/commit/00880358b4021191ff90f1f2a0f08160ce7b6d6a)) +* **v1:** BinaryAuthorization.enabled field is marked as deprecated ([#272](https://github.com/googleapis/python-container/issues/272)) ([0088035](https://github.com/googleapis/python-container/commit/00880358b4021191ff90f1f2a0f08160ce7b6d6a)) + +## [2.11.0](https://github.com/googleapis/python-container/compare/v2.10.8...v2.11.0) (2022-07-16) + + +### Features + +* add audience parameter ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* add Binauthz Evaluation mode support to GKE Classic ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* add GKE Identity Service ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* add Location Policy API ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* add managed prometheus feature ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* add network tags to autopilot cluster ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* add support to modify kubelet pod pid limit in node system configuration ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* support enabling Confidential Nodes in the node pool ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* support GPU timesharing ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* support node pool blue-green upgrade ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* support spot VM ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* support Tier 1 bandwidth ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* update support for node pool labels, taints and network tags 
([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([e9dbd98](https://github.com/googleapis/python-container/commit/e9dbd9833c97733c402339a64e3440fa0dfb375e)) +* require python 3.7+ ([#266](https://github.com/googleapis/python-container/issues/266)) ([01b78af](https://github.com/googleapis/python-container/commit/01b78af7d314551d69075005abd5f4e4ac826f5f)) + +## [2.10.8](https://github.com/googleapis/python-container/compare/v2.10.7...v2.10.8) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#252](https://github.com/googleapis/python-container/issues/252)) ([f5ab2a8](https://github.com/googleapis/python-container/commit/f5ab2a89d1238b2315963f4dd8469323746459f0)) + + +### Documentation + +* fix changelog header to consistent size ([#253](https://github.com/googleapis/python-container/issues/253)) ([9db4c78](https://github.com/googleapis/python-container/commit/9db4c786430da9a5831893a31321cb0e65db4751)) + +## [2.10.7](https://github.com/googleapis/python-container/compare/v2.10.6...v2.10.7) (2022-03-22) + + +### Bug Fixes + +* test cleanup stages with try finally ([#212](https://github.com/googleapis/python-container/issues/212)) ([529bcbf](https://github.com/googleapis/python-container/commit/529bcbf618858aab17b6f5e86d25069a1266860a)) + +## [2.10.6](https://github.com/googleapis/python-container/compare/v2.10.5...v2.10.6) (2022-03-07) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#202](https://github.com/googleapis/python-container/issues/202)) ([444b806](https://github.com/googleapis/python-container/commit/444b8065a22da4c261b8b33ae8564d8329d3435d)) +* **deps:** require proto-plus>=1.15.0 ([444b806](https://github.com/googleapis/python-container/commit/444b8065a22da4c261b8b33ae8564d8329d3435d)) + +## [2.10.5](https://github.com/googleapis/python-container/compare/v2.10.4...v2.10.5) (2022-02-16) + + +### Documentation + +* **samples:** add usage samples to show handling of LRO response Operation ([#191](https://github.com/googleapis/python-container/issues/191)) ([309ad62](https://github.com/googleapis/python-container/commit/309ad6219a6e80d08bcd365a163e8273a6413ede)) + +## [2.10.4](https://github.com/googleapis/python-container/compare/v2.10.3...v2.10.4) (2022-02-14) + + +### Bug Fixes + +* **deps:** move libcst to extras ([#194](https://github.com/googleapis/python-container/issues/194)) ([1c308c2](https://github.com/googleapis/python-container/commit/1c308c2e44dc16d0e8df5976de0b65d1e7c2041e)) + +## [2.10.3](https://github.com/googleapis/python-container/compare/v2.10.2...v2.10.3) (2022-02-11) + + +### Documentation + +* add generated snippets ([#192](https://github.com/googleapis/python-container/issues/192)) ([e3a3a05](https://github.com/googleapis/python-container/commit/e3a3a056d80ac713edbf5cb4a8358063f8a83214)) + +## [2.10.2](https://github.com/googleapis/python-container/compare/v2.10.1...v2.10.2) (2022-02-04) + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([07fbf3c](https://github.com/googleapis/python-container/commit/07fbf3cb1e140abf020e7cfbd083ed79aae701bf)) + +## 
[2.10.1](https://www.github.com/googleapis/python-container/compare/v2.10.0...v2.10.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([838a97b](https://www.github.com/googleapis/python-container/commit/838a97b0a45dcb16e81ec3795beaf35eaa5e460d)) +* **deps:** require google-api-core >= 1.28.0 ([838a97b](https://www.github.com/googleapis/python-container/commit/838a97b0a45dcb16e81ec3795beaf35eaa5e460d)) + + +### Documentation + +* list oneofs in docstring ([838a97b](https://www.github.com/googleapis/python-container/commit/838a97b0a45dcb16e81ec3795beaf35eaa5e460d)) + +## [2.10.0](https://www.github.com/googleapis/python-container/compare/v2.9.0...v2.10.0) (2021-10-13) + + +### Features + +* add support for python 3.10 ([#160](https://www.github.com/googleapis/python-container/issues/160)) ([ab146a5](https://www.github.com/googleapis/python-container/commit/ab146a5017805ec200dab2b74e025de0c647d742)) + +## [2.9.0](https://www.github.com/googleapis/python-container/compare/v2.8.1...v2.9.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#157](https://www.github.com/googleapis/python-container/issues/157)) ([03953f8](https://www.github.com/googleapis/python-container/commit/03953f8087b2583369b877672be81f2b8638020c)) + +## [2.8.1](https://www.github.com/googleapis/python-container/compare/v2.8.0...v2.8.1) (2021-10-04) + + +### Bug Fixes + +* improper types in pagers generation ([6814251](https://www.github.com/googleapis/python-container/commit/68142512b75ee81a1fee0e982edd00a617706a00)) + +## [2.8.0](https://www.github.com/googleapis/python-container/compare/v2.7.1...v2.8.0) (2021-09-23) + + +### Features + +* added a flag to enable/disable gvnic on a node pool ([#147](https://www.github.com/googleapis/python-container/issues/147)) ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) +* added configuration for node pool defaults, autopilot, logging and monitoring ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) +* added configuration for workload certificates and identity service component ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) +* added node pool level network config ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) +* added the option to list supported windows versions ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) +* added the option to specify L4 load balancer configuration and IP v6 configuration ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) +* added update support for node pool labels, taints and network tags ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([c912605](https://www.github.com/googleapis/python-container/commit/c9126057cde7fc28094785cceab9cf43e42ca8e0)) +* deprecated cluster status condition code ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) +* deprecated KALM addon config option ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) +* **deps:** require proto-plus 1.15.0 
([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) + + +### Documentation + +* clarified SetNodePoolSize API behavior ([616b21a](https://www.github.com/googleapis/python-container/commit/616b21a6abe2b0c4dd647cf56d544c2aff7312f7)) + +## [2.7.1](https://www.github.com/googleapis/python-container/compare/v2.7.0...v2.7.1) (2021-07-24) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#133](https://www.github.com/googleapis/python-container/issues/133)) ([6e34b81](https://www.github.com/googleapis/python-container/commit/6e34b81070b14de226c703191e8fe7f37357dea8)) + +## [2.7.0](https://www.github.com/googleapis/python-container/compare/v2.6.1...v2.7.0) (2021-07-22) + + +### Features + +* add Samples section to CONTRIBUTING.rst ([#129](https://www.github.com/googleapis/python-container/issues/129)) ([a5905b8](https://www.github.com/googleapis/python-container/commit/a5905b820c970217a3ad1604982a7e38412d8dda)) + +## [2.6.1](https://www.github.com/googleapis/python-container/compare/v2.6.0...v2.6.1) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#128](https://www.github.com/googleapis/python-container/issues/128)) ([7a8fb93](https://www.github.com/googleapis/python-container/commit/7a8fb93cc083ffbd44b9c321a706ce6f37066ee1)) + +## [2.6.0](https://www.github.com/googleapis/python-container/compare/v2.5.0...v2.6.0) (2021-07-09) + + +### Features + +* allow updating security group on existing clusters ([#120](https://www.github.com/googleapis/python-container/issues/120)) ([28a3fc9](https://www.github.com/googleapis/python-container/commit/28a3fc94cd7587b5900408bbadf994f143b0d0c3)) +* allow updating security group on existing clusters ([#123](https://www.github.com/googleapis/python-container/issues/123)) ([e0d70e9](https://www.github.com/googleapis/python-container/commit/e0d70e98991eec24880497516829a0d4ed1dbc18)) + +## [2.5.0](https://www.github.com/googleapis/python-container/compare/v2.4.1...v2.5.0) (2021-06-30) + + +### Features + +* add always_use_jwt_access ([#119](https://www.github.com/googleapis/python-container/issues/119)) ([bb598c4](https://www.github.com/googleapis/python-container/commit/bb598c45f5f2c5ca75a638c17168d6a4a15547a4)) +* support for NodeAutoprovisioning ImageType ([#107](https://www.github.com/googleapis/python-container/issues/107)) ([d56f699](https://www.github.com/googleapis/python-container/commit/d56f699dad3e7fdf654861e36a007a79df760790)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-container/issues/1127)) ([#112](https://www.github.com/googleapis/python-container/issues/112)) ([5a3941a](https://www.github.com/googleapis/python-container/commit/5a3941a03c48e3cef4d21ac10fc8e7b1b594ad1e)), closes [#1126](https://www.github.com/googleapis/python-container/issues/1126) + +## [2.4.1](https://www.github.com/googleapis/python-container/compare/v2.4.0...v2.4.1) (2021-05-16) + + +### Bug Fixes + +* **deps:** add packaging requirement ([#97](https://www.github.com/googleapis/python-container/issues/97)) ([1c46866](https://www.github.com/googleapis/python-container/commit/1c468663bd59bcb529311bd5736861b332a269b3)) + +## [2.4.0](https://www.github.com/googleapis/python-container/compare/v2.3.1...v2.4.0) (2021-03-24) + + +### Features + +* add `from_service_account_info` ([#66](https://www.github.com/googleapis/python-container/issues/66)) 
([f4f154d](https://www.github.com/googleapis/python-container/commit/f4f154db737fed68c49303133f4479199c0fdb12))
+
+## [2.3.1](https://www.github.com/googleapis/python-container/compare/v2.3.0...v2.3.1) (2021-02-18)
+
+
+### Bug Fixes
+
+* remove client recv msg limit; add enums to `types/__init__.py` ([#60](https://www.github.com/googleapis/python-container/issues/60)) ([9207193](https://www.github.com/googleapis/python-container/commit/9207193fbaae7c6d91d87ffb9db57223f02544d6))
+
+## [2.3.0](https://www.github.com/googleapis/python-container/compare/v2.2.0...v2.3.0) (2020-12-08)
+
+
+### Features
+
+* sync v1beta1 GKE API; deprecate SetLocations and use UpdateCluster; support for sysctls config in Linux nodes; support for node kubelet config controlling CPU manager policy, CFS quota; support for Customer Managed Encryption ([17f0a29](https://www.github.com/googleapis/python-container/commit/17f0a29401ffeaafca6166f9f6169a83c00b145a))
+
+
+### Bug Fixes
+
+* Update CODEOWNERS ([#59](https://www.github.com/googleapis/python-container/issues/59)) ([0f9a41e](https://www.github.com/googleapis/python-container/commit/0f9a41eb3394d4940941bc38a3e2e5cb3ad6b8dd)), closes [#58](https://www.github.com/googleapis/python-container/issues/58)
+
+
+### Documentation
+
+* **python:** update intersphinx for grpc and auth ([#53](https://www.github.com/googleapis/python-container/issues/53)) ([6a0fef7](https://www.github.com/googleapis/python-container/commit/6a0fef7f30976357cc9f42c0213931d1a2c76eac))
+
+## [2.2.0](https://www.github.com/googleapis/python-container/compare/v2.1.0...v2.2.0) (2020-11-17)
+
+All changes are from [#51](https://www.github.com/googleapis/python-container/issues/51) / [d3f5465](https://www.github.com/googleapis/python-container/commit/d3f546574300cd18bb0cb1627f226cfe34ee8098)
+
+### Features
+
+* support for GetJSONWebKeys
+* support for Workload Identity
+* support for Gvisor in nodes
+* support for node reservation affinity
+* support for Customer Managed Encryption in nodes
+* support for NodeLocalDNS
+* support for ConfigConnector
+* support for private cluster VPC peering
+* support for CloudRun load balancers
+* support using routes for pod IPs
+* support for Shielded Nodes
+* support for release channels
+* support for disabling default sNAT
+* operations now store more granular progress
+* support for node Surge Upgrades
+* support for updating node pool locations
+* support for Node Auto Provisioning
+* support for specifying node disk size and type
+
+
+### Bug Fixes
+
+* deprecate SetLocations; use UpdateCluster
+* provide name alias for GetOperation (as method signature annotation)
+* deprecate basic auth fields (removed in 1.19 clusters)
+* deprecate Cluster/NodePool.status_message; use conditions
+
+## [2.1.0](https://www.github.com/googleapis/python-container/compare/v2.0.1...v2.1.0) (2020-09-16)
+
+
+### Features
+
+* regenerate client lib to pick up new mtls env ([#44](https://www.github.com/googleapis/python-container/issues/44)) ([c4ffea0](https://www.github.com/googleapis/python-container/commit/c4ffea02fbc6c6566a4e772e2b353a5b4dc5b2fc))
+
+## [2.0.1](https://www.github.com/googleapis/python-container/compare/v2.0.0...v2.0.1) (2020-07-24)
+
+
+### Bug Fixes
+
+* Update README.rst ([#35](https://www.github.com/googleapis/python-container/issues/35)) ([e7d1c66](https://www.github.com/googleapis/python-container/commit/e7d1c66a3f14dc9554a9fbdc78ec16bc912de5f9))
+
+
+### Documentation
+
+* link to migration guide ([#39](https://www.github.com/googleapis/python-container/issues/39)) ([5341b96](https://www.github.com/googleapis/python-container/commit/5341b96719a82cb8509f4dcc9e66ee05acd95ae9))
+
+## [2.0.0](https://www.github.com/googleapis/python-container/compare/v1.0.1...v2.0.0) (2020-07-16)
+
+
+### ⚠ BREAKING CHANGES
+
+* migrate to microgenerator (#33). See the [migration guide](https://github.com/googleapis/python-container/blob/main/UPGRADING.md).
+
+### Features
+
+* migrate to microgenerator ([#33](https://www.github.com/googleapis/python-container/issues/33)) ([aa9b20c](https://www.github.com/googleapis/python-container/commit/aa9b20c6f4ccb6dff305bfcd72e1bde4a1ee86cd))
+
+## [1.0.1](https://www.github.com/googleapis/python-container/compare/v1.0.0...v1.0.1) (2020-06-16)
+
+
+### Bug Fixes
+
+* fix `release_status` in `setup.py` ([#27](https://www.github.com/googleapis/python-container/issues/27)) ([d853d99](https://www.github.com/googleapis/python-container/commit/d853d99c73f4716721aa26d96ec6bc1a5c916dc4))
+
+## [1.0.0](https://www.github.com/googleapis/python-container/compare/v0.5.0...v1.0.0) (2020-06-16)
+
+
+### Features
+
+* release as production/stable ([#24](https://www.github.com/googleapis/python-container/issues/24)) ([0e0095d](https://www.github.com/googleapis/python-container/commit/0e0095d8fad004d8098af62c6c27a40aa96d6257))
+
+## [0.5.0](https://www.github.com/googleapis/python-container/compare/v0.4.0...v0.5.0) (2020-04-14)
+
+
+### Features
+
+* make `project_id`, `zone`, `cluster_id`, `node_pool` optional arguments to methods in `cluster_manager_client`; change default timeout config; add 2.7 sunset warning; bump copyright year to 2020 (via synth) ([#8](https://www.github.com/googleapis/python-container/issues/8)) ([6afc050](https://www.github.com/googleapis/python-container/commit/6afc050f21c57a2d0eda3327c07510f2226aa6a6))
+
+## [0.4.0](https://www.github.com/googleapis/python-container/compare/v0.3.0...v0.4.0) (2020-02-03)
+
+
+### Features
+
+* **container:** add 'list_usable_subnetworks' method; apply proto annotations (via synth) ([#9741](https://www.github.com/googleapis/python-container/issues/9741)) ([541a9e3](https://www.github.com/googleapis/python-container/commit/541a9e3974c38e2601c17c569099ce8602a1c4be))
+
+## 0.3.0
+
+07-30-2019 10:28 PDT
+
+
+### Implementation Changes
+
+### New Features
+- Add 'client_options' support, update list method docstrings (via synth).
([#8501](https://github.com/googleapis/google-cloud-python/pull/8501)) +- Add synth support for v1beta1 API version (via manual synth). ([#8436](https://github.com/googleapis/google-cloud-python/pull/8436)) +- Allow kwargs to be passed to create_channel (via synth). ([#8384](https://github.com/googleapis/google-cloud-python/pull/8384)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) +- Update pin for 'grpc-google-iam-v1' to 0.12.3+. ([#8647](https://github.com/googleapis/google-cloud-python/pull/8647)) +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) + +### Documentation +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) + +### Internal / Testing Changes +- Pin black version (via synth). ([#8575](https://github.com/googleapis/google-cloud-python/pull/8575)) +- Declare encoding as utf-8 in pb2 files (via synth). ([#8347](https://github.com/googleapis/google-cloud-python/pull/8347)) +- Add disclaimer to auto-generated template files (via synth). ([#8309](https://github.com/googleapis/google-cloud-python/pull/8309)) +- Update noxfile and setup.py (via synth). ([#8298](https://github.com/googleapis/google-cloud-python/pull/8298)) +- Blacken (via synth). ([#8285](https://github.com/googleapis/google-cloud-python/pull/8285)) +- Add routing header to method metadata, add nox session `docs` (via synth). ([#7922](https://github.com/googleapis/google-cloud-python/pull/7922)) +- Copy proto files alongside protoc versions. +- Minor gapic-generator change. ([#7225](https://github.com/googleapis/google-cloud-python/pull/7225)) +- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) +- Update copyright headers ([#7140](https://github.com/googleapis/google-cloud-python/pull/7140)) +- Protoc-generated serialization update. ([#7078](https://github.com/googleapis/google-cloud-python/pull/7078)) +- Pick up stub docstring fix in GAPIC generator. ([#6966](https://github.com/googleapis/google-cloud-python/pull/6966)) + +## 0.2.1 + +12-17-2018 16:36 PST + + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Improve linkage between container docs pages. ([#6852](https://github.com/googleapis/google-cloud-python/pull/6852)) + +### Internal / Testing Changes +- Add baseline for synth.metadata + +## 0.2.0 + +12-04-2018 11:28 PST + + +### Implementation Changes +- Import `iam.policy` from `google.api_core.iam.policy` ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) +- Pick up fixes to GAPIC generator. ([#6634](https://github.com/googleapis/google-cloud-python/pull/6634)) +- Fix `client_info` bug, update docstrings. 
([#6407](https://github.com/googleapis/google-cloud-python/pull/6407)) +- Avoid overwriting '__module__' of messages from shared modules. ([#5364](https://github.com/googleapis/google-cloud-python/pull/5364)) +- Fix bad trove classifier + +### Dependencies +- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) + +### Documentation +- Docs: normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) +- Container: harmonize / DRY 'README.rst' / 'docs/index.rst'. ([#6018](https://github.com/googleapis/google-cloud-python/pull/6018)) +- Rename releases to changelog and include from CHANGELOG.md ([#5191](https://github.com/googleapis/google-cloud-python/pull/5191)) + +### Internal / Testing Changes +- Update noxfile. +- blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- Omit local dependencies from coverage. ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Unblack container gapic and protos. +- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) +- Pass posargs to py.test ([#6653](https://github.com/googleapis/google-cloud-python/pull/6653)) +- Update synth.py yaml location ([#6480](https://github.com/googleapis/google-cloud-python/pull/6480)) +- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) +- Container: add 'synth.py'. ([#6084](https://github.com/googleapis/google-cloud-python/pull/6084)) +- Nox: use inplace installs ([#5865](https://github.com/googleapis/google-cloud-python/pull/5865)) +- Modify system tests to use prerelease versions of grpcio ([#5304](https://github.com/googleapis/google-cloud-python/pull/5304)) +- Add Test runs for Python 3.7 and remove 3.4 ([#5295](https://github.com/googleapis/google-cloud-python/pull/5295)) + +## 0.1.1 + +### Dependencies + +- Update dependency range for api-core to include v1.0.0 releases (#4944) + +### Documentation + +- Replacing references to `stable/` docs with `latest/`. (#4638) + +### Testing and internal changes + +- Re-enable lint for tests, remove usage of pylint (#4921) +- Normalize all setup.py files (#4909) +- nox unittest updates (#4646) + +## 0.1.0 + +[![release level](https://img.shields.io/badge/release%20level-alpha-orange.svg?style=flat)](https://cloud.google.com/terms/launch-stages) + +Google Kubernetes Engine is a managed environment for deploying containerized +applications. It brings our latest innovations in developer productivity, +resource efficiency, automated operations, and open source flexibility to +accelerate your time to market. 
+ +PyPI: https://pypi.org/project/google-cloud-container/0.1.0/ diff --git a/packages/google-cloud-container/CODE_OF_CONDUCT.md b/packages/google-cloud-container/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-container/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. 
We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-container/CONTRIBUTING.rst b/packages/google-cloud-container/CONTRIBUTING.rst new file mode 100644 index 000000000000..a92283b22653 --- /dev/null +++ b/packages/google-cloud-container/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. 
E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+   $ nox -s unit
+
+- To run a single unit test::
+
+   $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+   $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
+    For expediency, we do not run them in older versions of Python 3.
+
+    This alone will not run the tests. You'll need to change some local
+    auth settings and change some configuration in your project to
+    run all the tests.
+
+- System tests will be run against an actual project. You should use local
+  credentials from gcloud when possible. See `Best practices for application
+  authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__.
+  Some tests require a service account. For those tests see `Authenticating as a
+  service account <https://cloud.google.com/docs/authentication/production>`__.
+  A sketch of what such a system test can look like follows below.
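+
+For orientation, here is a minimal, illustrative sketch of the shape a system
+test can take. The test name, the ``GOOGLE_CLOUD_PROJECT`` variable, and the
+use of a ``pytest`` skip marker are assumptions for the example, not
+conventions mandated by this repository:
+
+.. code-block:: python
+
+   # Illustrative only: a smoke test that calls the real API using
+   # Application Default Credentials (for example, from
+   # `gcloud auth application-default login`).
+   import os
+
+   import pytest
+
+   from google.cloud import container_v1
+
+
+   @pytest.mark.skipif(
+       "GOOGLE_CLOUD_PROJECT" not in os.environ,
+       reason="requires a real Google Cloud project",
+   )
+   def test_list_clusters_smoke():
+       client = container_v1.ClusterManagerClient()
+       project = os.environ["GOOGLE_CLOUD_PROJECT"]
+       # The "-" wildcard asks for clusters across all locations.
+       response = client.list_clusters(parent=f"projects/{project}/locations/-")
+       assert response is not None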
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-container
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-container/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://cla.developers.google.com/about/google-individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-container/LICENSE b/packages/google-cloud-container/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-container/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-container/MANIFEST.in b/packages/google-cloud-container/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-container/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-container/README.rst b/packages/google-cloud-container/README.rst new file mode 100644 index 000000000000..1b9f644ebdfc --- /dev/null +++ b/packages/google-cloud-container/README.rst @@ -0,0 +1,108 @@ +Python Client for Kubernetes Engine +=================================== + +|stable| |pypi| |versions| + +`Kubernetes Engine`_: The Google Kubernetes Engine API is used for building and managing container based applications, powered by the open source Kubernetes technology. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-container.svg + :target: https://pypi.org/project/google-cloud-container/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-container.svg + :target: https://pypi.org/project/google-cloud-container/ +.. _Kubernetes Engine: https://cloud.google.com/kubernetes-engine/ +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/container/latest +.. _Product Documentation: https://cloud.google.com/kubernetes-engine/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Kubernetes Engine.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. 
_Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Kubernetes Engine.: https://cloud.google.com/kubernetes-engine/
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-container
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-container
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Kubernetes Engine
+  to see other available methods on the client.
+- Read the `Kubernetes Engine Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Kubernetes Engine Product documentation: https://cloud.google.com/kubernetes-engine/
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-container/SECURITY.md b/packages/google-cloud-container/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-container/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
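As a quick sanity check of the README's install steps above, a minimal usage sketch against the new package could look like the following. This is illustrative only and not part of the diff: ``PROJECT_ID`` is a placeholder, and Application Default Credentials are assumed to be configured per the Setup Authentication link.

.. code-block:: python

    # Minimal sketch: list GKE clusters across all locations of a project.
    # Assumes credentials come from the environment; PROJECT_ID is a
    # placeholder, not a value taken from this change.
    from google.cloud import container_v1

    client = container_v1.ClusterManagerClient()
    response = client.list_clusters(parent="projects/PROJECT_ID/locations/-")
    for cluster in response.clusters:
        print(cluster.name, cluster.status)

The ``-`` wildcard in ``locations/-`` asks the v1 API to aggregate clusters across all zones and regions of the project.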
diff --git a/packages/google-cloud-container/container-v1-py.tar.gz b/packages/google-cloud-container/container-v1-py.tar.gz new file mode 100644 index 000000000000..e952e390fa0c Binary files /dev/null and b/packages/google-cloud-container/container-v1-py.tar.gz differ diff --git a/packages/google-cloud-container/container-v1beta1-py.tar.gz b/packages/google-cloud-container/container-v1beta1-py.tar.gz new file mode 100644 index 000000000000..08a13544e2c8 Binary files /dev/null and b/packages/google-cloud-container/container-v1beta1-py.tar.gz differ diff --git a/packages/google-cloud-container/docs/CHANGELOG.md b/packages/google-cloud-container/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-container/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-container/docs/README.rst b/packages/google-cloud-container/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-container/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-container/docs/_static/custom.css b/packages/google-cloud-container/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-container/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-container/docs/_templates/layout.html b/packages/google-cloud-container/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-container/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+             As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+             Library versions released prior to that date will continue to be available. For more information please
+             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-container/docs/conf.py b/packages/google-cloud-container/docs/conf.py new file mode 100644 index 000000000000..288c5268c208 --- /dev/null +++ b/packages/google-cloud-container/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-container documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-container" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-container", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-container-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-container.tex", + "google-cloud-container Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-container", + "google-cloud-container Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-container", + "google-cloud-container Documentation", + author, + "google-cloud-container", + "google-cloud-container Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-container/docs/container_v1/cluster_manager.rst b/packages/google-cloud-container/docs/container_v1/cluster_manager.rst new file mode 100644 index 000000000000..dc27a3bd5897 --- /dev/null +++ b/packages/google-cloud-container/docs/container_v1/cluster_manager.rst @@ -0,0 +1,10 @@ +ClusterManager +-------------------------------- + +.. automodule:: google.cloud.container_v1.services.cluster_manager + :members: + :inherited-members: + +.. automodule:: google.cloud.container_v1.services.cluster_manager.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-container/docs/container_v1/services.rst b/packages/google-cloud-container/docs/container_v1/services.rst new file mode 100644 index 000000000000..a98a2319ff3c --- /dev/null +++ b/packages/google-cloud-container/docs/container_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Container v1 API +========================================== +.. 
toctree::
+    :maxdepth: 2
+
+    cluster_manager
diff --git a/packages/google-cloud-container/docs/container_v1/types.rst b/packages/google-cloud-container/docs/container_v1/types.rst
new file mode 100644
index 000000000000..3ae28cbf51a9
--- /dev/null
+++ b/packages/google-cloud-container/docs/container_v1/types.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Container v1 API
+=======================================
+
+.. automodule:: google.cloud.container_v1.types
+    :members:
+    :show-inheritance:
diff --git a/packages/google-cloud-container/docs/container_v1beta1/cluster_manager.rst b/packages/google-cloud-container/docs/container_v1beta1/cluster_manager.rst
new file mode 100644
index 000000000000..3a3a443d5810
--- /dev/null
+++ b/packages/google-cloud-container/docs/container_v1beta1/cluster_manager.rst
@@ -0,0 +1,10 @@
+ClusterManager
+--------------------------------
+
+.. automodule:: google.cloud.container_v1beta1.services.cluster_manager
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.container_v1beta1.services.cluster_manager.pagers
+    :members:
+    :inherited-members:
diff --git a/packages/google-cloud-container/docs/container_v1beta1/services.rst b/packages/google-cloud-container/docs/container_v1beta1/services.rst
new file mode 100644
index 000000000000..8b6dae4d5e90
--- /dev/null
+++ b/packages/google-cloud-container/docs/container_v1beta1/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Container v1beta1 API
+===============================================
+.. toctree::
+    :maxdepth: 2
+
+    cluster_manager
diff --git a/packages/google-cloud-container/docs/container_v1beta1/types.rst b/packages/google-cloud-container/docs/container_v1beta1/types.rst
new file mode 100644
index 000000000000..d4baa1c5101b
--- /dev/null
+++ b/packages/google-cloud-container/docs/container_v1beta1/types.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Container v1beta1 API
+============================================
+
+.. automodule:: google.cloud.container_v1beta1.types
+    :members:
+    :show-inheritance:
diff --git a/packages/google-cloud-container/docs/index.rst b/packages/google-cloud-container/docs/index.rst
new file mode 100644
index 000000000000..ea6cc8d8239a
--- /dev/null
+++ b/packages/google-cloud-container/docs/index.rst
@@ -0,0 +1,34 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+This package includes clients for multiple versions of Kubernetes Engine.
+By default, you will get version ``container_v1``.
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    container_v1/services
+    container_v1/types
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    container_v1beta1/services
+    container_v1beta1/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-container`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG
diff --git a/packages/google-cloud-container/docs/multiprocessing.rst b/packages/google-cloud-container/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-cloud-container/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
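The fork-safety note above is easiest to see in code. The following sketch is illustrative only and not part of this change (the project IDs are placeholders): each worker creates its own client after the fork instead of inheriting one from the parent process.

.. code-block:: python

    # Sketch of the multiprocessing guidance: instantiate the client inside
    # the worker, i.e. after os.fork(), rather than sharing one across
    # processes.
    import multiprocessing

    from google.cloud import container_v1

    def count_clusters(project_id: str) -> int:
        # Client created post-fork, so its gRPC channel is not shared.
        client = container_v1.ClusterManagerClient()
        response = client.list_clusters(parent=f"projects/{project_id}/locations/-")
        return len(response.clusters)

    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(count_clusters, ["project-a", "project-b"]))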
diff --git a/packages/google-cloud-container/google/cloud/container/__init__.py b/packages/google-cloud-container/google/cloud/container/__init__.py new file mode 100644 index 000000000000..133ec0095b09 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container/__init__.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.container import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.container_v1.services.cluster_manager.async_client import ( + ClusterManagerAsyncClient, +) +from google.cloud.container_v1.services.cluster_manager.client import ( + ClusterManagerClient, +) +from google.cloud.container_v1.types.cluster_service import ( + AcceleratorConfig, + AdditionalNodeNetworkConfig, + AdditionalPodNetworkConfig, + AdditionalPodRangesConfig, + AddonsConfig, + AdvancedDatapathObservabilityConfig, + AdvancedMachineFeatures, + AuthenticatorGroupsConfig, + Autopilot, + AutopilotCompatibilityIssue, + AutoprovisioningNodePoolDefaults, + AutoUpgradeOptions, + BestEffortProvisioning, + BinaryAuthorization, + BlueGreenSettings, + CancelOperationRequest, + CheckAutopilotCompatibilityRequest, + CheckAutopilotCompatibilityResponse, + ClientCertificateConfig, + CloudRunConfig, + Cluster, + ClusterAutoscaling, + ClusterUpdate, + CompleteIPRotationRequest, + CompleteNodePoolUpgradeRequest, + ConfidentialNodes, + ConfigConnectorConfig, + CostManagementConfig, + CreateClusterRequest, + CreateNodePoolRequest, + DailyMaintenanceWindow, + DatabaseEncryption, + DatapathProvider, + DefaultSnatStatus, + DeleteClusterRequest, + DeleteNodePoolRequest, + DnsCacheConfig, + DNSConfig, + EphemeralStorageLocalSsdConfig, + FastSocket, + Fleet, + GatewayAPIConfig, + GcePersistentDiskCsiDriverConfig, + GcfsConfig, + GcpFilestoreCsiDriverConfig, + GcsFuseCsiDriverConfig, + GetClusterRequest, + GetJSONWebKeysRequest, + GetJSONWebKeysResponse, + GetNodePoolRequest, + GetOpenIDConfigRequest, + GetOpenIDConfigResponse, + GetOperationRequest, + GetServerConfigRequest, + GkeBackupAgentConfig, + GPUDriverInstallationConfig, + GPUSharingConfig, + HorizontalPodAutoscaling, + HttpLoadBalancing, + IdentityServiceConfig, + ILBSubsettingConfig, + IntraNodeVisibilityConfig, + IPAllocationPolicy, + IPv6AccessType, + Jwk, + K8sBetaAPIConfig, + KubernetesDashboard, + LegacyAbac, + LinuxNodeConfig, + ListClustersRequest, + ListClustersResponse, + ListNodePoolsRequest, + ListNodePoolsResponse, + ListOperationsRequest, + ListOperationsResponse, + ListUsableSubnetworksRequest, + ListUsableSubnetworksResponse, + LocalNvmeSsdBlockConfig, + LoggingComponentConfig, + LoggingConfig, + LoggingVariantConfig, + MaintenanceExclusionOptions, + MaintenancePolicy, + MaintenanceWindow, + ManagedPrometheusConfig, + MasterAuth, + MasterAuthorizedNetworksConfig, + MaxPodsConstraint, + MeshCertificates, + MonitoringComponentConfig, + MonitoringConfig, + NetworkConfig, + NetworkPolicy, + 
NetworkPolicyConfig, + NetworkTags, + NodeConfig, + NodeConfigDefaults, + NodeKubeletConfig, + NodeLabels, + NodeManagement, + NodeNetworkConfig, + NodePool, + NodePoolAutoConfig, + NodePoolAutoscaling, + NodePoolDefaults, + NodePoolLoggingConfig, + NodePoolUpdateStrategy, + NodeTaint, + NodeTaints, + NotificationConfig, + Operation, + OperationProgress, + PodCIDROverprovisionConfig, + PrivateClusterConfig, + PrivateClusterMasterGlobalAccessConfig, + PrivateIPv6GoogleAccess, + RangeInfo, + RecurringTimeWindow, + ReleaseChannel, + ReservationAffinity, + ResourceLabels, + ResourceLimit, + ResourceUsageExportConfig, + RollbackNodePoolUpgradeRequest, + SandboxConfig, + SecurityBulletinEvent, + SecurityPostureConfig, + ServerConfig, + ServiceExternalIPsConfig, + SetAddonsConfigRequest, + SetLabelsRequest, + SetLegacyAbacRequest, + SetLocationsRequest, + SetLoggingServiceRequest, + SetMaintenancePolicyRequest, + SetMasterAuthRequest, + SetMonitoringServiceRequest, + SetNetworkPolicyRequest, + SetNodePoolAutoscalingRequest, + SetNodePoolManagementRequest, + SetNodePoolSizeRequest, + ShieldedInstanceConfig, + ShieldedNodes, + SoleTenantConfig, + StackType, + StartIPRotationRequest, + StatusCondition, + TimeWindow, + UpdateClusterRequest, + UpdateMasterRequest, + UpdateNodePoolRequest, + UpgradeAvailableEvent, + UpgradeEvent, + UpgradeResourceType, + UsableSubnetwork, + UsableSubnetworkSecondaryRange, + VerticalPodAutoscaling, + VirtualNIC, + WindowsNodeConfig, + WorkloadIdentityConfig, + WorkloadMetadataConfig, + WorkloadPolicyConfig, +) + +__all__ = ( + "ClusterManagerClient", + "ClusterManagerAsyncClient", + "AcceleratorConfig", + "AdditionalNodeNetworkConfig", + "AdditionalPodNetworkConfig", + "AdditionalPodRangesConfig", + "AddonsConfig", + "AdvancedDatapathObservabilityConfig", + "AdvancedMachineFeatures", + "AuthenticatorGroupsConfig", + "Autopilot", + "AutopilotCompatibilityIssue", + "AutoprovisioningNodePoolDefaults", + "AutoUpgradeOptions", + "BestEffortProvisioning", + "BinaryAuthorization", + "BlueGreenSettings", + "CancelOperationRequest", + "CheckAutopilotCompatibilityRequest", + "CheckAutopilotCompatibilityResponse", + "ClientCertificateConfig", + "CloudRunConfig", + "Cluster", + "ClusterAutoscaling", + "ClusterUpdate", + "CompleteIPRotationRequest", + "CompleteNodePoolUpgradeRequest", + "ConfidentialNodes", + "ConfigConnectorConfig", + "CostManagementConfig", + "CreateClusterRequest", + "CreateNodePoolRequest", + "DailyMaintenanceWindow", + "DatabaseEncryption", + "DefaultSnatStatus", + "DeleteClusterRequest", + "DeleteNodePoolRequest", + "DnsCacheConfig", + "DNSConfig", + "EphemeralStorageLocalSsdConfig", + "FastSocket", + "Fleet", + "GatewayAPIConfig", + "GcePersistentDiskCsiDriverConfig", + "GcfsConfig", + "GcpFilestoreCsiDriverConfig", + "GcsFuseCsiDriverConfig", + "GetClusterRequest", + "GetJSONWebKeysRequest", + "GetJSONWebKeysResponse", + "GetNodePoolRequest", + "GetOpenIDConfigRequest", + "GetOpenIDConfigResponse", + "GetOperationRequest", + "GetServerConfigRequest", + "GkeBackupAgentConfig", + "GPUDriverInstallationConfig", + "GPUSharingConfig", + "HorizontalPodAutoscaling", + "HttpLoadBalancing", + "IdentityServiceConfig", + "ILBSubsettingConfig", + "IntraNodeVisibilityConfig", + "IPAllocationPolicy", + "Jwk", + "K8sBetaAPIConfig", + "KubernetesDashboard", + "LegacyAbac", + "LinuxNodeConfig", + "ListClustersRequest", + "ListClustersResponse", + "ListNodePoolsRequest", + "ListNodePoolsResponse", + "ListOperationsRequest", + "ListOperationsResponse", + 
"ListUsableSubnetworksRequest", + "ListUsableSubnetworksResponse", + "LocalNvmeSsdBlockConfig", + "LoggingComponentConfig", + "LoggingConfig", + "LoggingVariantConfig", + "MaintenanceExclusionOptions", + "MaintenancePolicy", + "MaintenanceWindow", + "ManagedPrometheusConfig", + "MasterAuth", + "MasterAuthorizedNetworksConfig", + "MaxPodsConstraint", + "MeshCertificates", + "MonitoringComponentConfig", + "MonitoringConfig", + "NetworkConfig", + "NetworkPolicy", + "NetworkPolicyConfig", + "NetworkTags", + "NodeConfig", + "NodeConfigDefaults", + "NodeKubeletConfig", + "NodeLabels", + "NodeManagement", + "NodeNetworkConfig", + "NodePool", + "NodePoolAutoConfig", + "NodePoolAutoscaling", + "NodePoolDefaults", + "NodePoolLoggingConfig", + "NodeTaint", + "NodeTaints", + "NotificationConfig", + "Operation", + "OperationProgress", + "PodCIDROverprovisionConfig", + "PrivateClusterConfig", + "PrivateClusterMasterGlobalAccessConfig", + "RangeInfo", + "RecurringTimeWindow", + "ReleaseChannel", + "ReservationAffinity", + "ResourceLabels", + "ResourceLimit", + "ResourceUsageExportConfig", + "RollbackNodePoolUpgradeRequest", + "SandboxConfig", + "SecurityBulletinEvent", + "SecurityPostureConfig", + "ServerConfig", + "ServiceExternalIPsConfig", + "SetAddonsConfigRequest", + "SetLabelsRequest", + "SetLegacyAbacRequest", + "SetLocationsRequest", + "SetLoggingServiceRequest", + "SetMaintenancePolicyRequest", + "SetMasterAuthRequest", + "SetMonitoringServiceRequest", + "SetNetworkPolicyRequest", + "SetNodePoolAutoscalingRequest", + "SetNodePoolManagementRequest", + "SetNodePoolSizeRequest", + "ShieldedInstanceConfig", + "ShieldedNodes", + "SoleTenantConfig", + "StartIPRotationRequest", + "StatusCondition", + "TimeWindow", + "UpdateClusterRequest", + "UpdateMasterRequest", + "UpdateNodePoolRequest", + "UpgradeAvailableEvent", + "UpgradeEvent", + "UsableSubnetwork", + "UsableSubnetworkSecondaryRange", + "VerticalPodAutoscaling", + "VirtualNIC", + "WindowsNodeConfig", + "WorkloadIdentityConfig", + "WorkloadMetadataConfig", + "WorkloadPolicyConfig", + "DatapathProvider", + "IPv6AccessType", + "NodePoolUpdateStrategy", + "PrivateIPv6GoogleAccess", + "StackType", + "UpgradeResourceType", +) diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py b/packages/google-cloud-container/google/cloud/container/gapic_version.py new file mode 100644 index 000000000000..8ab09c42e9c1 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.31.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container/py.typed b/packages/google-cloud-container/google/cloud/container/py.typed new file mode 100644 index 000000000000..d5b0e29f91b4 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. 
+# The google-cloud-container package uses inline types. diff --git a/packages/google-cloud-container/google/cloud/container_v1/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/__init__.py new file mode 100644 index 000000000000..563c5b826bb1 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/__init__.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.container_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cluster_manager import ClusterManagerAsyncClient, ClusterManagerClient +from .types.cluster_service import ( + AcceleratorConfig, + AdditionalNodeNetworkConfig, + AdditionalPodNetworkConfig, + AdditionalPodRangesConfig, + AddonsConfig, + AdvancedDatapathObservabilityConfig, + AdvancedMachineFeatures, + AuthenticatorGroupsConfig, + Autopilot, + AutopilotCompatibilityIssue, + AutoprovisioningNodePoolDefaults, + AutoUpgradeOptions, + BestEffortProvisioning, + BinaryAuthorization, + BlueGreenSettings, + CancelOperationRequest, + CheckAutopilotCompatibilityRequest, + CheckAutopilotCompatibilityResponse, + ClientCertificateConfig, + CloudRunConfig, + Cluster, + ClusterAutoscaling, + ClusterUpdate, + CompleteIPRotationRequest, + CompleteNodePoolUpgradeRequest, + ConfidentialNodes, + ConfigConnectorConfig, + CostManagementConfig, + CreateClusterRequest, + CreateNodePoolRequest, + DailyMaintenanceWindow, + DatabaseEncryption, + DatapathProvider, + DefaultSnatStatus, + DeleteClusterRequest, + DeleteNodePoolRequest, + DnsCacheConfig, + DNSConfig, + EphemeralStorageLocalSsdConfig, + FastSocket, + Fleet, + GatewayAPIConfig, + GcePersistentDiskCsiDriverConfig, + GcfsConfig, + GcpFilestoreCsiDriverConfig, + GcsFuseCsiDriverConfig, + GetClusterRequest, + GetJSONWebKeysRequest, + GetJSONWebKeysResponse, + GetNodePoolRequest, + GetOpenIDConfigRequest, + GetOpenIDConfigResponse, + GetOperationRequest, + GetServerConfigRequest, + GkeBackupAgentConfig, + GPUDriverInstallationConfig, + GPUSharingConfig, + HorizontalPodAutoscaling, + HttpLoadBalancing, + IdentityServiceConfig, + ILBSubsettingConfig, + IntraNodeVisibilityConfig, + IPAllocationPolicy, + IPv6AccessType, + Jwk, + K8sBetaAPIConfig, + KubernetesDashboard, + LegacyAbac, + LinuxNodeConfig, + ListClustersRequest, + ListClustersResponse, + ListNodePoolsRequest, + ListNodePoolsResponse, + ListOperationsRequest, + ListOperationsResponse, + ListUsableSubnetworksRequest, + ListUsableSubnetworksResponse, + LocalNvmeSsdBlockConfig, + LoggingComponentConfig, + LoggingConfig, + LoggingVariantConfig, + MaintenanceExclusionOptions, + MaintenancePolicy, + MaintenanceWindow, + ManagedPrometheusConfig, + MasterAuth, + MasterAuthorizedNetworksConfig, + MaxPodsConstraint, + MeshCertificates, + MonitoringComponentConfig, + MonitoringConfig, + NetworkConfig, + NetworkPolicy, + NetworkPolicyConfig, + NetworkTags, + NodeConfig, + NodeConfigDefaults, + NodeKubeletConfig, + 
NodeLabels, + NodeManagement, + NodeNetworkConfig, + NodePool, + NodePoolAutoConfig, + NodePoolAutoscaling, + NodePoolDefaults, + NodePoolLoggingConfig, + NodePoolUpdateStrategy, + NodeTaint, + NodeTaints, + NotificationConfig, + Operation, + OperationProgress, + PodCIDROverprovisionConfig, + PrivateClusterConfig, + PrivateClusterMasterGlobalAccessConfig, + PrivateIPv6GoogleAccess, + RangeInfo, + RecurringTimeWindow, + ReleaseChannel, + ReservationAffinity, + ResourceLabels, + ResourceLimit, + ResourceUsageExportConfig, + RollbackNodePoolUpgradeRequest, + SandboxConfig, + SecurityBulletinEvent, + SecurityPostureConfig, + ServerConfig, + ServiceExternalIPsConfig, + SetAddonsConfigRequest, + SetLabelsRequest, + SetLegacyAbacRequest, + SetLocationsRequest, + SetLoggingServiceRequest, + SetMaintenancePolicyRequest, + SetMasterAuthRequest, + SetMonitoringServiceRequest, + SetNetworkPolicyRequest, + SetNodePoolAutoscalingRequest, + SetNodePoolManagementRequest, + SetNodePoolSizeRequest, + ShieldedInstanceConfig, + ShieldedNodes, + SoleTenantConfig, + StackType, + StartIPRotationRequest, + StatusCondition, + TimeWindow, + UpdateClusterRequest, + UpdateMasterRequest, + UpdateNodePoolRequest, + UpgradeAvailableEvent, + UpgradeEvent, + UpgradeResourceType, + UsableSubnetwork, + UsableSubnetworkSecondaryRange, + VerticalPodAutoscaling, + VirtualNIC, + WindowsNodeConfig, + WorkloadIdentityConfig, + WorkloadMetadataConfig, + WorkloadPolicyConfig, +) + +__all__ = ( + "ClusterManagerAsyncClient", + "AcceleratorConfig", + "AdditionalNodeNetworkConfig", + "AdditionalPodNetworkConfig", + "AdditionalPodRangesConfig", + "AddonsConfig", + "AdvancedDatapathObservabilityConfig", + "AdvancedMachineFeatures", + "AuthenticatorGroupsConfig", + "AutoUpgradeOptions", + "Autopilot", + "AutopilotCompatibilityIssue", + "AutoprovisioningNodePoolDefaults", + "BestEffortProvisioning", + "BinaryAuthorization", + "BlueGreenSettings", + "CancelOperationRequest", + "CheckAutopilotCompatibilityRequest", + "CheckAutopilotCompatibilityResponse", + "ClientCertificateConfig", + "CloudRunConfig", + "Cluster", + "ClusterAutoscaling", + "ClusterManagerClient", + "ClusterUpdate", + "CompleteIPRotationRequest", + "CompleteNodePoolUpgradeRequest", + "ConfidentialNodes", + "ConfigConnectorConfig", + "CostManagementConfig", + "CreateClusterRequest", + "CreateNodePoolRequest", + "DNSConfig", + "DailyMaintenanceWindow", + "DatabaseEncryption", + "DatapathProvider", + "DefaultSnatStatus", + "DeleteClusterRequest", + "DeleteNodePoolRequest", + "DnsCacheConfig", + "EphemeralStorageLocalSsdConfig", + "FastSocket", + "Fleet", + "GPUDriverInstallationConfig", + "GPUSharingConfig", + "GatewayAPIConfig", + "GcePersistentDiskCsiDriverConfig", + "GcfsConfig", + "GcpFilestoreCsiDriverConfig", + "GcsFuseCsiDriverConfig", + "GetClusterRequest", + "GetJSONWebKeysRequest", + "GetJSONWebKeysResponse", + "GetNodePoolRequest", + "GetOpenIDConfigRequest", + "GetOpenIDConfigResponse", + "GetOperationRequest", + "GetServerConfigRequest", + "GkeBackupAgentConfig", + "HorizontalPodAutoscaling", + "HttpLoadBalancing", + "ILBSubsettingConfig", + "IPAllocationPolicy", + "IPv6AccessType", + "IdentityServiceConfig", + "IntraNodeVisibilityConfig", + "Jwk", + "K8sBetaAPIConfig", + "KubernetesDashboard", + "LegacyAbac", + "LinuxNodeConfig", + "ListClustersRequest", + "ListClustersResponse", + "ListNodePoolsRequest", + "ListNodePoolsResponse", + "ListOperationsRequest", + "ListOperationsResponse", + "ListUsableSubnetworksRequest", + "ListUsableSubnetworksResponse", + 
"LocalNvmeSsdBlockConfig", + "LoggingComponentConfig", + "LoggingConfig", + "LoggingVariantConfig", + "MaintenanceExclusionOptions", + "MaintenancePolicy", + "MaintenanceWindow", + "ManagedPrometheusConfig", + "MasterAuth", + "MasterAuthorizedNetworksConfig", + "MaxPodsConstraint", + "MeshCertificates", + "MonitoringComponentConfig", + "MonitoringConfig", + "NetworkConfig", + "NetworkPolicy", + "NetworkPolicyConfig", + "NetworkTags", + "NodeConfig", + "NodeConfigDefaults", + "NodeKubeletConfig", + "NodeLabels", + "NodeManagement", + "NodeNetworkConfig", + "NodePool", + "NodePoolAutoConfig", + "NodePoolAutoscaling", + "NodePoolDefaults", + "NodePoolLoggingConfig", + "NodePoolUpdateStrategy", + "NodeTaint", + "NodeTaints", + "NotificationConfig", + "Operation", + "OperationProgress", + "PodCIDROverprovisionConfig", + "PrivateClusterConfig", + "PrivateClusterMasterGlobalAccessConfig", + "PrivateIPv6GoogleAccess", + "RangeInfo", + "RecurringTimeWindow", + "ReleaseChannel", + "ReservationAffinity", + "ResourceLabels", + "ResourceLimit", + "ResourceUsageExportConfig", + "RollbackNodePoolUpgradeRequest", + "SandboxConfig", + "SecurityBulletinEvent", + "SecurityPostureConfig", + "ServerConfig", + "ServiceExternalIPsConfig", + "SetAddonsConfigRequest", + "SetLabelsRequest", + "SetLegacyAbacRequest", + "SetLocationsRequest", + "SetLoggingServiceRequest", + "SetMaintenancePolicyRequest", + "SetMasterAuthRequest", + "SetMonitoringServiceRequest", + "SetNetworkPolicyRequest", + "SetNodePoolAutoscalingRequest", + "SetNodePoolManagementRequest", + "SetNodePoolSizeRequest", + "ShieldedInstanceConfig", + "ShieldedNodes", + "SoleTenantConfig", + "StackType", + "StartIPRotationRequest", + "StatusCondition", + "TimeWindow", + "UpdateClusterRequest", + "UpdateMasterRequest", + "UpdateNodePoolRequest", + "UpgradeAvailableEvent", + "UpgradeEvent", + "UpgradeResourceType", + "UsableSubnetwork", + "UsableSubnetworkSecondaryRange", + "VerticalPodAutoscaling", + "VirtualNIC", + "WindowsNodeConfig", + "WorkloadIdentityConfig", + "WorkloadMetadataConfig", + "WorkloadPolicyConfig", +) diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_metadata.json b/packages/google-cloud-container/google/cloud/container_v1/gapic_metadata.json new file mode 100644 index 000000000000..85cb487b948d --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_metadata.json @@ -0,0 +1,363 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.container_v1", + "protoPackage": "google.container.v1", + "schema": "1.0", + "services": { + "ClusterManager": { + "clients": { + "grpc": { + "libraryClient": "ClusterManagerClient", + "rpcs": { + "CancelOperation": { + "methods": [ + "cancel_operation" + ] + }, + "CheckAutopilotCompatibility": { + "methods": [ + "check_autopilot_compatibility" + ] + }, + "CompleteIPRotation": { + "methods": [ + "complete_ip_rotation" + ] + }, + "CompleteNodePoolUpgrade": { + "methods": [ + "complete_node_pool_upgrade" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateNodePool": { + "methods": [ + "create_node_pool" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteNodePool": { + "methods": [ + "delete_node_pool" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetJSONWebKeys": { + "methods": [ + "get_json_web_keys" + ] + }, + "GetNodePool": { + "methods": [ + "get_node_pool" + ] + 
}, + "GetOperation": { + "methods": [ + "get_operation" + ] + }, + "GetServerConfig": { + "methods": [ + "get_server_config" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListNodePools": { + "methods": [ + "list_node_pools" + ] + }, + "ListOperations": { + "methods": [ + "list_operations" + ] + }, + "ListUsableSubnetworks": { + "methods": [ + "list_usable_subnetworks" + ] + }, + "RollbackNodePoolUpgrade": { + "methods": [ + "rollback_node_pool_upgrade" + ] + }, + "SetAddonsConfig": { + "methods": [ + "set_addons_config" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "SetLegacyAbac": { + "methods": [ + "set_legacy_abac" + ] + }, + "SetLocations": { + "methods": [ + "set_locations" + ] + }, + "SetLoggingService": { + "methods": [ + "set_logging_service" + ] + }, + "SetMaintenancePolicy": { + "methods": [ + "set_maintenance_policy" + ] + }, + "SetMasterAuth": { + "methods": [ + "set_master_auth" + ] + }, + "SetMonitoringService": { + "methods": [ + "set_monitoring_service" + ] + }, + "SetNetworkPolicy": { + "methods": [ + "set_network_policy" + ] + }, + "SetNodePoolAutoscaling": { + "methods": [ + "set_node_pool_autoscaling" + ] + }, + "SetNodePoolManagement": { + "methods": [ + "set_node_pool_management" + ] + }, + "SetNodePoolSize": { + "methods": [ + "set_node_pool_size" + ] + }, + "StartIPRotation": { + "methods": [ + "start_ip_rotation" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateMaster": { + "methods": [ + "update_master" + ] + }, + "UpdateNodePool": { + "methods": [ + "update_node_pool" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ClusterManagerAsyncClient", + "rpcs": { + "CancelOperation": { + "methods": [ + "cancel_operation" + ] + }, + "CheckAutopilotCompatibility": { + "methods": [ + "check_autopilot_compatibility" + ] + }, + "CompleteIPRotation": { + "methods": [ + "complete_ip_rotation" + ] + }, + "CompleteNodePoolUpgrade": { + "methods": [ + "complete_node_pool_upgrade" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateNodePool": { + "methods": [ + "create_node_pool" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteNodePool": { + "methods": [ + "delete_node_pool" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetJSONWebKeys": { + "methods": [ + "get_json_web_keys" + ] + }, + "GetNodePool": { + "methods": [ + "get_node_pool" + ] + }, + "GetOperation": { + "methods": [ + "get_operation" + ] + }, + "GetServerConfig": { + "methods": [ + "get_server_config" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListNodePools": { + "methods": [ + "list_node_pools" + ] + }, + "ListOperations": { + "methods": [ + "list_operations" + ] + }, + "ListUsableSubnetworks": { + "methods": [ + "list_usable_subnetworks" + ] + }, + "RollbackNodePoolUpgrade": { + "methods": [ + "rollback_node_pool_upgrade" + ] + }, + "SetAddonsConfig": { + "methods": [ + "set_addons_config" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "SetLegacyAbac": { + "methods": [ + "set_legacy_abac" + ] + }, + "SetLocations": { + "methods": [ + "set_locations" + ] + }, + "SetLoggingService": { + "methods": [ + "set_logging_service" + ] + }, + "SetMaintenancePolicy": { + "methods": [ + "set_maintenance_policy" + ] + }, + "SetMasterAuth": { + "methods": [ + "set_master_auth" + ] + }, + "SetMonitoringService": { + "methods": [ + "set_monitoring_service" + ] + }, + "SetNetworkPolicy": { + "methods": [ + 
"set_network_policy" + ] + }, + "SetNodePoolAutoscaling": { + "methods": [ + "set_node_pool_autoscaling" + ] + }, + "SetNodePoolManagement": { + "methods": [ + "set_node_pool_management" + ] + }, + "SetNodePoolSize": { + "methods": [ + "set_node_pool_size" + ] + }, + "StartIPRotation": { + "methods": [ + "start_ip_rotation" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateMaster": { + "methods": [ + "update_master" + ] + }, + "UpdateNodePool": { + "methods": [ + "update_node_pool" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py new file mode 100644 index 000000000000..8ab09c42e9c1 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.31.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/py.typed b/packages/google-cloud-container/google/cloud/container_v1/py.typed new file mode 100644 index 000000000000..d5b0e29f91b4 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-container package uses inline types. diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/__init__.py new file mode 100644 index 000000000000..54d42dba56b7 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ClusterManagerAsyncClient +from .client import ClusterManagerClient + +__all__ = ( + "ClusterManagerClient", + "ClusterManagerAsyncClient", +) diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py new file mode 100644 index 000000000000..b3f3915f2e41 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/async_client.py @@ -0,0 +1,4715 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.container_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.rpc import status_pb2 # type: ignore + +from google.cloud.container_v1.services.cluster_manager import pagers +from google.cloud.container_v1.types import cluster_service + +from .client import ClusterManagerClient +from .transports.base import DEFAULT_CLIENT_INFO, ClusterManagerTransport +from .transports.grpc_asyncio import ClusterManagerGrpcAsyncIOTransport + + +class ClusterManagerAsyncClient: + """Google Kubernetes Engine Cluster Manager v1""" + + _client: ClusterManagerClient + + DEFAULT_ENDPOINT = ClusterManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ClusterManagerClient.DEFAULT_MTLS_ENDPOINT + + topic_path = staticmethod(ClusterManagerClient.topic_path) + parse_topic_path = staticmethod(ClusterManagerClient.parse_topic_path) + common_billing_account_path = staticmethod( + ClusterManagerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ClusterManagerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ClusterManagerClient.common_folder_path) + parse_common_folder_path = staticmethod( + 
+        ClusterManagerClient.parse_common_folder_path
+    )
+    common_organization_path = staticmethod(
+        ClusterManagerClient.common_organization_path
+    )
+    parse_common_organization_path = staticmethod(
+        ClusterManagerClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(ClusterManagerClient.common_project_path)
+    parse_common_project_path = staticmethod(
+        ClusterManagerClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(ClusterManagerClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        ClusterManagerClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ClusterManagerAsyncClient: The constructed client.
+        """
+        return ClusterManagerClient.from_service_account_info.__func__(ClusterManagerAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ClusterManagerAsyncClient: The constructed client.
+        """
+        return ClusterManagerClient.from_service_account_file.__func__(ClusterManagerAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return ClusterManagerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> ClusterManagerTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ClusterManagerTransport: The transport used by the client instance.
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(ClusterManagerClient).get_transport_class, type(ClusterManagerClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ClusterManagerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cluster manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ClusterManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ClusterManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_clusters( + self, + request: Optional[Union[cluster_service.ListClustersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListClustersResponse: + r"""Lists all clusters owned by a project in either the + specified zone or all zones. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_list_clusters(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.ListClustersRequest( + ) + + # Make the request + response = await client.list_clusters(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.ListClustersRequest, dict]]): + The request object. ListClustersRequest lists clusters. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides, or "-" for all zones. This + field has been deprecated and replaced by the parent + field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent (:class:`str`): + The parent (project and location) where the clusters + will be listed. Specified in the format + ``projects/*/locations/*``. Location "-" matches all + zones and all regions. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.ListClustersResponse: + ListClustersResponse is the result of + ListClustersRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.ListClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_clusters, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
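+        # `retry` and `timeout` arrive as gapic_v1.method.DEFAULT sentinels
+        # unless the caller overrides them; explicit per-call values take
+        # precedence over the defaults configured on the wrapped method above.
+        #
+        # A minimal usage sketch (the project ID is hypothetical):
+        #
+        #   client = container_v1.ClusterManagerAsyncClient()
+        #   request = container_v1.ListClustersRequest(
+        #       parent="projects/my-project/locations/-",  # "-" matches all zones and regions
+        #   )
+        #   response = await client.list_clusters(request=request)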
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_cluster( + self, + request: Optional[Union[cluster_service.GetClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Cluster: + r"""Gets the details of a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_get_cluster(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.GetClusterRequest( + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.GetClusterRequest, dict]]): + The request object. GetClusterRequest gets the settings + of a cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to retrieve. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster) of the cluster to + retrieve. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Cluster: + A Google Kubernetes Engine cluster. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cluster_service.GetClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_cluster( + self, + request: Optional[Union[cluster_service.CreateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster: Optional[cluster_service.Cluster] = None, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Creates a cluster, consisting of the specified number and type + of Google Compute Engine instances. + + By default, the cluster is created in the project's `default + network `__. + + One firewall is added for the cluster. After cluster creation, + the Kubelet creates routes for each node to allow the containers + on that node to communicate with all other instances in the + cluster. + + Finally, an entry is added to the project's global metadata + indicating which CIDR range the cluster is using. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_create_cluster(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.CreateClusterRequest( + ) + + # Make the request + response = await client.create_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.CreateClusterRequest, dict]]): + The request object. CreateClusterRequest creates a + cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. 
The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.container_v1.types.Cluster`): + Required. A `cluster + resource `__ + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent (:class:`str`): + The parent (project and location) where the cluster will + be created. Specified in the format + ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster, parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.CreateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster is not None: + request.cluster = cluster + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_cluster, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_cluster( + self, + request: Optional[Union[cluster_service.UpdateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + update: Optional[cluster_service.ClusterUpdate] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the settings of a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_update_cluster(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.UpdateClusterRequest( + ) + + # Make the request + response = await client.update_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.UpdateClusterRequest, dict]]): + The request object. UpdateClusterRequest updates the + settings of a cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update (:class:`google.cloud.container_v1.types.ClusterUpdate`): + Required. A description of the + update. + + This corresponds to the ``update`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster) of the cluster to + update. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, update, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.UpdateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if update is not None: + request.update = update + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
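+        # Unlike list_clusters/get_cluster above, no default_retry is attached
+        # here: only a 45-second default timeout is configured, so a failed
+        # update_cluster call is not retried unless the caller passes an
+        # explicit `retry` argument.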
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cluster, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_node_pool( + self, + request: Optional[Union[cluster_service.UpdateNodePoolRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the version and/or image type for the + specified node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_update_node_pool(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.UpdateNodePoolRequest( + node_version="node_version_value", + image_type="image_type_value", + ) + + # Make the request + response = await client.update_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.UpdateNodePoolRequest, dict]]): + The request object. UpdateNodePoolRequests update a node + pool's image and/or version. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.UpdateNodePoolRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_node_pool, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_node_pool_autoscaling( + self, + request: Optional[ + Union[cluster_service.SetNodePoolAutoscalingRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the autoscaling settings for the specified node + pool. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_node_pool_autoscaling(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetNodePoolAutoscalingRequest( + ) + + # Make the request + response = await client.set_node_pool_autoscaling(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetNodePoolAutoscalingRequest, dict]]): + The request object. SetNodePoolAutoscalingRequest sets + the autoscaler settings of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.SetNodePoolAutoscalingRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_node_pool_autoscaling, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_logging_service( + self, + request: Optional[Union[cluster_service.SetLoggingServiceRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + logging_service: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the logging service for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_logging_service(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetLoggingServiceRequest( + logging_service="logging_service_value", + ) + + # Make the request + response = await client.set_logging_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetLoggingServiceRequest, dict]]): + The request object. SetLoggingServiceRequest sets the + logging service of a cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + logging_service (:class:`str`): + Required. The logging service the cluster should use to + write logs. Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud + Logging service with a Kubernetes-native resource + model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``logging.googleapis.com`` for + earlier versions. + + This corresponds to the ``logging_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster) of the cluster to + set logging. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
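+        # The flattened fields are a convenience alternative to building the
+        # request object yourself; mixing the two styles raises the ValueError
+        # below. For example (the cluster name is hypothetical):
+        #
+        #   await client.set_logging_service(
+        #       name="projects/p/locations/us-central1/clusters/c",
+        #       logging_service="logging.googleapis.com/kubernetes",
+        #   )  # OK: flattened arguments only
+        #
+        #   await client.set_logging_service(request=request, name="...")  # ValueError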
+ has_flattened_params = any( + [project_id, zone, cluster_id, logging_service, name] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetLoggingServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if logging_service is not None: + request.logging_service = logging_service + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_logging_service, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_monitoring_service( + self, + request: Optional[ + Union[cluster_service.SetMonitoringServiceRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + monitoring_service: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the monitoring service for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_monitoring_service(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetMonitoringServiceRequest( + monitoring_service="monitoring_service_value", + ) + + # Make the request + response = await client.set_monitoring_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetMonitoringServiceRequest, dict]]): + The request object. SetMonitoringServiceRequest sets the + monitoring service of a cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + monitoring_service (:class:`str`): + Required. The monitoring service the cluster should use + to write metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE + 1.15). + - ``none`` - No metrics will be exported from the + cluster. + + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will + be used for GKE 1.14+ or ``monitoring.googleapis.com`` + for earlier versions. + + This corresponds to the ``monitoring_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster) of the cluster to + set monitoring. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, zone, cluster_id, monitoring_service, name] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetMonitoringServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if monitoring_service is not None: + request.monitoring_service = monitoring_service + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_monitoring_service, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
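+        # The Operation returned here only records that the change was
+        # accepted; it is not awaited to completion, and callers typically
+        # poll it (for example via get_operation) until it reaches a
+        # terminal status.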
+ return response + + async def set_addons_config( + self, + request: Optional[Union[cluster_service.SetAddonsConfigRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + addons_config: Optional[cluster_service.AddonsConfig] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the addons for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_addons_config(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetAddonsConfigRequest( + ) + + # Make the request + response = await client.set_addons_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetAddonsConfigRequest, dict]]): + The request object. SetAddonsConfigRequest sets the + addons associated with the cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + addons_config (:class:`google.cloud.container_v1.types.AddonsConfig`): + Required. The desired configurations + for the various addons available to run + in the cluster. + + This corresponds to the ``addons_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster) of the cluster to + set addons. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, addons_config, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetAddonsConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if addons_config is not None: + request.addons_config = addons_config + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_addons_config, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_locations( + self, + request: Optional[Union[cluster_service.SetLocationsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + locations: Optional[MutableSequence[str]] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the locations for a specific cluster. Deprecated. Use + `projects.locations.clusters.update `__ + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_locations(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetLocationsRequest( + locations=['locations_value1', 'locations_value2'], + ) + + # Make the request + response = await client.set_locations(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetLocationsRequest, dict]]): + The request object. SetLocationsRequest sets the + locations of the cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. 
The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + locations (:class:`MutableSequence[str]`): + Required. The desired list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. Changing + the locations a cluster is in will result in nodes being + either created or removed from the cluster, depending on + whether locations are being added or removed. + + This list must always include the cluster's primary + zone. + + This corresponds to the ``locations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster) of the cluster to + set locations. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + warnings.warn( + "ClusterManagerAsyncClient.set_locations is deprecated", DeprecationWarning + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, locations, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetLocationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + if locations: + request.locations.extend(locations) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_locations, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
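+        # Note that `locations` was merged with request.locations.extend()
+        # above (a repeated proto field rather than a scalar assignment), and
+        # that this whole RPC is deprecated in favor of update_cluster, as the
+        # warning at the top of this method indicates.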
+ return response + + async def update_master( + self, + request: Optional[Union[cluster_service.UpdateMasterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + master_version: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the master for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_update_master(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.UpdateMasterRequest( + master_version="master_version_value", + ) + + # Make the request + response = await client.update_master(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.UpdateMasterRequest, dict]]): + The request object. UpdateMasterRequest updates the + master of the cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + master_version (:class:`str`): + Required. The Kubernetes version to + change the master to. + Users may specify either explicit + versions offered by Kubernetes Engine or + version aliases, which have the + following behavior: + + - "latest": picks the highest valid + Kubernetes version + - "1.X": picks the highest valid + patch+gke.N patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N + patch in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit + Kubernetes version + - "-": picks the default Kubernetes + version + + This corresponds to the ``master_version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster) of the cluster to + update. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, master_version, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.UpdateMasterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if master_version is not None: + request.master_version = master_version + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_master, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_master_auth( + self, + request: Optional[Union[cluster_service.SetMasterAuthRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets master auth materials. Currently supports + changing the admin password or a specific cluster, + either via password generation or explicitly setting the + password. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_master_auth(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetMasterAuthRequest( + action="SET_USERNAME", + ) + + # Make the request + response = await client.set_master_auth(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetMasterAuthRequest, dict]]): + The request object. SetMasterAuthRequest updates the + admin password of a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.SetMasterAuthRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_master_auth, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_cluster( + self, + request: Optional[Union[cluster_service.DeleteClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Deletes the cluster, including the Kubernetes + endpoint and all worker nodes. + + Firewalls and routes that were configured during cluster + creation are also deleted. + + Other Google Compute Engine resources that might be in + use by the cluster, such as load balancer resources, are + not deleted if they weren't present when the cluster was + initially created. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_delete_cluster(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.DeleteClusterRequest( + ) + + # Make the request + response = await client.delete_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.DeleteClusterRequest, dict]]): + The request object. DeleteClusterRequest deletes a + cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to delete. 
This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster) of the cluster to + delete. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.DeleteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[Union[cluster_service.ListOperationsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListOperationsResponse: + r"""Lists all operations in a project in a specific zone + or all zones. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_list_operations(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.ListOperationsRequest( + ) + + # Make the request + response = await client.list_operations(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.ListOperationsRequest, dict]]): + The request object. ListOperationsRequest lists + operations. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + to return operations for, or ``-`` for all zones. This + field has been deprecated and replaced by the parent + field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.ListOperationsResponse: + ListOperationsResponse is the result + of ListOperationsRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.ListOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
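+        # For reference, a minimal (hypothetical) call using the flattened
+        # parameters accepted above; "my-project" is a placeholder and "-"
+        # requests all zones:
+        #   resp = await client.list_operations(project_id="my-project", zone="-")
+        #   for op in resp.operations:
+        #       print(op.name, op.status)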
+ return response + + async def get_operation( + self, + request: Optional[Union[cluster_service.GetOperationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + operation_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Gets the specified operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_get_operation(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.GetOperationRequest( + ) + + # Make the request + response = await client.get_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.GetOperationRequest, dict]]): + The request object. GetOperationRequest gets a single + operation. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (:class:`str`): + Deprecated. The server-assigned ``name`` of the + operation. This field has been deprecated and replaced + by the name field. + + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, operation id) of the + operation to get. Specified in the format + ``projects/*/locations/*/operations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, operation_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cluster_service.GetOperationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if operation_id is not None: + request.operation_id = operation_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[Union[cluster_service.CancelOperationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + operation_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels the specified operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_cancel_operation(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.CancelOperationRequest( + ) + + # Make the request + await client.cancel_operation(request=request) + + Args: + request (Optional[Union[google.cloud.container_v1.types.CancelOperationRequest, dict]]): + The request object. CancelOperationRequest cancels a + single operation. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the operation resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (:class:`str`): + Deprecated. The server-assigned ``name`` of the + operation. This field has been deprecated and replaced + by the name field. + + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ name (:class:`str`): + The name (project, location, operation id) of the + operation to cancel. Specified in the format + ``projects/*/locations/*/operations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, operation_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.CancelOperationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if operation_id is not None: + request.operation_id = operation_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_server_config( + self, + request: Optional[Union[cluster_service.GetServerConfigRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ServerConfig: + r"""Returns configuration info about the Google + Kubernetes Engine service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_get_server_config(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.GetServerConfigRequest( + ) + + # Make the request + response = await client.get_server_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.GetServerConfigRequest, dict]]): + The request object. Gets the current Kubernetes Engine + service configuration. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. 
+ + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + to return operations for. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project and location) of the server config to + get, specified in the format ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.ServerConfig: + Kubernetes Engine service + configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.GetServerConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_server_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_json_web_keys( + self, + request: Optional[Union[cluster_service.GetJSONWebKeysRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.GetJSONWebKeysResponse: + r"""Gets the public component of the cluster signing keys + in JSON Web Key format. + This API is not yet intended for general use, and is not + available for all clusters. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1
+
+            async def sample_get_json_web_keys():
+                # Create a client
+                client = container_v1.ClusterManagerAsyncClient()
+
+                # Initialize request argument(s)
+                request = container_v1.GetJSONWebKeysRequest(
+                )
+
+                # Make the request
+                response = await client.get_json_web_keys(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.container_v1.types.GetJSONWebKeysRequest, dict]]):
+                The request object. GetJSONWebKeysRequest gets the public component of the
+                keys used by the cluster to sign token requests. This
+                will be the jwks_uri for the discovery document returned
+                by getOpenIDConfig. See the OpenID Connect Discovery 1.0
+                specification for details.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.container_v1.types.GetJSONWebKeysResponse:
+                GetJSONWebKeysResponse is a valid
+                JSON Web Key Set as specified in RFC
+                7517.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = cluster_service.GetJSONWebKeysRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_json_web_keys,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_node_pools(
+        self,
+        request: Optional[Union[cluster_service.ListNodePoolsRequest, dict]] = None,
+        *,
+        project_id: Optional[str] = None,
+        zone: Optional[str] = None,
+        cluster_id: Optional[str] = None,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.ListNodePoolsResponse:
+        r"""Lists the node pools for a cluster.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1
+
+            async def sample_list_node_pools():
+                # Create a client
+                client = container_v1.ClusterManagerAsyncClient()
+
+                # Initialize request argument(s)
+                request = container_v1.ListNodePoolsRequest(
+                )
+
+                # Make the request
+                response = await client.list_node_pools(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.container_v1.types.ListNodePoolsRequest, dict]]):
+                The request object. ListNodePoolsRequest lists the node
+                pool(s) for a cluster.
+ project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the parent field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent (:class:`str`): + The parent (project, location, cluster name) where the + node pools will be listed. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.ListNodePoolsResponse: + ListNodePoolsResponse is the result + of ListNodePoolsRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.ListNodePoolsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_node_pools, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
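+        # For reference, a minimal (hypothetical) call using the ``parent``
+        # flattened parameter accepted above (path values are placeholders):
+        #   resp = await client.list_node_pools(
+        #       parent="projects/my-project/locations/us-central1/clusters/my-cluster"
+        #   )
+        #   names = [pool.name for pool in resp.node_pools]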
+ return response + + async def get_node_pool( + self, + request: Optional[Union[cluster_service.GetNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.NodePool: + r"""Retrieves the requested node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_get_node_pool(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.GetNodePoolRequest( + ) + + # Make the request + response = await client.get_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.GetNodePoolRequest, dict]]): + The request object. GetNodePoolRequest retrieves a node + pool for a cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (:class:`str`): + Deprecated. The name of the node + pool. This field has been deprecated and + replaced by the name field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster, node pool id) of + the node pool to get. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.NodePool: + NodePool contains the name and + configuration for a cluster's node pool. + Node pools are a set of nodes (i.e. + VM's), with a common configuration and + specification, under the control of the + cluster master. 
They may have a set of + Kubernetes labels applied to them, which + may be used to reference them during pod + scheduling. They may also be resized up + or down, to accommodate the workload. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.GetNodePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_node_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_node_pool( + self, + request: Optional[Union[cluster_service.CreateNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool: Optional[cluster_service.NodePool] = None, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Creates a node pool for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_create_node_pool(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.CreateNodePoolRequest( + ) + + # Make the request + response = await client.create_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.CreateNodePoolRequest, dict]]): + The request object. CreateNodePoolRequest creates a node + pool for a cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. 
+ This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the parent field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool (:class:`google.cloud.container_v1.types.NodePool`): + Required. The node pool to create. + This corresponds to the ``node_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent (:class:`str`): + The parent (project, location, cluster name) where the + node pool will be created. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool, parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.CreateNodePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool is not None: + request.node_pool = node_pool + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_node_pool, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
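+        # Note: this returns a raw cluster_service.Operation rather than an
+        # LRO future; callers typically poll get_operation until
+        # response.status == cluster_service.Operation.Status.DONE.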
+ return response + + async def delete_node_pool( + self, + request: Optional[Union[cluster_service.DeleteNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Deletes a node pool from a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_delete_node_pool(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.DeleteNodePoolRequest( + ) + + # Make the request + response = await client.delete_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.DeleteNodePoolRequest, dict]]): + The request object. DeleteNodePoolRequest deletes a node + pool for a cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (:class:`str`): + Deprecated. The name of the node pool + to delete. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster, node pool id) of + the node pool to delete. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. 
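+        # For illustration, ``request`` may also be a plain dict with
+        # placeholder values, which the proto-plus constructor below coerces:
+        #   {"name": "projects/my-project/locations/us-central1/clusters/my-cluster/nodePools/my-pool"}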
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.DeleteNodePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_node_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def complete_node_pool_upgrade( + self, + request: Optional[ + Union[cluster_service.CompleteNodePoolUpgradeRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""CompleteNodePoolUpgrade will signal an on-going node + pool upgrade to complete. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_complete_node_pool_upgrade(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.CompleteNodePoolUpgradeRequest( + ) + + # Make the request + await client.complete_node_pool_upgrade(request=request) + + Args: + request (Optional[Union[google.cloud.container_v1.types.CompleteNodePoolUpgradeRequest, dict]]): + The request object. CompleteNodePoolUpgradeRequest sets + the name of target node pool to complete + upgrade. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = cluster_service.CompleteNodePoolUpgradeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
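+        # ``default_timeout=None`` below means no client-side deadline is
+        # applied unless the caller passes an explicit ``timeout``.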
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.complete_node_pool_upgrade,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def rollback_node_pool_upgrade(
+        self,
+        request: Optional[
+            Union[cluster_service.RollbackNodePoolUpgradeRequest, dict]
+        ] = None,
+        *,
+        project_id: Optional[str] = None,
+        zone: Optional[str] = None,
+        cluster_id: Optional[str] = None,
+        node_pool_id: Optional[str] = None,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.Operation:
+        r"""Rolls back a previously Aborted or Failed NodePool
+        upgrade. This makes no changes if the last upgrade
+        successfully completed.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1
+
+            async def sample_rollback_node_pool_upgrade():
+                # Create a client
+                client = container_v1.ClusterManagerAsyncClient()
+
+                # Initialize request argument(s)
+                request = container_v1.RollbackNodePoolUpgradeRequest(
+                )
+
+                # Make the request
+                response = await client.rollback_node_pool_upgrade(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.container_v1.types.RollbackNodePoolUpgradeRequest, dict]]):
+                The request object. RollbackNodePoolUpgradeRequest
+                rolls back the previously Aborted or
+                Failed NodePool upgrade. This will be a
+                no-op if the last upgrade successfully
+                completed.
+            project_id (:class:`str`):
+                Deprecated. The Google Developers Console `project ID or
+                project
+                number `__.
+                This field has been deprecated and replaced by the name
+                field.
+
+                This corresponds to the ``project_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (:class:`str`):
+                Deprecated. The name of the Google Compute Engine
+                `zone `__
+                in which the cluster resides. This field has been
+                deprecated and replaced by the name field.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            cluster_id (:class:`str`):
+                Deprecated. The name of the cluster
+                to rollback. This field has been
+                deprecated and replaced by the name
+                field.
+
+                This corresponds to the ``cluster_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            node_pool_id (:class:`str`):
+                Deprecated. The name of the node pool
+                to rollback. This field has been
+                deprecated and replaced by the name
+                field.
+
+                This corresponds to the ``node_pool_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            name (:class:`str`):
+                The name (project, location, cluster, node pool id) of
+                the node pool to rollback upgrade.
Specified in the + format + ``projects/*/locations/*/clusters/*/nodePools/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.RollbackNodePoolUpgradeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback_node_pool_upgrade, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_node_pool_management( + self, + request: Optional[ + Union[cluster_service.SetNodePoolManagementRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the NodeManagement options for a node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_node_pool_management(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetNodePoolManagementRequest( + ) + + # Make the request + response = await client.set_node_pool_management(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetNodePoolManagementRequest, dict]]): + The request object. 
SetNodePoolManagementRequest sets the + node management properties of a node + pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.SetNodePoolManagementRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_node_pool_management, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_labels( + self, + request: Optional[Union[cluster_service.SetLabelsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets labels on a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_labels(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetLabelsRequest( + label_fingerprint="label_fingerprint_value", + ) + + # Make the request + response = await client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetLabelsRequest, dict]]): + The request object. SetLabelsRequest sets the Google + Cloud Platform labels on a Google + Container Engine cluster, which will in + turn set them for Google Compute Engine + resources used by that cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.SetLabelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
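+        # Per-call ``retry``/``timeout`` arguments, when set, take precedence
+        # over the ``default_timeout`` configured below.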
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_labels, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_legacy_abac( + self, + request: Optional[Union[cluster_service.SetLegacyAbacRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + enabled: Optional[bool] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Enables or disables the ABAC authorization mechanism + on a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_legacy_abac(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetLegacyAbacRequest( + enabled=True, + ) + + # Make the request + response = await client.set_legacy_abac(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetLegacyAbacRequest, dict]]): + The request object. SetLegacyAbacRequest enables or + disables the ABAC authorization + mechanism for a cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster + to update. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + enabled (:class:`bool`): + Required. Whether ABAC authorization + will be enabled in the cluster. + + This corresponds to the ``enabled`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster name) of the + cluster to set legacy abac. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, enabled, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetLegacyAbacRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if enabled is not None: + request.enabled = enabled + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_legacy_abac, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def start_ip_rotation( + self, + request: Optional[Union[cluster_service.StartIPRotationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Starts master IP rotation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_start_ip_rotation(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.StartIPRotationRequest( + ) + + # Make the request + response = await client.start_ip_rotation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.StartIPRotationRequest, dict]]): + The request object. StartIPRotationRequest creates a new + IP for the cluster and then performs a + node upgrade on each node pool to point + to the new IP. + project_id (:class:`str`): + Deprecated. 
The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster name) of the + cluster to start IP rotation. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.StartIPRotationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_ip_rotation, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def complete_ip_rotation( + self, + request: Optional[ + Union[cluster_service.CompleteIPRotationRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Completes master IP rotation. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_complete_ip_rotation(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.CompleteIPRotationRequest( + ) + + # Make the request + response = await client.complete_ip_rotation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.CompleteIPRotationRequest, dict]]): + The request object. CompleteIPRotationRequest moves the + cluster master back into single-IP mode. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster name) of the + cluster to complete IP rotation. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.CompleteIPRotationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
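+        # complete_ip_rotation is the second half of the flow started by
+        # start_ip_rotation above; per the request docstring, it moves the
+        # cluster master back into single-IP mode.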
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.complete_ip_rotation, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_node_pool_size( + self, + request: Optional[Union[cluster_service.SetNodePoolSizeRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the size for a specific node pool. The new size will be + used for all replicas, including future replicas created by + modifying + [NodePool.locations][google.container.v1.NodePool.locations]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_node_pool_size(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetNodePoolSizeRequest( + node_count=1070, + ) + + # Make the request + response = await client.set_node_pool_size(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetNodePoolSizeRequest, dict]]): + The request object. SetNodePoolSizeRequest sets the size + of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.SetNodePoolSizeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_node_pool_size, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
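+        # Unlike most methods on this client, SetNodePoolSizeRequest exposes
+        # no flattened fields, so callers always pass a request object or
+        # dict (see the sample above).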
+ return response + + async def set_network_policy( + self, + request: Optional[Union[cluster_service.SetNetworkPolicyRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + network_policy: Optional[cluster_service.NetworkPolicy] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Enables or disables Network Policy for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_network_policy(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetNetworkPolicyRequest( + ) + + # Make the request + response = await client.set_network_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetNetworkPolicyRequest, dict]]): + The request object. SetNetworkPolicyRequest + enables/disables network policy for a + cluster. + project_id (:class:`str`): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_policy (:class:`google.cloud.container_v1.types.NetworkPolicy`): + Required. Configuration options for + the NetworkPolicy feature. + + This corresponds to the ``network_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster name) of the + cluster to set networking policy. Specified in the + format ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, network_policy, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetNetworkPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if network_policy is not None: + request.network_policy = network_policy + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_network_policy, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_maintenance_policy( + self, + request: Optional[ + Union[cluster_service.SetMaintenancePolicyRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + maintenance_policy: Optional[cluster_service.MaintenancePolicy] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the maintenance policy for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_set_maintenance_policy(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.SetMaintenancePolicyRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.set_maintenance_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.SetMaintenancePolicyRequest, dict]]): + The request object. SetMaintenancePolicyRequest sets the + maintenance policy for a cluster. + project_id (:class:`str`): + Required. The Google Developers Console `project ID or + project + number `__. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. The name of the cluster to + update. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + maintenance_policy (:class:`google.cloud.container_v1.types.MaintenancePolicy`): + Required. The maintenance policy to + be set for the cluster. An empty field + clears the existing maintenance policy. + + This corresponds to the ``maintenance_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (:class:`str`): + The name (project, location, cluster name) of the + cluster to set maintenance policy. Specified in the + format ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, zone, cluster_id, maintenance_policy, name] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetMaintenancePolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if maintenance_policy is not None: + request.maintenance_policy = maintenance_policy + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_maintenance_policy, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_usable_subnetworks( + self, + request: Optional[ + Union[cluster_service.ListUsableSubnetworksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableSubnetworksAsyncPager: + r"""Lists subnetworks that are usable for creating + clusters in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_list_usable_subnetworks(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.ListUsableSubnetworksRequest( + ) + + # Make the request + page_result = client.list_usable_subnetworks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.ListUsableSubnetworksRequest, dict]]): + The request object. ListUsableSubnetworksRequest requests + the list of usable subnetworks available + to a user for creating clusters. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.services.cluster_manager.pagers.ListUsableSubnetworksAsyncPager: + ListUsableSubnetworksResponse is the + response of + ListUsableSubnetworksRequest. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.ListUsableSubnetworksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_usable_subnetworks, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUsableSubnetworksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def check_autopilot_compatibility( + self, + request: Optional[ + Union[cluster_service.CheckAutopilotCompatibilityRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.CheckAutopilotCompatibilityResponse: + r"""Checks the cluster compatibility with Autopilot mode, + and returns a list of compatibility issues. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + async def sample_check_autopilot_compatibility(): + # Create a client + client = container_v1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1.CheckAutopilotCompatibilityRequest( + ) + + # Make the request + response = await client.check_autopilot_compatibility(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1.types.CheckAutopilotCompatibilityRequest, dict]]): + The request object. CheckAutopilotCompatibilityRequest + requests getting the blockers for the + given operation in the cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.CheckAutopilotCompatibilityResponse: + CheckAutopilotCompatibilityResponse + has a list of compatibility issues. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.CheckAutopilotCompatibilityRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.check_autopilot_compatibility, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ClusterManagerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ClusterManagerAsyncClient",) diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py new file mode 100644 index 000000000000..b26f67ff07ba --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py @@ -0,0 +1,4872 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
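Before the synchronous `ClusterManagerClient` implementation below, here is a minimal, hedged sketch of driving the async client defined above; the project ID is a hypothetical placeholder, and Application Default Credentials are assumed to be configured.

```python
import asyncio

from google.cloud import container_v1


async def main():
    # `async with` uses the __aenter__/__aexit__ pair defined above to
    # close the underlying gRPC transport on exit.
    async with container_v1.ClusterManagerAsyncClient() as client:
        # Location "-" asks for clusters across all zones and regions.
        response = await client.list_clusters(
            parent="projects/my-project/locations/-"
        )
        for cluster in response.clusters:
            print(cluster.name, cluster.status)


asyncio.run(main())
```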
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.container_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.rpc import status_pb2  # type: ignore
+
+from google.cloud.container_v1.services.cluster_manager import pagers
+from google.cloud.container_v1.types import cluster_service
+
+from .transports.base import DEFAULT_CLIENT_INFO, ClusterManagerTransport
+from .transports.grpc import ClusterManagerGrpcTransport
+from .transports.grpc_asyncio import ClusterManagerGrpcAsyncIOTransport
+
+
+class ClusterManagerClientMeta(type):
+    """Metaclass for the ClusterManager client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[ClusterManagerTransport]]
+    _transport_registry["grpc"] = ClusterManagerGrpcTransport
+    _transport_registry["grpc_asyncio"] = ClusterManagerGrpcAsyncIOTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[ClusterManagerTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ClusterManagerClient(metaclass=ClusterManagerClientMeta):
+    """Google Kubernetes Engine Cluster Manager v1"""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
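+            # e.g. "container.googleapis.com" matches with name="container"
+            # and googledomain=".googleapis.com"; an endpoint already
+            # containing ".mtls." matches the "mtls" group and is returned
+            # unchanged by the check below.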
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "container.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ClusterManagerTransport: + """Returns the transport used by the client instance. + + Returns: + ClusterManagerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def topic_path( + project: str, + topic: str, + ) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str, str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ClusterManagerTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cluster manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ClusterManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ClusterManagerTransport): + # transport is a ClusterManagerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_clusters( + self, + request: Optional[Union[cluster_service.ListClustersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListClustersResponse: + r"""Lists all clusters owned by a project in either the + specified zone or all zones. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_list_clusters(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.ListClustersRequest( + ) + + # Make the request + response = client.list_clusters(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.ListClustersRequest, dict]): + The request object. ListClustersRequest lists clusters. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides, or "-" for all zones. This + field has been deprecated and replaced by the parent + field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent (str): + The parent (project and location) where the clusters + will be listed. Specified in the format + ``projects/*/locations/*``. Location "-" matches all + zones and all regions. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.ListClustersResponse: + ListClustersResponse is the result of + ListClustersRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.ListClustersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.ListClustersRequest): + request = cluster_service.ListClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
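+        # When parent is "projects/*/locations/-", the response's
+        # missing_zones field names any zones whose clusters may be absent
+        # from the returned list.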
+ return response + + def get_cluster( + self, + request: Optional[Union[cluster_service.GetClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Cluster: + r"""Gets the details of a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_get_cluster(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.GetClusterRequest( + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.GetClusterRequest, dict]): + The request object. GetClusterRequest gets the settings + of a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to retrieve. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster) of the cluster to + retrieve. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Cluster: + A Google Kubernetes Engine cluster. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.GetClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
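+        # A dict, by contrast, is copied into a fresh GetClusterRequest by
+        # the constructor below, so later mutation of the caller's dict
+        # cannot affect the RPC.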
+ if not isinstance(request, cluster_service.GetClusterRequest): + request = cluster_service.GetClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cluster( + self, + request: Optional[Union[cluster_service.CreateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster: Optional[cluster_service.Cluster] = None, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Creates a cluster, consisting of the specified number and type + of Google Compute Engine instances. + + By default, the cluster is created in the project's `default + network `__. + + One firewall is added for the cluster. After cluster creation, + the Kubelet creates routes for each node to allow the containers + on that node to communicate with all other instances in the + cluster. + + Finally, an entry is added to the project's global metadata + indicating which CIDR range the cluster is using. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_create_cluster(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.CreateClusterRequest( + ) + + # Make the request + response = client.create_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.CreateClusterRequest, dict]): + The request object. CreateClusterRequest creates a + cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.container_v1.types.Cluster): + Required. 
A `cluster + resource `__ + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent (str): + The parent (project and location) where the cluster will + be created. Specified in the format + ``projects/*/locations/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster, parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CreateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CreateClusterRequest): + request = cluster_service.CreateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster is not None: + request.cluster = cluster + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_cluster( + self, + request: Optional[Union[cluster_service.UpdateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + update: Optional[cluster_service.ClusterUpdate] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the settings of a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_update_cluster(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.UpdateClusterRequest( + ) + + # Make the request + response = client.update_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.UpdateClusterRequest, dict]): + The request object. UpdateClusterRequest updates the + settings of a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update (google.cloud.container_v1.types.ClusterUpdate): + Required. A description of the + update. + + This corresponds to the ``update`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster) of the cluster to + update. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, update, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.UpdateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.UpdateClusterRequest): + request = cluster_service.UpdateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if update is not None: + request.update = update + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_node_pool( + self, + request: Optional[Union[cluster_service.UpdateNodePoolRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the version and/or image type for the + specified node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_update_node_pool(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.UpdateNodePoolRequest( + node_version="node_version_value", + image_type="image_type_value", + ) + + # Make the request + response = client.update_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.UpdateNodePoolRequest, dict]): + The request object. UpdateNodePoolRequests update a node + pool's image and/or version. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.UpdateNodePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.UpdateNodePoolRequest): + request = cluster_service.UpdateNodePoolRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_node_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
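Unlike the methods above, ``update_node_pool`` exposes no flattened fields, so callers always build the request object themselves. A minimal sketch with placeholder resource names (the version alias and image type are illustrative values):

.. code-block:: python

    from google.cloud import container_v1

    client = container_v1.ClusterManagerClient()

    request = container_v1.UpdateNodePoolRequest(
        # `name` carries the routing data that older requests spread
        # across project_id/zone/cluster_id/node_pool_id.
        name=(
            "projects/my-project/locations/us-central1-a"
            "/clusters/my-cluster/nodePools/my-pool"
        ),
        node_version="latest",  # version alias; an explicit version also works
        image_type="COS_CONTAINERD",
    )
    operation = client.update_node_pool(request=request)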
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_node_pool_autoscaling( + self, + request: Optional[ + Union[cluster_service.SetNodePoolAutoscalingRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the autoscaling settings for the specified node + pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_node_pool_autoscaling(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetNodePoolAutoscalingRequest( + ) + + # Make the request + response = client.set_node_pool_autoscaling(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetNodePoolAutoscalingRequest, dict]): + The request object. SetNodePoolAutoscalingRequest sets + the autoscaler settings of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetNodePoolAutoscalingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetNodePoolAutoscalingRequest): + request = cluster_service.SetNodePoolAutoscalingRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.set_node_pool_autoscaling + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_logging_service( + self, + request: Optional[Union[cluster_service.SetLoggingServiceRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + logging_service: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the logging service for a specific cluster. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_logging_service(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetLoggingServiceRequest( + logging_service="logging_service_value", + ) + + # Make the request + response = client.set_logging_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetLoggingServiceRequest, dict]): + The request object. SetLoggingServiceRequest sets the + logging service of a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + logging_service (str): + Required. The logging service the cluster should use to + write logs. Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud + Logging service with a Kubernetes-native resource + model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``logging.googleapis.com`` for + earlier versions. + + This corresponds to the ``logging_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster) of the cluster to + set logging. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any( + [project_id, zone, cluster_id, logging_service, name] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetLoggingServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetLoggingServiceRequest): + request = cluster_service.SetLoggingServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if logging_service is not None: + request.logging_service = logging_service + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_logging_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_monitoring_service( + self, + request: Optional[ + Union[cluster_service.SetMonitoringServiceRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + monitoring_service: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the monitoring service for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_monitoring_service(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetMonitoringServiceRequest( + monitoring_service="monitoring_service_value", + ) + + # Make the request + response = client.set_monitoring_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetMonitoringServiceRequest, dict]): + The request object. SetMonitoringServiceRequest sets the + monitoring service of a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. 
This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + monitoring_service (str): + Required. The monitoring service the cluster should use + to write metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE + 1.15). + - ``none`` - No metrics will be exported from the + cluster. + + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will + be used for GKE 1.14+ or ``monitoring.googleapis.com`` + for earlier versions. + + This corresponds to the ``monitoring_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster) of the cluster to + set monitoring. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, zone, cluster_id, monitoring_service, name] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetMonitoringServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetMonitoringServiceRequest): + request = cluster_service.SetMonitoringServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if monitoring_service is not None: + request.monitoring_service = monitoring_service + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_monitoring_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
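``set_monitoring_service`` follows the same flattened-argument pattern as the other name-routed methods. A minimal sketch using one of the service values documented above (resource names are placeholders):

.. code-block:: python

    from google.cloud import container_v1

    client = container_v1.ClusterManagerClient()

    operation = client.set_monitoring_service(
        name="projects/my-project/locations/us-central1-a/clusters/my-cluster",
        monitoring_service="monitoring.googleapis.com/kubernetes",
    )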
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_addons_config( + self, + request: Optional[Union[cluster_service.SetAddonsConfigRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + addons_config: Optional[cluster_service.AddonsConfig] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the addons for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_addons_config(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetAddonsConfigRequest( + ) + + # Make the request + response = client.set_addons_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetAddonsConfigRequest, dict]): + The request object. SetAddonsConfigRequest sets the + addons associated with the cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + addons_config (google.cloud.container_v1.types.AddonsConfig): + Required. The desired configurations + for the various addons available to run + in the cluster. + + This corresponds to the ``addons_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster) of the cluster to + set addons. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. 
All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, addons_config, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetAddonsConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetAddonsConfigRequest): + request = cluster_service.SetAddonsConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if addons_config is not None: + request.addons_config = addons_config + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_addons_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_locations( + self, + request: Optional[Union[cluster_service.SetLocationsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + locations: Optional[MutableSequence[str]] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the locations for a specific cluster. Deprecated. Use + `projects.locations.clusters.update `__ + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_locations(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetLocationsRequest( + locations=['locations_value1', 'locations_value2'], + ) + + # Make the request + response = client.set_locations(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetLocationsRequest, dict]): + The request object. SetLocationsRequest sets the + locations of the cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. 
+ + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + locations (MutableSequence[str]): + Required. The desired list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. Changing + the locations a cluster is in will result in nodes being + either created or removed from the cluster, depending on + whether locations are being added or removed. + + This list must always include the cluster's primary + zone. + + This corresponds to the ``locations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster) of the cluster to + set locations. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + warnings.warn( + "ClusterManagerClient.set_locations is deprecated", DeprecationWarning + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, locations, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetLocationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetLocationsRequest): + request = cluster_service.SetLocationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if locations is not None: + request.locations = locations + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_locations] + + # Certain fields should be provided within the metadata header; + # add these here. 
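Since ``set_locations`` now raises a ``DeprecationWarning`` (see the ``warnings.warn`` call above), the same change is normally expressed through ``update_cluster``. A sketch under the assumption that the zone list goes in ``ClusterUpdate.desired_locations`` (resource names are placeholders):

.. code-block:: python

    from google.cloud import container_v1

    client = container_v1.ClusterManagerClient()

    operation = client.update_cluster(
        name="projects/my-project/locations/us-central1-a/clusters/my-cluster",
        update=container_v1.ClusterUpdate(
            # Per the field docs, the list must include the
            # cluster's primary zone.
            desired_locations=["us-central1-a", "us-central1-b"],
        ),
    )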
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_master( + self, + request: Optional[Union[cluster_service.UpdateMasterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + master_version: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the master for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_update_master(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.UpdateMasterRequest( + master_version="master_version_value", + ) + + # Make the request + response = client.update_master(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.UpdateMasterRequest, dict]): + The request object. UpdateMasterRequest updates the + master of the cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + master_version (str): + Required. The Kubernetes version to + change the master to. + Users may specify either explicit + versions offered by Kubernetes Engine or + version aliases, which have the + following behavior: + + - "latest": picks the highest valid + Kubernetes version + - "1.X": picks the highest valid + patch+gke.N patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N + patch in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit + Kubernetes version + - "-": picks the default Kubernetes + version + + This corresponds to the ``master_version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster) of the cluster to + update. Specified in the format + ``projects/*/locations/*/clusters/*``. 
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.container_v1.types.Operation:
+                This operation resource represents
+                operations that may have happened or are
+                happening on the cluster. All fields are
+                output only.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project_id, zone, cluster_id, master_version, name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cluster_service.UpdateMasterRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cluster_service.UpdateMasterRequest):
+            request = cluster_service.UpdateMasterRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project_id is not None:
+            request.project_id = project_id
+        if zone is not None:
+            request.zone = zone
+        if cluster_id is not None:
+            request.cluster_id = cluster_id
+        if master_version is not None:
+            request.master_version = master_version
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_master]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def set_master_auth(
+        self,
+        request: Optional[Union[cluster_service.SetMasterAuthRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.Operation:
+        r"""Sets master auth materials. Currently supports changing
+        the admin password for a specific cluster, either via
+        password generation or by explicitly setting the password.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_master_auth(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetMasterAuthRequest( + action="SET_USERNAME", + ) + + # Make the request + response = client.set_master_auth(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetMasterAuthRequest, dict]): + The request object. SetMasterAuthRequest updates the + admin password of a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetMasterAuthRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetMasterAuthRequest): + request = cluster_service.SetMasterAuthRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_master_auth] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_cluster( + self, + request: Optional[Union[cluster_service.DeleteClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Deletes the cluster, including the Kubernetes + endpoint and all worker nodes. + + Firewalls and routes that were configured during cluster + creation are also deleted. + + Other Google Compute Engine resources that might be in + use by the cluster, such as load balancer resources, are + not deleted if they weren't present when the cluster was + initially created. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_delete_cluster(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.DeleteClusterRequest( + ) + + # Make the request + response = client.delete_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.DeleteClusterRequest, dict]): + The request object. DeleteClusterRequest deletes a + cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to delete. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster) of the cluster to + delete. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.DeleteClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.DeleteClusterRequest): + request = cluster_service.DeleteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_operations( + self, + request: Optional[Union[cluster_service.ListOperationsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListOperationsResponse: + r"""Lists all operations in a project in a specific zone + or all zones. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_list_operations(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.ListOperationsRequest( + ) + + # Make the request + response = client.list_operations(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.ListOperationsRequest, dict]): + The request object. ListOperationsRequest lists + operations. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + to return operations for, or ``-`` for all zones. This + field has been deprecated and replaced by the parent + field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.ListOperationsResponse: + ListOperationsResponse is the result + of ListOperationsRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.ListOperationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
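Each method assembles its routing metadata the same way: ``gapic_v1.routing_header.to_grpc_metadata`` turns the routing fields into a single ``x-goog-request-params`` header pair, which is appended to any caller-supplied metadata so the backend can route on ``name`` or ``parent``. A small illustration (the parent value is a placeholder):

.. code-block:: python

    from google.api_core import gapic_v1

    # One (key, value) tuple, ready to append to gRPC metadata.
    header = gapic_v1.routing_header.to_grpc_metadata(
        (("parent", "projects/my-project/locations/-"),)
    )
    print(header[0])  # x-goog-request-params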
+ if not isinstance(request, cluster_service.ListOperationsRequest): + request = cluster_service.ListOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[Union[cluster_service.GetOperationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + operation_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Gets the specified operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_get_operation(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.GetOperationRequest( + ) + + # Make the request + response = client.get_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.GetOperationRequest, dict]): + The request object. GetOperationRequest gets a single + operation. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (str): + Deprecated. The server-assigned ``name`` of the + operation. This field has been deprecated and replaced + by the name field. + + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, operation id) of the + operation to get. Specified in the format + ``projects/*/locations/*/operations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, operation_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.GetOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.GetOperationRequest): + request = cluster_service.GetOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if operation_id is not None: + request.operation_id = operation_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[Union[cluster_service.CancelOperationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + operation_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels the specified operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_cancel_operation(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.CancelOperationRequest( + ) + + # Make the request + client.cancel_operation(request=request) + + Args: + request (Union[google.cloud.container_v1.types.CancelOperationRequest, dict]): + The request object. CancelOperationRequest cancels a + single operation. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. 
+ + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the operation resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (str): + Deprecated. The server-assigned ``name`` of the + operation. This field has been deprecated and replaced + by the name field. + + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, operation id) of the + operation to cancel. Specified in the format + ``projects/*/locations/*/operations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, operation_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CancelOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CancelOperationRequest): + request = cluster_service.CancelOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if operation_id is not None: + request.operation_id = operation_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_server_config( + self, + request: Optional[Union[cluster_service.GetServerConfigRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ServerConfig: + r"""Returns configuration info about the Google + Kubernetes Engine service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_get_server_config(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.GetServerConfigRequest( + ) + + # Make the request + response = client.get_server_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.GetServerConfigRequest, dict]): + The request object. Gets the current Kubernetes Engine + service configuration. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + to return operations for. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project and location) of the server config to + get, specified in the format ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.ServerConfig: + Kubernetes Engine service + configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.GetServerConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.GetServerConfigRequest): + request = cluster_service.GetServerConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_server_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
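+        # For example, callers often read response.default_cluster_version or
+        # response.valid_master_versions (fields defined on the ServerConfig
+        # message) when choosing a version for cluster creation or upgrade.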
+        return response
+
+    def get_json_web_keys(
+        self,
+        request: Optional[Union[cluster_service.GetJSONWebKeysRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.GetJSONWebKeysResponse:
+        r"""Gets the public component of the cluster signing keys
+        in JSON Web Key format.
+        This API is not yet intended for general use, and is not
+        available for all clusters.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1
+
+            def sample_get_json_web_keys():
+                # Create a client
+                client = container_v1.ClusterManagerClient()
+
+                # Initialize request argument(s)
+                request = container_v1.GetJSONWebKeysRequest(
+                )
+
+                # Make the request
+                response = client.get_json_web_keys(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.container_v1.types.GetJSONWebKeysRequest, dict]):
+                The request object. GetJSONWebKeysRequest gets the public component of the
+                keys used by the cluster to sign token requests. This
+                will be the jwks_uri for the discovery document returned
+                by getOpenIDConfig. See the OpenID Connect Discovery 1.0
+                specification for details.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.container_v1.types.GetJSONWebKeysResponse:
+                GetJSONWebKeysResponse is a valid
+                JSON Web Key Set as specified in RFC
+                7517
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cluster_service.GetJSONWebKeysRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cluster_service.GetJSONWebKeysRequest):
+            request = cluster_service.GetJSONWebKeysRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_json_web_keys]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_node_pools(
+        self,
+        request: Optional[Union[cluster_service.ListNodePoolsRequest, dict]] = None,
+        *,
+        project_id: Optional[str] = None,
+        zone: Optional[str] = None,
+        cluster_id: Optional[str] = None,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.ListNodePoolsResponse:
+        r"""Lists the node pools for a cluster.
+
+        ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_list_node_pools(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.ListNodePoolsRequest( + ) + + # Make the request + response = client.list_node_pools(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.ListNodePoolsRequest, dict]): + The request object. ListNodePoolsRequest lists the node + pool(s) for a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the parent field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent (str): + The parent (project, location, cluster name) where the + node pools will be listed. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.ListNodePoolsResponse: + ListNodePoolsResponse is the result + of ListNodePoolsRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.ListNodePoolsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.ListNodePoolsRequest): + request = cluster_service.ListNodePoolsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
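+        # Only arguments that were actually passed (i.e. are not None)
+        # overwrite fields on the request object below; unset flattened
+        # arguments leave the request's existing values untouched.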
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_node_pools] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_node_pool( + self, + request: Optional[Union[cluster_service.GetNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.NodePool: + r"""Retrieves the requested node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_get_node_pool(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.GetNodePoolRequest( + ) + + # Make the request + response = client.get_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.GetNodePoolRequest, dict]): + The request object. GetNodePoolRequest retrieves a node + pool for a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (str): + Deprecated. The name of the node + pool. This field has been deprecated and + replaced by the name field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster, node pool id) of + the node pool to get. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.NodePool: + NodePool contains the name and + configuration for a cluster's node pool. + Node pools are a set of nodes (i.e. + VM's), with a common configuration and + specification, under the control of the + cluster master. They may have a set of + Kubernetes labels applied to them, which + may be used to reference them during pod + scheduling. They may also be resized up + or down, to accommodate the workload. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.GetNodePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.GetNodePoolRequest): + request = cluster_service.GetNodePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_node_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_node_pool( + self, + request: Optional[Union[cluster_service.CreateNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool: Optional[cluster_service.NodePool] = None, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Creates a node pool for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_create_node_pool(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.CreateNodePoolRequest( + ) + + # Make the request + response = client.create_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.CreateNodePoolRequest, dict]): + The request object. CreateNodePoolRequest creates a node + pool for a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the parent field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool (google.cloud.container_v1.types.NodePool): + Required. The node pool to create. + This corresponds to the ``node_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parent (str): + The parent (project, location, cluster name) where the + node pool will be created. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool, parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CreateNodePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CreateNodePoolRequest): + request = cluster_service.CreateNodePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
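+        # Assigning `node_pool` below copies the provided NodePool message
+        # into the request (proto-plus assignment copies message fields, so
+        # the caller's object is not aliased).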
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool is not None: + request.node_pool = node_pool + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_node_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_node_pool( + self, + request: Optional[Union[cluster_service.DeleteNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Deletes a node pool from a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_delete_node_pool(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.DeleteNodePoolRequest( + ) + + # Make the request + response = client.delete_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.DeleteNodePoolRequest, dict]): + The request object. DeleteNodePoolRequest deletes a node + pool for a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (str): + Deprecated. The name of the node pool + to delete. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster, node pool id) of + the node pool to delete. 
Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.DeleteNodePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.DeleteNodePoolRequest): + request = cluster_service.DeleteNodePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_node_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def complete_node_pool_upgrade( + self, + request: Optional[ + Union[cluster_service.CompleteNodePoolUpgradeRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""CompleteNodePoolUpgrade will signal an on-going node + pool upgrade to complete. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1
+
+            def sample_complete_node_pool_upgrade():
+                # Create a client
+                client = container_v1.ClusterManagerClient()
+
+                # Initialize request argument(s)
+                request = container_v1.CompleteNodePoolUpgradeRequest(
+                )
+
+                # Make the request
+                client.complete_node_pool_upgrade(request=request)
+
+        Args:
+            request (Union[google.cloud.container_v1.types.CompleteNodePoolUpgradeRequest, dict]):
+                The request object. CompleteNodePoolUpgradeRequest sets
+                the name of the target node pool to complete
+                the upgrade.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cluster_service.CompleteNodePoolUpgradeRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cluster_service.CompleteNodePoolUpgradeRequest):
+            request = cluster_service.CompleteNodePoolUpgradeRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.complete_node_pool_upgrade
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def rollback_node_pool_upgrade(
+        self,
+        request: Optional[
+            Union[cluster_service.RollbackNodePoolUpgradeRequest, dict]
+        ] = None,
+        *,
+        project_id: Optional[str] = None,
+        zone: Optional[str] = None,
+        cluster_id: Optional[str] = None,
+        node_pool_id: Optional[str] = None,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.Operation:
+        r"""Rolls back a previously Aborted or Failed NodePool
+        upgrade. This makes no changes if the last upgrade
+        successfully completed.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1
+
+            def sample_rollback_node_pool_upgrade():
+                # Create a client
+                client = container_v1.ClusterManagerClient()
+
+                # Initialize request argument(s)
+                request = container_v1.RollbackNodePoolUpgradeRequest(
+                )
+
+                # Make the request
+                response = client.rollback_node_pool_upgrade(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.container_v1.types.RollbackNodePoolUpgradeRequest, dict]):
+                The request object. RollbackNodePoolUpgradeRequest
+                rolls back the previously Aborted or
+                Failed NodePool upgrade. This will be a
+                no-op if the last upgrade successfully
+                completed.
+            project_id (str):
+                Deprecated. The Google Developers Console `project ID or
+                project
+                number `__.
+                This field has been deprecated and replaced by the name
+                field.
+
+                This corresponds to the ``project_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                Deprecated. The name of the Google Compute Engine
+                `zone `__
+                in which the cluster resides. This field has been
+                deprecated and replaced by the name field.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            cluster_id (str):
+                Deprecated. The name of the cluster
+                to roll back. This field has been
+                deprecated and replaced by the name
+                field.
+
+                This corresponds to the ``cluster_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            node_pool_id (str):
+                Deprecated. The name of the node pool
+                to roll back. This field has been
+                deprecated and replaced by the name
+                field.
+
+                This corresponds to the ``node_pool_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            name (str):
+                The name (project, location, cluster, node pool id) of
+                the node pool to roll back the upgrade for. Specified in
+                the format
+                ``projects/*/locations/*/clusters/*/nodePools/*``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.container_v1.types.Operation:
+                This operation resource represents
+                operations that may have happened or are
+                happening on the cluster. All fields are
+                output only.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project_id, zone, cluster_id, node_pool_id, name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cluster_service.RollbackNodePoolUpgradeRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cluster_service.RollbackNodePoolUpgradeRequest):
+            request = cluster_service.RollbackNodePoolUpgradeRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project_id is not None:
+            request.project_id = project_id
+        if zone is not None:
+            request.zone = zone
+        if cluster_id is not None:
+            request.cluster_id = cluster_id
+        if node_pool_id is not None:
+            request.node_pool_id = node_pool_id
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.rollback_node_pool_upgrade
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
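+        # to_grpc_metadata() turns the ("name", ...) pair into the
+        # x-goog-request-params header, which the backend uses to route the
+        # request without having to parse the request body.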
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_node_pool_management( + self, + request: Optional[ + Union[cluster_service.SetNodePoolManagementRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the NodeManagement options for a node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_node_pool_management(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetNodePoolManagementRequest( + ) + + # Make the request + response = client.set_node_pool_management(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetNodePoolManagementRequest, dict]): + The request object. SetNodePoolManagementRequest sets the + node management properties of a node + pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetNodePoolManagementRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetNodePoolManagementRequest): + request = cluster_service.SetNodePoolManagementRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_node_pool_management] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels( + self, + request: Optional[Union[cluster_service.SetLabelsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets labels on a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_labels(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetLabelsRequest( + label_fingerprint="label_fingerprint_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetLabelsRequest, dict]): + The request object. SetLabelsRequest sets the Google + Cloud Platform labels on a Google + Container Engine cluster, which will in + turn set them for Google Compute Engine + resources used by that cluster + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetLabelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetLabelsRequest): + request = cluster_service.SetLabelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_legacy_abac( + self, + request: Optional[Union[cluster_service.SetLegacyAbacRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + enabled: Optional[bool] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Enables or disables the ABAC authorization mechanism + on a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_legacy_abac(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetLegacyAbacRequest( + enabled=True, + ) + + # Make the request + response = client.set_legacy_abac(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetLegacyAbacRequest, dict]): + The request object. SetLegacyAbacRequest enables or + disables the ABAC authorization + mechanism for a cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster + to update. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + enabled (bool): + Required. Whether ABAC authorization + will be enabled in the cluster. + + This corresponds to the ``enabled`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster name) of the + cluster to set legacy abac. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, enabled, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetLegacyAbacRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetLegacyAbacRequest): + request = cluster_service.SetLegacyAbacRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
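+        # Note the explicit `is not None` checks below: they ensure that a
+        # boolean `enabled=False` is still copied onto the request rather
+        # than being dropped as falsy.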
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if enabled is not None: + request.enabled = enabled + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_legacy_abac] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def start_ip_rotation( + self, + request: Optional[Union[cluster_service.StartIPRotationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Starts master IP rotation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_start_ip_rotation(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.StartIPRotationRequest( + ) + + # Make the request + response = client.start_ip_rotation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.StartIPRotationRequest, dict]): + The request object. StartIPRotationRequest creates a new + IP for the cluster and then performs a + node upgrade on each node pool to point + to the new IP. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster name) of the + cluster to start IP rotation. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.StartIPRotationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.StartIPRotationRequest): + request = cluster_service.StartIPRotationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_ip_rotation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def complete_ip_rotation( + self, + request: Optional[ + Union[cluster_service.CompleteIPRotationRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Completes master IP rotation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_complete_ip_rotation(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.CompleteIPRotationRequest( + ) + + # Make the request + response = client.complete_ip_rotation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.CompleteIPRotationRequest, dict]): + The request object. CompleteIPRotationRequest moves the + cluster master back into single-IP mode. 
+ project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster name) of the + cluster to complete IP rotation. Specified in the format + ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CompleteIPRotationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CompleteIPRotationRequest): + request = cluster_service.CompleteIPRotationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.complete_ip_rotation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
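+        # The result is a plain Operation message rather than a long-running
+        # operation future; callers typically poll it with get_operation
+        # until its status reports completion.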
+ return response + + def set_node_pool_size( + self, + request: Optional[Union[cluster_service.SetNodePoolSizeRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the size for a specific node pool. The new size will be + used for all replicas, including future replicas created by + modifying + [NodePool.locations][google.container.v1.NodePool.locations]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_node_pool_size(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetNodePoolSizeRequest( + node_count=1070, + ) + + # Make the request + response = client.set_node_pool_size(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetNodePoolSizeRequest, dict]): + The request object. SetNodePoolSizeRequest sets the size + of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetNodePoolSizeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetNodePoolSizeRequest): + request = cluster_service.SetNodePoolSizeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_node_pool_size] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_network_policy( + self, + request: Optional[Union[cluster_service.SetNetworkPolicyRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + network_policy: Optional[cluster_service.NetworkPolicy] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Enables or disables Network Policy for a cluster. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_network_policy(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetNetworkPolicyRequest( + ) + + # Make the request + response = client.set_network_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetNetworkPolicyRequest, dict]): + The request object. SetNetworkPolicyRequest + enables/disables network policy for a + cluster. + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number <https://cloud.google.com/resource-manager/docs/creating-managing-projects>`__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone <https://cloud.google.com/compute/docs/zones#available>`__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and + replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_policy (google.cloud.container_v1.types.NetworkPolicy): + Required. Configuration options for + the NetworkPolicy feature. + + This corresponds to the ``network_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster name) of the + cluster to set networking policy. Specified in the + format ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, network_policy, name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetNetworkPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields.
+ if not isinstance(request, cluster_service.SetNetworkPolicyRequest): + request = cluster_service.SetNetworkPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if network_policy is not None: + request.network_policy = network_policy + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_network_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_maintenance_policy( + self, + request: Optional[ + Union[cluster_service.SetMaintenancePolicyRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + maintenance_policy: Optional[cluster_service.MaintenancePolicy] = None, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the maintenance policy for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_set_maintenance_policy(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.SetMaintenancePolicyRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = client.set_maintenance_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.SetMaintenancePolicyRequest, dict]): + The request object. SetMaintenancePolicyRequest sets the + maintenance policy for a cluster. + project_id (str): + Required. The Google Developers Console `project ID or + project + number <https://cloud.google.com/resource-manager/docs/creating-managing-projects>`__. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. The name of the Google Compute Engine + `zone <https://cloud.google.com/compute/docs/zones#available>`__ + in which the cluster resides. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. The name of the cluster to + update. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + maintenance_policy (google.cloud.container_v1.types.MaintenancePolicy): + Required. The maintenance policy to + be set for the cluster.
An empty field + clears the existing maintenance policy. + + This corresponds to the ``maintenance_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + name (str): + The name (project, location, cluster name) of the + cluster to set maintenance policy. Specified in the + format ``projects/*/locations/*/clusters/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, zone, cluster_id, maintenance_policy, name] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetMaintenancePolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetMaintenancePolicyRequest): + request = cluster_service.SetMaintenancePolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if maintenance_policy is not None: + request.maintenance_policy = maintenance_policy + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_maintenance_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_usable_subnetworks( + self, + request: Optional[ + Union[cluster_service.ListUsableSubnetworksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableSubnetworksPager: + r"""Lists subnetworks that are usable for creating + clusters in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
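For the maintenance-policy call described above, a minimal sketch (the cluster path and window are illustrative; the legacy project_id/zone/cluster_id triple works as well):

.. code-block:: python

    from google.cloud import container_v1

    client = container_v1.ClusterManagerClient()

    # A daily window starting at 03:00 UTC; an empty policy would
    # clear the existing one, as the docstring notes.
    policy = container_v1.MaintenancePolicy(
        window=container_v1.MaintenanceWindow(
            daily_maintenance_window=container_v1.DailyMaintenanceWindow(
                start_time="03:00",
            ),
        ),
    )
    operation = client.set_maintenance_policy(
        name="projects/my-project/locations/us-central1/clusters/my-cluster",
        maintenance_policy=policy,
    )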
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_list_usable_subnetworks(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.ListUsableSubnetworksRequest( + ) + + # Make the request + page_result = client.list_usable_subnetworks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.container_v1.types.ListUsableSubnetworksRequest, dict]): + The request object. ListUsableSubnetworksRequest requests + the list of usable subnetworks available + to a user for creating clusters. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.services.cluster_manager.pagers.ListUsableSubnetworksPager: + ListUsableSubnetworksResponse is the + response of + ListUsableSubnetworksRequest. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.ListUsableSubnetworksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.ListUsableSubnetworksRequest): + request = cluster_service.ListUsableSubnetworksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_usable_subnetworks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListUsableSubnetworksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def check_autopilot_compatibility( + self, + request: Optional[ + Union[cluster_service.CheckAutopilotCompatibilityRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.CheckAutopilotCompatibilityResponse: + r"""Checks the cluster compatibility with Autopilot mode, + and returns a list of compatibility issues. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1 + + def sample_check_autopilot_compatibility(): + # Create a client + client = container_v1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1.CheckAutopilotCompatibilityRequest( + ) + + # Make the request + response = client.check_autopilot_compatibility(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1.types.CheckAutopilotCompatibilityRequest, dict]): + The request object. CheckAutopilotCompatibilityRequest + requests getting the blockers for the + given operation in the cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1.types.CheckAutopilotCompatibilityResponse: + CheckAutopilotCompatibilityResponse + has a list of compatibility issues. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CheckAutopilotCompatibilityRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CheckAutopilotCompatibilityRequest): + request = cluster_service.CheckAutopilotCompatibilityRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.check_autopilot_compatibility + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ClusterManagerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ClusterManagerClient",) diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/pagers.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/pagers.py new file mode 100644 index 000000000000..90319405f5af --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/pagers.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
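Because ``__exit__`` above closes the transport, the client supports ``with``-statement usage. A sketch (assumes Application Default Credentials and a placeholder cluster path; only safe when the transport is not shared):

.. code-block:: python

    from google.cloud import container_v1

    # The transport is closed on exit, so do not reuse it elsewhere.
    with container_v1.ClusterManagerClient() as client:
        response = client.check_autopilot_compatibility(
            request=container_v1.CheckAutopilotCompatibilityRequest(
                name="projects/my-project/locations/us-central1/clusters/my-cluster",
            )
        )
        print(response.summary)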
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.container_v1.types import cluster_service + + +class ListUsableSubnetworksPager: + """A pager for iterating through ``list_usable_subnetworks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.container_v1.types.ListUsableSubnetworksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subnetworks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsableSubnetworks`` requests and continue to iterate + through the ``subnetworks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.container_v1.types.ListUsableSubnetworksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cluster_service.ListUsableSubnetworksResponse], + request: cluster_service.ListUsableSubnetworksRequest, + response: cluster_service.ListUsableSubnetworksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.container_v1.types.ListUsableSubnetworksRequest): + The initial request object. + response (google.cloud.container_v1.types.ListUsableSubnetworksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cluster_service.ListUsableSubnetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cluster_service.ListUsableSubnetworksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cluster_service.UsableSubnetwork]: + for page in self.pages: + yield from page.subnetworks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableSubnetworksAsyncPager: + """A pager for iterating through ``list_usable_subnetworks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.container_v1.types.ListUsableSubnetworksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subnetworks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUsableSubnetworks`` requests and continue to iterate + through the ``subnetworks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.container_v1.types.ListUsableSubnetworksResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cluster_service.ListUsableSubnetworksResponse]], + request: cluster_service.ListUsableSubnetworksRequest, + response: cluster_service.ListUsableSubnetworksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.container_v1.types.ListUsableSubnetworksRequest): + The initial request object. + response (google.cloud.container_v1.types.ListUsableSubnetworksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cluster_service.ListUsableSubnetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[cluster_service.ListUsableSubnetworksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[cluster_service.UsableSubnetwork]: + async def async_generator(): + async for page in self.pages: + for response in page.subnetworks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/__init__.py new file mode 100644 index 000000000000..dd25fdf97133 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ClusterManagerTransport +from .grpc import ClusterManagerGrpcTransport +from .grpc_asyncio import ClusterManagerGrpcAsyncIOTransport + +# Compile a registry of transports. 
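The pager classes defined in pagers.py above resolve additional pages lazily: iterating the pager yields ``UsableSubnetwork`` items across pages, while the ``pages`` property exposes whole responses instead. A usage sketch (project path is a placeholder):

.. code-block:: python

    from google.cloud import container_v1

    client = container_v1.ClusterManagerClient()
    pager = client.list_usable_subnetworks(
        request=container_v1.ListUsableSubnetworksRequest(
            parent="projects/my-project",
            page_size=50,
        )
    )

    # Item-wise iteration follows next_page_token transparently.
    for subnetwork in pager:
        print(subnetwork.subnetwork)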
+_transport_registry = OrderedDict() # type: Dict[str, Type[ClusterManagerTransport]] +_transport_registry["grpc"] = ClusterManagerGrpcTransport +_transport_registry["grpc_asyncio"] = ClusterManagerGrpcAsyncIOTransport + +__all__ = ( + "ClusterManagerTransport", + "ClusterManagerGrpcTransport", + "ClusterManagerGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/base.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/base.py new file mode 100644 index 000000000000..b46095e8c3df --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/base.py @@ -0,0 +1,726 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.container_v1 import gapic_version as package_version +from google.cloud.container_v1.types import cluster_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ClusterManagerTransport(abc.ABC): + """Abstract transport class for ClusterManager.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "container.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
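The credentials-related arguments above mirror what the public client accepts. A sketch with explicit service-account credentials instead of the environment defaults (the key-file path is a placeholder):

.. code-block:: python

    from google.oauth2 import service_account

    from google.cloud import container_v1

    credentials = service_account.Credentials.from_service_account_file(
        "service-account.json",
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = container_v1.ClusterManagerClient(credentials=credentials)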
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
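The table built below wires per-method default retries and timeouts into the wrapped methods; any single call can still override them, as in this sketch (the retry values mirror the defaults listed below):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    from google.cloud import container_v1

    client = container_v1.ClusterManagerClient()

    # Per-call override of the wrapped defaults.
    clusters = client.list_clusters(
        parent="projects/my-project/locations/-",
        retry=retries.Retry(
            initial=0.1,
            maximum=60.0,
            multiplier=1.3,
            predicate=retries.if_exception_type(
                core_exceptions.DeadlineExceeded,
                core_exceptions.ServiceUnavailable,
            ),
            deadline=20.0,
        ),
        timeout=20.0,
    )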
+ self._wrapped_methods = { + self.list_clusters: gapic_v1.method.wrap_method( + self.list_clusters, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_cluster: gapic_v1.method.wrap_method( + self.get_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_cluster: gapic_v1.method.wrap_method( + self.create_cluster, + default_timeout=45.0, + client_info=client_info, + ), + self.update_cluster: gapic_v1.method.wrap_method( + self.update_cluster, + default_timeout=45.0, + client_info=client_info, + ), + self.update_node_pool: gapic_v1.method.wrap_method( + self.update_node_pool, + default_timeout=45.0, + client_info=client_info, + ), + self.set_node_pool_autoscaling: gapic_v1.method.wrap_method( + self.set_node_pool_autoscaling, + default_timeout=45.0, + client_info=client_info, + ), + self.set_logging_service: gapic_v1.method.wrap_method( + self.set_logging_service, + default_timeout=45.0, + client_info=client_info, + ), + self.set_monitoring_service: gapic_v1.method.wrap_method( + self.set_monitoring_service, + default_timeout=45.0, + client_info=client_info, + ), + self.set_addons_config: gapic_v1.method.wrap_method( + self.set_addons_config, + default_timeout=45.0, + client_info=client_info, + ), + self.set_locations: gapic_v1.method.wrap_method( + self.set_locations, + default_timeout=45.0, + client_info=client_info, + ), + self.update_master: gapic_v1.method.wrap_method( + self.update_master, + default_timeout=45.0, + client_info=client_info, + ), + self.set_master_auth: gapic_v1.method.wrap_method( + self.set_master_auth, + default_timeout=45.0, + client_info=client_info, + ), + self.delete_cluster: gapic_v1.method.wrap_method( + self.delete_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=45.0, + client_info=client_info, + ), + self.get_server_config: gapic_v1.method.wrap_method( + self.get_server_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), 
+ deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_json_web_keys: gapic_v1.method.wrap_method( + self.get_json_web_keys, + default_timeout=None, + client_info=client_info, + ), + self.list_node_pools: gapic_v1.method.wrap_method( + self.list_node_pools, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_node_pool: gapic_v1.method.wrap_method( + self.get_node_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_node_pool: gapic_v1.method.wrap_method( + self.create_node_pool, + default_timeout=45.0, + client_info=client_info, + ), + self.delete_node_pool: gapic_v1.method.wrap_method( + self.delete_node_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.complete_node_pool_upgrade: gapic_v1.method.wrap_method( + self.complete_node_pool_upgrade, + default_timeout=None, + client_info=client_info, + ), + self.rollback_node_pool_upgrade: gapic_v1.method.wrap_method( + self.rollback_node_pool_upgrade, + default_timeout=45.0, + client_info=client_info, + ), + self.set_node_pool_management: gapic_v1.method.wrap_method( + self.set_node_pool_management, + default_timeout=45.0, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=45.0, + client_info=client_info, + ), + self.set_legacy_abac: gapic_v1.method.wrap_method( + self.set_legacy_abac, + default_timeout=45.0, + client_info=client_info, + ), + self.start_ip_rotation: gapic_v1.method.wrap_method( + self.start_ip_rotation, + default_timeout=45.0, + client_info=client_info, + ), + self.complete_ip_rotation: gapic_v1.method.wrap_method( + self.complete_ip_rotation, + default_timeout=45.0, + client_info=client_info, + ), + self.set_node_pool_size: gapic_v1.method.wrap_method( + self.set_node_pool_size, + default_timeout=45.0, + client_info=client_info, + ), + self.set_network_policy: gapic_v1.method.wrap_method( + self.set_network_policy, + default_timeout=45.0, + client_info=client_info, + ), + self.set_maintenance_policy: gapic_v1.method.wrap_method( + self.set_maintenance_policy, + default_timeout=45.0, + client_info=client_info, + ), + self.list_usable_subnetworks: gapic_v1.method.wrap_method( + self.list_usable_subnetworks, + default_timeout=None, + client_info=client_info, + ), + self.check_autopilot_compatibility: gapic_v1.method.wrap_method( + self.check_autopilot_compatibility, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_clusters( + self, + ) -> Callable[ + [cluster_service.ListClustersRequest], + Union[ + cluster_service.ListClustersResponse, + Awaitable[cluster_service.ListClustersResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_cluster( + self, + ) -> Callable[ + [cluster_service.GetClusterRequest], + Union[cluster_service.Cluster, Awaitable[cluster_service.Cluster]], + ]: + raise NotImplementedError() + + @property + def create_cluster( + self, + ) -> Callable[ + [cluster_service.CreateClusterRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def update_cluster( + self, + ) -> Callable[ + [cluster_service.UpdateClusterRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def update_node_pool( + self, + ) -> Callable[ + [cluster_service.UpdateNodePoolRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_node_pool_autoscaling( + self, + ) -> Callable[ + [cluster_service.SetNodePoolAutoscalingRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_logging_service( + self, + ) -> Callable[ + [cluster_service.SetLoggingServiceRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_monitoring_service( + self, + ) -> Callable[ + [cluster_service.SetMonitoringServiceRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_addons_config( + self, + ) -> Callable[ + [cluster_service.SetAddonsConfigRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_locations( + self, + ) -> Callable[ + [cluster_service.SetLocationsRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def update_master( + self, + ) -> Callable[ + [cluster_service.UpdateMasterRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_master_auth( + self, + ) -> Callable[ + [cluster_service.SetMasterAuthRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cluster( + self, + ) -> Callable[ + [cluster_service.DeleteClusterRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [cluster_service.ListOperationsRequest], + Union[ + cluster_service.ListOperationsResponse, + Awaitable[cluster_service.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [cluster_service.GetOperationRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [cluster_service.CancelOperationRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_server_config( 
+ self, + ) -> Callable[ + [cluster_service.GetServerConfigRequest], + Union[cluster_service.ServerConfig, Awaitable[cluster_service.ServerConfig]], + ]: + raise NotImplementedError() + + @property + def get_json_web_keys( + self, + ) -> Callable[ + [cluster_service.GetJSONWebKeysRequest], + Union[ + cluster_service.GetJSONWebKeysResponse, + Awaitable[cluster_service.GetJSONWebKeysResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_node_pools( + self, + ) -> Callable[ + [cluster_service.ListNodePoolsRequest], + Union[ + cluster_service.ListNodePoolsResponse, + Awaitable[cluster_service.ListNodePoolsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_node_pool( + self, + ) -> Callable[ + [cluster_service.GetNodePoolRequest], + Union[cluster_service.NodePool, Awaitable[cluster_service.NodePool]], + ]: + raise NotImplementedError() + + @property + def create_node_pool( + self, + ) -> Callable[ + [cluster_service.CreateNodePoolRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_node_pool( + self, + ) -> Callable[ + [cluster_service.DeleteNodePoolRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def complete_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.CompleteNodePoolUpgradeRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def rollback_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.RollbackNodePoolUpgradeRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_node_pool_management( + self, + ) -> Callable[ + [cluster_service.SetNodePoolManagementRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_labels( + self, + ) -> Callable[ + [cluster_service.SetLabelsRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_legacy_abac( + self, + ) -> Callable[ + [cluster_service.SetLegacyAbacRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def start_ip_rotation( + self, + ) -> Callable[ + [cluster_service.StartIPRotationRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def complete_ip_rotation( + self, + ) -> Callable[ + [cluster_service.CompleteIPRotationRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_node_pool_size( + self, + ) -> Callable[ + [cluster_service.SetNodePoolSizeRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_network_policy( + self, + ) -> Callable[ + [cluster_service.SetNetworkPolicyRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_maintenance_policy( + self, + ) -> Callable[ + [cluster_service.SetMaintenancePolicyRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def 
list_usable_subnetworks( + self, + ) -> Callable[ + [cluster_service.ListUsableSubnetworksRequest], + Union[ + cluster_service.ListUsableSubnetworksResponse, + Awaitable[cluster_service.ListUsableSubnetworksResponse], + ], + ]: + raise NotImplementedError() + + @property + def check_autopilot_compatibility( + self, + ) -> Callable[ + [cluster_service.CheckAutopilotCompatibilityRequest], + Union[ + cluster_service.CheckAutopilotCompatibilityResponse, + Awaitable[cluster_service.CheckAutopilotCompatibilityResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ClusterManagerTransport",) diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc.py new file mode 100644 index 000000000000..a6d5cba5b624 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc.py @@ -0,0 +1,1199 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.container_v1.types import cluster_service + +from .base import DEFAULT_CLIENT_INFO, ClusterManagerTransport + + +class ClusterManagerGrpcTransport(ClusterManagerTransport): + """gRPC backend transport for ClusterManager. + + Google Kubernetes Engine Cluster Manager v1 + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "container.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
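As the argument descriptions below note, an explicitly provided channel takes precedence over credentials. A sketch of building a channel with the transport's own helper and handing the pre-configured transport to the client (assumes Application Default Credentials):

.. code-block:: python

    from google.cloud import container_v1
    from google.cloud.container_v1.services.cluster_manager.transports import (
        ClusterManagerGrpcTransport,
    )

    channel = ClusterManagerGrpcTransport.create_channel(
        "container.googleapis.com",
        scopes=ClusterManagerGrpcTransport.AUTH_SCOPES,
    )
    transport = ClusterManagerGrpcTransport(channel=channel)
    client = container_v1.ClusterManagerClient(transport=transport)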
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "container.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_clusters( + self, + ) -> Callable[ + [cluster_service.ListClustersRequest], cluster_service.ListClustersResponse + ]: + r"""Return a callable for the list clusters method over gRPC. + + Lists all clusters owned by a project in either the + specified zone or all zones. 
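Each of these properties returns the raw gRPC callable, which can be invoked directly with a request message when the client-side conveniences are not wanted. A sketch (assumes default credentials; note that no default retry, timeout, or routing metadata applies at this layer):

.. code-block:: python

    from google.cloud import container_v1
    from google.cloud.container_v1.types import cluster_service

    client = container_v1.ClusterManagerClient()

    # Call the stub behind the property directly.
    raw_list_clusters = client.transport.list_clusters
    response = raw_list_clusters(
        cluster_service.ListClustersRequest(
            parent="projects/my-project/locations/-",
        )
    )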
+ + Returns: + Callable[[~.ListClustersRequest], + ~.ListClustersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/ListClusters", + request_serializer=cluster_service.ListClustersRequest.serialize, + response_deserializer=cluster_service.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster( + self, + ) -> Callable[[cluster_service.GetClusterRequest], cluster_service.Cluster]: + r"""Return a callable for the get cluster method over gRPC. + + Gets the details of a specific cluster. + + Returns: + Callable[[~.GetClusterRequest], + ~.Cluster]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetCluster", + request_serializer=cluster_service.GetClusterRequest.serialize, + response_deserializer=cluster_service.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[[cluster_service.CreateClusterRequest], cluster_service.Operation]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a cluster, consisting of the specified number and type + of Google Compute Engine instances. + + By default, the cluster is created in the project's `default + network `__. + + One firewall is added for the cluster. After cluster creation, + the Kubelet creates routes for each node to allow the containers + on that node to communicate with all other instances in the + cluster. + + Finally, an entry is added to the project's global metadata + indicating which CIDR range the cluster is using. + + Returns: + Callable[[~.CreateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CreateCluster", + request_serializer=cluster_service.CreateClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[cluster_service.UpdateClusterRequest], cluster_service.Operation]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the settings of a specific cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/UpdateCluster", + request_serializer=cluster_service.UpdateClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_cluster"] + + @property + def update_node_pool( + self, + ) -> Callable[[cluster_service.UpdateNodePoolRequest], cluster_service.Operation]: + r"""Return a callable for the update node pool method over gRPC. + + Updates the version and/or image type for the + specified node pool. + + Returns: + Callable[[~.UpdateNodePoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_node_pool" not in self._stubs: + self._stubs["update_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/UpdateNodePool", + request_serializer=cluster_service.UpdateNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_node_pool"] + + @property + def set_node_pool_autoscaling( + self, + ) -> Callable[ + [cluster_service.SetNodePoolAutoscalingRequest], cluster_service.Operation + ]: + r"""Return a callable for the set node pool autoscaling method over gRPC. + + Sets the autoscaling settings for the specified node + pool. + + Returns: + Callable[[~.SetNodePoolAutoscalingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_autoscaling" not in self._stubs: + self._stubs["set_node_pool_autoscaling"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetNodePoolAutoscaling", + request_serializer=cluster_service.SetNodePoolAutoscalingRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_autoscaling"] + + @property + def set_logging_service( + self, + ) -> Callable[ + [cluster_service.SetLoggingServiceRequest], cluster_service.Operation + ]: + r"""Return a callable for the set logging service method over gRPC. + + Sets the logging service for a specific cluster. + + Returns: + Callable[[~.SetLoggingServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_logging_service" not in self._stubs: + self._stubs["set_logging_service"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetLoggingService", + request_serializer=cluster_service.SetLoggingServiceRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_logging_service"] + + @property + def set_monitoring_service( + self, + ) -> Callable[ + [cluster_service.SetMonitoringServiceRequest], cluster_service.Operation + ]: + r"""Return a callable for the set monitoring service method over gRPC. 
+
+        Sets the monitoring service for a specific cluster.
+
+        Returns:
+            Callable[[~.SetMonitoringServiceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_monitoring_service" not in self._stubs:
+            self._stubs["set_monitoring_service"] = self.grpc_channel.unary_unary(
+                "/google.container.v1.ClusterManager/SetMonitoringService",
+                request_serializer=cluster_service.SetMonitoringServiceRequest.serialize,
+                response_deserializer=cluster_service.Operation.deserialize,
+            )
+        return self._stubs["set_monitoring_service"]
+
+    @property
+    def set_addons_config(
+        self,
+    ) -> Callable[[cluster_service.SetAddonsConfigRequest], cluster_service.Operation]:
+        r"""Return a callable for the set addons config method over gRPC.
+
+        Sets the addons for a specific cluster.
+
+        Returns:
+            Callable[[~.SetAddonsConfigRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_addons_config" not in self._stubs:
+            self._stubs["set_addons_config"] = self.grpc_channel.unary_unary(
+                "/google.container.v1.ClusterManager/SetAddonsConfig",
+                request_serializer=cluster_service.SetAddonsConfigRequest.serialize,
+                response_deserializer=cluster_service.Operation.deserialize,
+            )
+        return self._stubs["set_addons_config"]
+
+    @property
+    def set_locations(
+        self,
+    ) -> Callable[[cluster_service.SetLocationsRequest], cluster_service.Operation]:
+        r"""Return a callable for the set locations method over gRPC.
+
+        Sets the locations for a specific cluster. Deprecated. Use
+        `projects.locations.clusters.update <https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters/update>`__
+        instead.
+
+        Returns:
+            Callable[[~.SetLocationsRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_locations" not in self._stubs:
+            self._stubs["set_locations"] = self.grpc_channel.unary_unary(
+                "/google.container.v1.ClusterManager/SetLocations",
+                request_serializer=cluster_service.SetLocationsRequest.serialize,
+                response_deserializer=cluster_service.Operation.deserialize,
+            )
+        return self._stubs["set_locations"]
+
+    @property
+    def update_master(
+        self,
+    ) -> Callable[[cluster_service.UpdateMasterRequest], cluster_service.Operation]:
+        r"""Return a callable for the update master method over gRPC.
+
+        Updates the master for a specific cluster.
+
+        Returns:
+            Callable[[~.UpdateMasterRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
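+        # The target string follows the standard gRPC naming scheme,
+        # "/<proto package>.<Service>/<Method>".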
+ if "update_master" not in self._stubs: + self._stubs["update_master"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/UpdateMaster", + request_serializer=cluster_service.UpdateMasterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_master"] + + @property + def set_master_auth( + self, + ) -> Callable[[cluster_service.SetMasterAuthRequest], cluster_service.Operation]: + r"""Return a callable for the set master auth method over gRPC. + + Sets master auth materials. Currently supports + changing the admin password or a specific cluster, + either via password generation or explicitly setting the + password. + + Returns: + Callable[[~.SetMasterAuthRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_master_auth" not in self._stubs: + self._stubs["set_master_auth"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetMasterAuth", + request_serializer=cluster_service.SetMasterAuthRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_master_auth"] + + @property + def delete_cluster( + self, + ) -> Callable[[cluster_service.DeleteClusterRequest], cluster_service.Operation]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes the cluster, including the Kubernetes + endpoint and all worker nodes. + + Firewalls and routes that were configured during cluster + creation are also deleted. + + Other Google Compute Engine resources that might be in + use by the cluster, such as load balancer resources, are + not deleted if they weren't present when the cluster was + initially created. + + Returns: + Callable[[~.DeleteClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/DeleteCluster", + request_serializer=cluster_service.DeleteClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["delete_cluster"] + + @property + def list_operations( + self, + ) -> Callable[ + [cluster_service.ListOperationsRequest], cluster_service.ListOperationsResponse + ]: + r"""Return a callable for the list operations method over gRPC. + + Lists all operations in a project in a specific zone + or all zones. + + Returns: + Callable[[~.ListOperationsRequest], + ~.ListOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/ListOperations", + request_serializer=cluster_service.ListOperationsRequest.serialize, + response_deserializer=cluster_service.ListOperationsResponse.deserialize, + ) + return self._stubs["list_operations"] + + @property + def get_operation( + self, + ) -> Callable[[cluster_service.GetOperationRequest], cluster_service.Operation]: + r"""Return a callable for the get operation method over gRPC. + + Gets the specified operation. + + Returns: + Callable[[~.GetOperationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetOperation", + request_serializer=cluster_service.GetOperationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["get_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[cluster_service.CancelOperationRequest], empty_pb2.Empty]: + r"""Return a callable for the cancel operation method over gRPC. + + Cancels the specified operation. + + Returns: + Callable[[~.CancelOperationRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CancelOperation", + request_serializer=cluster_service.CancelOperationRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_operation"] + + @property + def get_server_config( + self, + ) -> Callable[ + [cluster_service.GetServerConfigRequest], cluster_service.ServerConfig + ]: + r"""Return a callable for the get server config method over gRPC. + + Returns configuration info about the Google + Kubernetes Engine service. + + Returns: + Callable[[~.GetServerConfigRequest], + ~.ServerConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_server_config" not in self._stubs: + self._stubs["get_server_config"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetServerConfig", + request_serializer=cluster_service.GetServerConfigRequest.serialize, + response_deserializer=cluster_service.ServerConfig.deserialize, + ) + return self._stubs["get_server_config"] + + @property + def get_json_web_keys( + self, + ) -> Callable[ + [cluster_service.GetJSONWebKeysRequest], cluster_service.GetJSONWebKeysResponse + ]: + r"""Return a callable for the get json web keys method over gRPC. + + Gets the public component of the cluster signing keys + in JSON Web Key format. + This API is not yet intended for general use, and is not + available for all clusters. 
+ + Returns: + Callable[[~.GetJSONWebKeysRequest], + ~.GetJSONWebKeysResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_json_web_keys" not in self._stubs: + self._stubs["get_json_web_keys"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetJSONWebKeys", + request_serializer=cluster_service.GetJSONWebKeysRequest.serialize, + response_deserializer=cluster_service.GetJSONWebKeysResponse.deserialize, + ) + return self._stubs["get_json_web_keys"] + + @property + def list_node_pools( + self, + ) -> Callable[ + [cluster_service.ListNodePoolsRequest], cluster_service.ListNodePoolsResponse + ]: + r"""Return a callable for the list node pools method over gRPC. + + Lists the node pools for a cluster. + + Returns: + Callable[[~.ListNodePoolsRequest], + ~.ListNodePoolsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_node_pools" not in self._stubs: + self._stubs["list_node_pools"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/ListNodePools", + request_serializer=cluster_service.ListNodePoolsRequest.serialize, + response_deserializer=cluster_service.ListNodePoolsResponse.deserialize, + ) + return self._stubs["list_node_pools"] + + @property + def get_node_pool( + self, + ) -> Callable[[cluster_service.GetNodePoolRequest], cluster_service.NodePool]: + r"""Return a callable for the get node pool method over gRPC. + + Retrieves the requested node pool. + + Returns: + Callable[[~.GetNodePoolRequest], + ~.NodePool]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_node_pool" not in self._stubs: + self._stubs["get_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetNodePool", + request_serializer=cluster_service.GetNodePoolRequest.serialize, + response_deserializer=cluster_service.NodePool.deserialize, + ) + return self._stubs["get_node_pool"] + + @property + def create_node_pool( + self, + ) -> Callable[[cluster_service.CreateNodePoolRequest], cluster_service.Operation]: + r"""Return a callable for the create node pool method over gRPC. + + Creates a node pool for a cluster. + + Returns: + Callable[[~.CreateNodePoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
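+        # ``unary_unary`` is used because every ClusterManager RPC takes a
+        # single request message and returns a single response (no streaming).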
+ if "create_node_pool" not in self._stubs: + self._stubs["create_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CreateNodePool", + request_serializer=cluster_service.CreateNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["create_node_pool"] + + @property + def delete_node_pool( + self, + ) -> Callable[[cluster_service.DeleteNodePoolRequest], cluster_service.Operation]: + r"""Return a callable for the delete node pool method over gRPC. + + Deletes a node pool from a cluster. + + Returns: + Callable[[~.DeleteNodePoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_node_pool" not in self._stubs: + self._stubs["delete_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/DeleteNodePool", + request_serializer=cluster_service.DeleteNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["delete_node_pool"] + + @property + def complete_node_pool_upgrade( + self, + ) -> Callable[[cluster_service.CompleteNodePoolUpgradeRequest], empty_pb2.Empty]: + r"""Return a callable for the complete node pool upgrade method over gRPC. + + CompleteNodePoolUpgrade will signal an on-going node + pool upgrade to complete. + + Returns: + Callable[[~.CompleteNodePoolUpgradeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_node_pool_upgrade" not in self._stubs: + self._stubs["complete_node_pool_upgrade"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CompleteNodePoolUpgrade", + request_serializer=cluster_service.CompleteNodePoolUpgradeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["complete_node_pool_upgrade"] + + @property + def rollback_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.RollbackNodePoolUpgradeRequest], cluster_service.Operation + ]: + r"""Return a callable for the rollback node pool upgrade method over gRPC. + + Rolls back a previously Aborted or Failed NodePool + upgrade. This makes no changes if the last upgrade + successfully completed. + + Returns: + Callable[[~.RollbackNodePoolUpgradeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "rollback_node_pool_upgrade" not in self._stubs: + self._stubs["rollback_node_pool_upgrade"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/RollbackNodePoolUpgrade", + request_serializer=cluster_service.RollbackNodePoolUpgradeRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["rollback_node_pool_upgrade"] + + @property + def set_node_pool_management( + self, + ) -> Callable[ + [cluster_service.SetNodePoolManagementRequest], cluster_service.Operation + ]: + r"""Return a callable for the set node pool management method over gRPC. + + Sets the NodeManagement options for a node pool. + + Returns: + Callable[[~.SetNodePoolManagementRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_management" not in self._stubs: + self._stubs["set_node_pool_management"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetNodePoolManagement", + request_serializer=cluster_service.SetNodePoolManagementRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_management"] + + @property + def set_labels( + self, + ) -> Callable[[cluster_service.SetLabelsRequest], cluster_service.Operation]: + r"""Return a callable for the set labels method over gRPC. + + Sets labels on a cluster. + + Returns: + Callable[[~.SetLabelsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_labels" not in self._stubs: + self._stubs["set_labels"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetLabels", + request_serializer=cluster_service.SetLabelsRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_labels"] + + @property + def set_legacy_abac( + self, + ) -> Callable[[cluster_service.SetLegacyAbacRequest], cluster_service.Operation]: + r"""Return a callable for the set legacy abac method over gRPC. + + Enables or disables the ABAC authorization mechanism + on a cluster. + + Returns: + Callable[[~.SetLegacyAbacRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_legacy_abac" not in self._stubs: + self._stubs["set_legacy_abac"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetLegacyAbac", + request_serializer=cluster_service.SetLegacyAbacRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_legacy_abac"] + + @property + def start_ip_rotation( + self, + ) -> Callable[[cluster_service.StartIPRotationRequest], cluster_service.Operation]: + r"""Return a callable for the start ip rotation method over gRPC. + + Starts master IP rotation. 
+ + Returns: + Callable[[~.StartIPRotationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_ip_rotation" not in self._stubs: + self._stubs["start_ip_rotation"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/StartIPRotation", + request_serializer=cluster_service.StartIPRotationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["start_ip_rotation"] + + @property + def complete_ip_rotation( + self, + ) -> Callable[ + [cluster_service.CompleteIPRotationRequest], cluster_service.Operation + ]: + r"""Return a callable for the complete ip rotation method over gRPC. + + Completes master IP rotation. + + Returns: + Callable[[~.CompleteIPRotationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_ip_rotation" not in self._stubs: + self._stubs["complete_ip_rotation"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CompleteIPRotation", + request_serializer=cluster_service.CompleteIPRotationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["complete_ip_rotation"] + + @property + def set_node_pool_size( + self, + ) -> Callable[[cluster_service.SetNodePoolSizeRequest], cluster_service.Operation]: + r"""Return a callable for the set node pool size method over gRPC. + + Sets the size for a specific node pool. The new size will be + used for all replicas, including future replicas created by + modifying + [NodePool.locations][google.container.v1.NodePool.locations]. + + Returns: + Callable[[~.SetNodePoolSizeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_size" not in self._stubs: + self._stubs["set_node_pool_size"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetNodePoolSize", + request_serializer=cluster_service.SetNodePoolSizeRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_size"] + + @property + def set_network_policy( + self, + ) -> Callable[[cluster_service.SetNetworkPolicyRequest], cluster_service.Operation]: + r"""Return a callable for the set network policy method over gRPC. + + Enables or disables Network Policy for a cluster. + + Returns: + Callable[[~.SetNetworkPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_network_policy" not in self._stubs: + self._stubs["set_network_policy"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetNetworkPolicy", + request_serializer=cluster_service.SetNetworkPolicyRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_network_policy"] + + @property + def set_maintenance_policy( + self, + ) -> Callable[ + [cluster_service.SetMaintenancePolicyRequest], cluster_service.Operation + ]: + r"""Return a callable for the set maintenance policy method over gRPC. + + Sets the maintenance policy for a cluster. + + Returns: + Callable[[~.SetMaintenancePolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_maintenance_policy" not in self._stubs: + self._stubs["set_maintenance_policy"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetMaintenancePolicy", + request_serializer=cluster_service.SetMaintenancePolicyRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_maintenance_policy"] + + @property + def list_usable_subnetworks( + self, + ) -> Callable[ + [cluster_service.ListUsableSubnetworksRequest], + cluster_service.ListUsableSubnetworksResponse, + ]: + r"""Return a callable for the list usable subnetworks method over gRPC. + + Lists subnetworks that are usable for creating + clusters in a project. + + Returns: + Callable[[~.ListUsableSubnetworksRequest], + ~.ListUsableSubnetworksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_subnetworks" not in self._stubs: + self._stubs["list_usable_subnetworks"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/ListUsableSubnetworks", + request_serializer=cluster_service.ListUsableSubnetworksRequest.serialize, + response_deserializer=cluster_service.ListUsableSubnetworksResponse.deserialize, + ) + return self._stubs["list_usable_subnetworks"] + + @property + def check_autopilot_compatibility( + self, + ) -> Callable[ + [cluster_service.CheckAutopilotCompatibilityRequest], + cluster_service.CheckAutopilotCompatibilityResponse, + ]: + r"""Return a callable for the check autopilot compatibility method over gRPC. + + Checks the cluster compatibility with Autopilot mode, + and returns a list of compatibility issues. + + Returns: + Callable[[~.CheckAutopilotCompatibilityRequest], + ~.CheckAutopilotCompatibilityResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "check_autopilot_compatibility" not in self._stubs: + self._stubs[ + "check_autopilot_compatibility" + ] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CheckAutopilotCompatibility", + request_serializer=cluster_service.CheckAutopilotCompatibilityRequest.serialize, + response_deserializer=cluster_service.CheckAutopilotCompatibilityResponse.deserialize, + ) + return self._stubs["check_autopilot_compatibility"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ClusterManagerGrpcTransport",) diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc_asyncio.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc_asyncio.py new file mode 100644 index 000000000000..fc49c5efb387 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/grpc_asyncio.py @@ -0,0 +1,1247 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.container_v1.types import cluster_service + +from .base import DEFAULT_CLIENT_INFO, ClusterManagerTransport +from .grpc import ClusterManagerGrpcTransport + + +class ClusterManagerGrpcAsyncIOTransport(ClusterManagerTransport): + """gRPC AsyncIO backend transport for ClusterManager. + + Google Kubernetes Engine Cluster Manager v1 + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "container.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "container.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_clusters( + self, + ) -> Callable[ + [cluster_service.ListClustersRequest], + Awaitable[cluster_service.ListClustersResponse], + ]: + r"""Return a callable for the list clusters method over gRPC. + + Lists all clusters owned by a project in either the + specified zone or all zones. + + Returns: + Callable[[~.ListClustersRequest], + Awaitable[~.ListClustersResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
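+
+        Example (an illustrative sketch; it assumes ``transport`` is an
+        already-constructed ``ClusterManagerGrpcAsyncIOTransport`` and that
+        the call is awaited inside a running event loop):
+
+            rpc = transport.list_clusters
+            response = await rpc(
+                cluster_service.ListClustersRequest(
+                    parent="projects/my-project/locations/-"
+                )
+            )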
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/ListClusters", + request_serializer=cluster_service.ListClustersRequest.serialize, + response_deserializer=cluster_service.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster( + self, + ) -> Callable[ + [cluster_service.GetClusterRequest], Awaitable[cluster_service.Cluster] + ]: + r"""Return a callable for the get cluster method over gRPC. + + Gets the details of a specific cluster. + + Returns: + Callable[[~.GetClusterRequest], + Awaitable[~.Cluster]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetCluster", + request_serializer=cluster_service.GetClusterRequest.serialize, + response_deserializer=cluster_service.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[ + [cluster_service.CreateClusterRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a cluster, consisting of the specified number and type + of Google Compute Engine instances. + + By default, the cluster is created in the project's `default + network `__. + + One firewall is added for the cluster. After cluster creation, + the Kubelet creates routes for each node to allow the containers + on that node to communicate with all other instances in the + cluster. + + Finally, an entry is added to the project's global metadata + indicating which CIDR range the cluster is using. + + Returns: + Callable[[~.CreateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CreateCluster", + request_serializer=cluster_service.CreateClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[ + [cluster_service.UpdateClusterRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the settings of a specific cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/UpdateCluster", + request_serializer=cluster_service.UpdateClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_cluster"] + + @property + def update_node_pool( + self, + ) -> Callable[ + [cluster_service.UpdateNodePoolRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the update node pool method over gRPC. + + Updates the version and/or image type for the + specified node pool. + + Returns: + Callable[[~.UpdateNodePoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_node_pool" not in self._stubs: + self._stubs["update_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/UpdateNodePool", + request_serializer=cluster_service.UpdateNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_node_pool"] + + @property + def set_node_pool_autoscaling( + self, + ) -> Callable[ + [cluster_service.SetNodePoolAutoscalingRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the set node pool autoscaling method over gRPC. + + Sets the autoscaling settings for the specified node + pool. + + Returns: + Callable[[~.SetNodePoolAutoscalingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_autoscaling" not in self._stubs: + self._stubs["set_node_pool_autoscaling"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetNodePoolAutoscaling", + request_serializer=cluster_service.SetNodePoolAutoscalingRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_autoscaling"] + + @property + def set_logging_service( + self, + ) -> Callable[ + [cluster_service.SetLoggingServiceRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set logging service method over gRPC. + + Sets the logging service for a specific cluster. + + Returns: + Callable[[~.SetLoggingServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_logging_service" not in self._stubs: + self._stubs["set_logging_service"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetLoggingService", + request_serializer=cluster_service.SetLoggingServiceRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_logging_service"] + + @property + def set_monitoring_service( + self, + ) -> Callable[ + [cluster_service.SetMonitoringServiceRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the set monitoring service method over gRPC. + + Sets the monitoring service for a specific cluster. + + Returns: + Callable[[~.SetMonitoringServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_monitoring_service" not in self._stubs: + self._stubs["set_monitoring_service"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetMonitoringService", + request_serializer=cluster_service.SetMonitoringServiceRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_monitoring_service"] + + @property + def set_addons_config( + self, + ) -> Callable[ + [cluster_service.SetAddonsConfigRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set addons config method over gRPC. + + Sets the addons for a specific cluster. + + Returns: + Callable[[~.SetAddonsConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_addons_config" not in self._stubs: + self._stubs["set_addons_config"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetAddonsConfig", + request_serializer=cluster_service.SetAddonsConfigRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_addons_config"] + + @property + def set_locations( + self, + ) -> Callable[ + [cluster_service.SetLocationsRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set locations method over gRPC. + + Sets the locations for a specific cluster. Deprecated. Use + `projects.locations.clusters.update `__ + instead. + + Returns: + Callable[[~.SetLocationsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_locations" not in self._stubs: + self._stubs["set_locations"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetLocations", + request_serializer=cluster_service.SetLocationsRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_locations"] + + @property + def update_master( + self, + ) -> Callable[ + [cluster_service.UpdateMasterRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the update master method over gRPC. + + Updates the master for a specific cluster. + + Returns: + Callable[[~.UpdateMasterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_master" not in self._stubs: + self._stubs["update_master"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/UpdateMaster", + request_serializer=cluster_service.UpdateMasterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_master"] + + @property + def set_master_auth( + self, + ) -> Callable[ + [cluster_service.SetMasterAuthRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set master auth method over gRPC. + + Sets master auth materials. Currently supports + changing the admin password or a specific cluster, + either via password generation or explicitly setting the + password. + + Returns: + Callable[[~.SetMasterAuthRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_master_auth" not in self._stubs: + self._stubs["set_master_auth"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetMasterAuth", + request_serializer=cluster_service.SetMasterAuthRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_master_auth"] + + @property + def delete_cluster( + self, + ) -> Callable[ + [cluster_service.DeleteClusterRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes the cluster, including the Kubernetes + endpoint and all worker nodes. + + Firewalls and routes that were configured during cluster + creation are also deleted. + + Other Google Compute Engine resources that might be in + use by the cluster, such as load balancer resources, are + not deleted if they weren't present when the cluster was + initially created. + + Returns: + Callable[[~.DeleteClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/DeleteCluster", + request_serializer=cluster_service.DeleteClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["delete_cluster"] + + @property + def list_operations( + self, + ) -> Callable[ + [cluster_service.ListOperationsRequest], + Awaitable[cluster_service.ListOperationsResponse], + ]: + r"""Return a callable for the list operations method over gRPC. + + Lists all operations in a project in a specific zone + or all zones. + + Returns: + Callable[[~.ListOperationsRequest], + Awaitable[~.ListOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/ListOperations", + request_serializer=cluster_service.ListOperationsRequest.serialize, + response_deserializer=cluster_service.ListOperationsResponse.deserialize, + ) + return self._stubs["list_operations"] + + @property + def get_operation( + self, + ) -> Callable[ + [cluster_service.GetOperationRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the get operation method over gRPC. + + Gets the specified operation. + + Returns: + Callable[[~.GetOperationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetOperation", + request_serializer=cluster_service.GetOperationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["get_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[cluster_service.CancelOperationRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel operation method over gRPC. + + Cancels the specified operation. + + Returns: + Callable[[~.CancelOperationRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CancelOperation", + request_serializer=cluster_service.CancelOperationRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_operation"] + + @property + def get_server_config( + self, + ) -> Callable[ + [cluster_service.GetServerConfigRequest], + Awaitable[cluster_service.ServerConfig], + ]: + r"""Return a callable for the get server config method over gRPC. + + Returns configuration info about the Google + Kubernetes Engine service. 
+ + Returns: + Callable[[~.GetServerConfigRequest], + Awaitable[~.ServerConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_server_config" not in self._stubs: + self._stubs["get_server_config"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetServerConfig", + request_serializer=cluster_service.GetServerConfigRequest.serialize, + response_deserializer=cluster_service.ServerConfig.deserialize, + ) + return self._stubs["get_server_config"] + + @property + def get_json_web_keys( + self, + ) -> Callable[ + [cluster_service.GetJSONWebKeysRequest], + Awaitable[cluster_service.GetJSONWebKeysResponse], + ]: + r"""Return a callable for the get json web keys method over gRPC. + + Gets the public component of the cluster signing keys + in JSON Web Key format. + This API is not yet intended for general use, and is not + available for all clusters. + + Returns: + Callable[[~.GetJSONWebKeysRequest], + Awaitable[~.GetJSONWebKeysResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_json_web_keys" not in self._stubs: + self._stubs["get_json_web_keys"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetJSONWebKeys", + request_serializer=cluster_service.GetJSONWebKeysRequest.serialize, + response_deserializer=cluster_service.GetJSONWebKeysResponse.deserialize, + ) + return self._stubs["get_json_web_keys"] + + @property + def list_node_pools( + self, + ) -> Callable[ + [cluster_service.ListNodePoolsRequest], + Awaitable[cluster_service.ListNodePoolsResponse], + ]: + r"""Return a callable for the list node pools method over gRPC. + + Lists the node pools for a cluster. + + Returns: + Callable[[~.ListNodePoolsRequest], + Awaitable[~.ListNodePoolsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_node_pools" not in self._stubs: + self._stubs["list_node_pools"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/ListNodePools", + request_serializer=cluster_service.ListNodePoolsRequest.serialize, + response_deserializer=cluster_service.ListNodePoolsResponse.deserialize, + ) + return self._stubs["list_node_pools"] + + @property + def get_node_pool( + self, + ) -> Callable[ + [cluster_service.GetNodePoolRequest], Awaitable[cluster_service.NodePool] + ]: + r"""Return a callable for the get node pool method over gRPC. + + Retrieves the requested node pool. + + Returns: + Callable[[~.GetNodePoolRequest], + Awaitable[~.NodePool]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
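+        # ``self._stubs`` was re-initialized per instance in ``__init__``, so
+        # cached stubs are never shared across transport instances.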
+ if "get_node_pool" not in self._stubs: + self._stubs["get_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/GetNodePool", + request_serializer=cluster_service.GetNodePoolRequest.serialize, + response_deserializer=cluster_service.NodePool.deserialize, + ) + return self._stubs["get_node_pool"] + + @property + def create_node_pool( + self, + ) -> Callable[ + [cluster_service.CreateNodePoolRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the create node pool method over gRPC. + + Creates a node pool for a cluster. + + Returns: + Callable[[~.CreateNodePoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_node_pool" not in self._stubs: + self._stubs["create_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CreateNodePool", + request_serializer=cluster_service.CreateNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["create_node_pool"] + + @property + def delete_node_pool( + self, + ) -> Callable[ + [cluster_service.DeleteNodePoolRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the delete node pool method over gRPC. + + Deletes a node pool from a cluster. + + Returns: + Callable[[~.DeleteNodePoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_node_pool" not in self._stubs: + self._stubs["delete_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/DeleteNodePool", + request_serializer=cluster_service.DeleteNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["delete_node_pool"] + + @property + def complete_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.CompleteNodePoolUpgradeRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the complete node pool upgrade method over gRPC. + + CompleteNodePoolUpgrade will signal an on-going node + pool upgrade to complete. + + Returns: + Callable[[~.CompleteNodePoolUpgradeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_node_pool_upgrade" not in self._stubs: + self._stubs["complete_node_pool_upgrade"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CompleteNodePoolUpgrade", + request_serializer=cluster_service.CompleteNodePoolUpgradeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["complete_node_pool_upgrade"] + + @property + def rollback_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.RollbackNodePoolUpgradeRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the rollback node pool upgrade method over gRPC. 
+ + Rolls back a previously Aborted or Failed NodePool + upgrade. This makes no changes if the last upgrade + successfully completed. + + Returns: + Callable[[~.RollbackNodePoolUpgradeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_node_pool_upgrade" not in self._stubs: + self._stubs["rollback_node_pool_upgrade"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/RollbackNodePoolUpgrade", + request_serializer=cluster_service.RollbackNodePoolUpgradeRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["rollback_node_pool_upgrade"] + + @property + def set_node_pool_management( + self, + ) -> Callable[ + [cluster_service.SetNodePoolManagementRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the set node pool management method over gRPC. + + Sets the NodeManagement options for a node pool. + + Returns: + Callable[[~.SetNodePoolManagementRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_management" not in self._stubs: + self._stubs["set_node_pool_management"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetNodePoolManagement", + request_serializer=cluster_service.SetNodePoolManagementRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_management"] + + @property + def set_labels( + self, + ) -> Callable[ + [cluster_service.SetLabelsRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set labels method over gRPC. + + Sets labels on a cluster. + + Returns: + Callable[[~.SetLabelsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_labels" not in self._stubs: + self._stubs["set_labels"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetLabels", + request_serializer=cluster_service.SetLabelsRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_labels"] + + @property + def set_legacy_abac( + self, + ) -> Callable[ + [cluster_service.SetLegacyAbacRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set legacy abac method over gRPC. + + Enables or disables the ABAC authorization mechanism + on a cluster. + + Returns: + Callable[[~.SetLegacyAbacRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_legacy_abac" not in self._stubs: + self._stubs["set_legacy_abac"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetLegacyAbac", + request_serializer=cluster_service.SetLegacyAbacRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_legacy_abac"] + + @property + def start_ip_rotation( + self, + ) -> Callable[ + [cluster_service.StartIPRotationRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the start ip rotation method over gRPC. + + Starts master IP rotation. + + Returns: + Callable[[~.StartIPRotationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_ip_rotation" not in self._stubs: + self._stubs["start_ip_rotation"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/StartIPRotation", + request_serializer=cluster_service.StartIPRotationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["start_ip_rotation"] + + @property + def complete_ip_rotation( + self, + ) -> Callable[ + [cluster_service.CompleteIPRotationRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the complete ip rotation method over gRPC. + + Completes master IP rotation. + + Returns: + Callable[[~.CompleteIPRotationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_ip_rotation" not in self._stubs: + self._stubs["complete_ip_rotation"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CompleteIPRotation", + request_serializer=cluster_service.CompleteIPRotationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["complete_ip_rotation"] + + @property + def set_node_pool_size( + self, + ) -> Callable[ + [cluster_service.SetNodePoolSizeRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set node pool size method over gRPC. + + Sets the size for a specific node pool. The new size will be + used for all replicas, including future replicas created by + modifying + [NodePool.locations][google.container.v1.NodePool.locations]. + + Returns: + Callable[[~.SetNodePoolSizeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_node_pool_size" not in self._stubs: + self._stubs["set_node_pool_size"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetNodePoolSize", + request_serializer=cluster_service.SetNodePoolSizeRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_size"] + + @property + def set_network_policy( + self, + ) -> Callable[ + [cluster_service.SetNetworkPolicyRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set network policy method over gRPC. + + Enables or disables Network Policy for a cluster. + + Returns: + Callable[[~.SetNetworkPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_network_policy" not in self._stubs: + self._stubs["set_network_policy"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetNetworkPolicy", + request_serializer=cluster_service.SetNetworkPolicyRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_network_policy"] + + @property + def set_maintenance_policy( + self, + ) -> Callable[ + [cluster_service.SetMaintenancePolicyRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the set maintenance policy method over gRPC. + + Sets the maintenance policy for a cluster. + + Returns: + Callable[[~.SetMaintenancePolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_maintenance_policy" not in self._stubs: + self._stubs["set_maintenance_policy"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/SetMaintenancePolicy", + request_serializer=cluster_service.SetMaintenancePolicyRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_maintenance_policy"] + + @property + def list_usable_subnetworks( + self, + ) -> Callable[ + [cluster_service.ListUsableSubnetworksRequest], + Awaitable[cluster_service.ListUsableSubnetworksResponse], + ]: + r"""Return a callable for the list usable subnetworks method over gRPC. + + Lists subnetworks that are usable for creating + clusters in a project. + + Returns: + Callable[[~.ListUsableSubnetworksRequest], + Awaitable[~.ListUsableSubnetworksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_usable_subnetworks" not in self._stubs: + self._stubs["list_usable_subnetworks"] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/ListUsableSubnetworks", + request_serializer=cluster_service.ListUsableSubnetworksRequest.serialize, + response_deserializer=cluster_service.ListUsableSubnetworksResponse.deserialize, + ) + return self._stubs["list_usable_subnetworks"] + + @property + def check_autopilot_compatibility( + self, + ) -> Callable[ + [cluster_service.CheckAutopilotCompatibilityRequest], + Awaitable[cluster_service.CheckAutopilotCompatibilityResponse], + ]: + r"""Return a callable for the check autopilot compatibility method over gRPC. + + Checks the cluster compatibility with Autopilot mode, + and returns a list of compatibility issues. + + Returns: + Callable[[~.CheckAutopilotCompatibilityRequest], + Awaitable[~.CheckAutopilotCompatibilityResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_autopilot_compatibility" not in self._stubs: + self._stubs[ + "check_autopilot_compatibility" + ] = self.grpc_channel.unary_unary( + "/google.container.v1.ClusterManager/CheckAutopilotCompatibility", + request_serializer=cluster_service.CheckAutopilotCompatibilityRequest.serialize, + response_deserializer=cluster_service.CheckAutopilotCompatibilityResponse.deserialize, + ) + return self._stubs["check_autopilot_compatibility"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ClusterManagerGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py b/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py new file mode 100644 index 000000000000..88c4b4c22eb5 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/types/__init__.py @@ -0,0 +1,344 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cluster_service import ( + AcceleratorConfig, + AdditionalNodeNetworkConfig, + AdditionalPodNetworkConfig, + AdditionalPodRangesConfig, + AddonsConfig, + AdvancedDatapathObservabilityConfig, + AdvancedMachineFeatures, + AuthenticatorGroupsConfig, + Autopilot, + AutopilotCompatibilityIssue, + AutoprovisioningNodePoolDefaults, + AutoUpgradeOptions, + BestEffortProvisioning, + BinaryAuthorization, + BlueGreenSettings, + CancelOperationRequest, + CheckAutopilotCompatibilityRequest, + CheckAutopilotCompatibilityResponse, + ClientCertificateConfig, + CloudRunConfig, + Cluster, + ClusterAutoscaling, + ClusterUpdate, + CompleteIPRotationRequest, + CompleteNodePoolUpgradeRequest, + ConfidentialNodes, + ConfigConnectorConfig, + CostManagementConfig, + CreateClusterRequest, + CreateNodePoolRequest, + DailyMaintenanceWindow, + DatabaseEncryption, + DatapathProvider, + DefaultSnatStatus, + DeleteClusterRequest, + DeleteNodePoolRequest, + DnsCacheConfig, + DNSConfig, + EphemeralStorageLocalSsdConfig, + FastSocket, + Fleet, + GatewayAPIConfig, + GcePersistentDiskCsiDriverConfig, + GcfsConfig, + GcpFilestoreCsiDriverConfig, + GcsFuseCsiDriverConfig, + GetClusterRequest, + GetJSONWebKeysRequest, + GetJSONWebKeysResponse, + GetNodePoolRequest, + GetOpenIDConfigRequest, + GetOpenIDConfigResponse, + GetOperationRequest, + GetServerConfigRequest, + GkeBackupAgentConfig, + GPUDriverInstallationConfig, + GPUSharingConfig, + HorizontalPodAutoscaling, + HttpLoadBalancing, + IdentityServiceConfig, + ILBSubsettingConfig, + IntraNodeVisibilityConfig, + IPAllocationPolicy, + IPv6AccessType, + Jwk, + K8sBetaAPIConfig, + KubernetesDashboard, + LegacyAbac, + LinuxNodeConfig, + ListClustersRequest, + ListClustersResponse, + ListNodePoolsRequest, + ListNodePoolsResponse, + ListOperationsRequest, + ListOperationsResponse, + ListUsableSubnetworksRequest, + ListUsableSubnetworksResponse, + LocalNvmeSsdBlockConfig, + LoggingComponentConfig, + LoggingConfig, + LoggingVariantConfig, + MaintenanceExclusionOptions, + MaintenancePolicy, + MaintenanceWindow, + ManagedPrometheusConfig, + MasterAuth, + MasterAuthorizedNetworksConfig, + MaxPodsConstraint, + MeshCertificates, + MonitoringComponentConfig, + MonitoringConfig, + NetworkConfig, + NetworkPolicy, + NetworkPolicyConfig, + NetworkTags, + NodeConfig, + NodeConfigDefaults, + NodeKubeletConfig, + NodeLabels, + NodeManagement, + NodeNetworkConfig, + NodePool, + NodePoolAutoConfig, + NodePoolAutoscaling, + NodePoolDefaults, + NodePoolLoggingConfig, + NodePoolUpdateStrategy, + NodeTaint, + NodeTaints, + NotificationConfig, + Operation, + OperationProgress, + PodCIDROverprovisionConfig, + PrivateClusterConfig, + PrivateClusterMasterGlobalAccessConfig, + PrivateIPv6GoogleAccess, + RangeInfo, + RecurringTimeWindow, + ReleaseChannel, + ReservationAffinity, + ResourceLabels, + ResourceLimit, + ResourceUsageExportConfig, + RollbackNodePoolUpgradeRequest, + SandboxConfig, + SecurityBulletinEvent, + SecurityPostureConfig, + ServerConfig, + ServiceExternalIPsConfig, + SetAddonsConfigRequest, + SetLabelsRequest, + SetLegacyAbacRequest, + SetLocationsRequest, + SetLoggingServiceRequest, + SetMaintenancePolicyRequest, + SetMasterAuthRequest, + SetMonitoringServiceRequest, + SetNetworkPolicyRequest, + SetNodePoolAutoscalingRequest, + SetNodePoolManagementRequest, + SetNodePoolSizeRequest, + ShieldedInstanceConfig, + ShieldedNodes, + SoleTenantConfig, + StackType, + StartIPRotationRequest, + StatusCondition, + TimeWindow, + UpdateClusterRequest, + UpdateMasterRequest, + 
UpdateNodePoolRequest, + UpgradeAvailableEvent, + UpgradeEvent, + UpgradeResourceType, + UsableSubnetwork, + UsableSubnetworkSecondaryRange, + VerticalPodAutoscaling, + VirtualNIC, + WindowsNodeConfig, + WorkloadIdentityConfig, + WorkloadMetadataConfig, + WorkloadPolicyConfig, +) + +__all__ = ( + "AcceleratorConfig", + "AdditionalNodeNetworkConfig", + "AdditionalPodNetworkConfig", + "AdditionalPodRangesConfig", + "AddonsConfig", + "AdvancedDatapathObservabilityConfig", + "AdvancedMachineFeatures", + "AuthenticatorGroupsConfig", + "Autopilot", + "AutopilotCompatibilityIssue", + "AutoprovisioningNodePoolDefaults", + "AutoUpgradeOptions", + "BestEffortProvisioning", + "BinaryAuthorization", + "BlueGreenSettings", + "CancelOperationRequest", + "CheckAutopilotCompatibilityRequest", + "CheckAutopilotCompatibilityResponse", + "ClientCertificateConfig", + "CloudRunConfig", + "Cluster", + "ClusterAutoscaling", + "ClusterUpdate", + "CompleteIPRotationRequest", + "CompleteNodePoolUpgradeRequest", + "ConfidentialNodes", + "ConfigConnectorConfig", + "CostManagementConfig", + "CreateClusterRequest", + "CreateNodePoolRequest", + "DailyMaintenanceWindow", + "DatabaseEncryption", + "DefaultSnatStatus", + "DeleteClusterRequest", + "DeleteNodePoolRequest", + "DnsCacheConfig", + "DNSConfig", + "EphemeralStorageLocalSsdConfig", + "FastSocket", + "Fleet", + "GatewayAPIConfig", + "GcePersistentDiskCsiDriverConfig", + "GcfsConfig", + "GcpFilestoreCsiDriverConfig", + "GcsFuseCsiDriverConfig", + "GetClusterRequest", + "GetJSONWebKeysRequest", + "GetJSONWebKeysResponse", + "GetNodePoolRequest", + "GetOpenIDConfigRequest", + "GetOpenIDConfigResponse", + "GetOperationRequest", + "GetServerConfigRequest", + "GkeBackupAgentConfig", + "GPUDriverInstallationConfig", + "GPUSharingConfig", + "HorizontalPodAutoscaling", + "HttpLoadBalancing", + "IdentityServiceConfig", + "ILBSubsettingConfig", + "IntraNodeVisibilityConfig", + "IPAllocationPolicy", + "Jwk", + "K8sBetaAPIConfig", + "KubernetesDashboard", + "LegacyAbac", + "LinuxNodeConfig", + "ListClustersRequest", + "ListClustersResponse", + "ListNodePoolsRequest", + "ListNodePoolsResponse", + "ListOperationsRequest", + "ListOperationsResponse", + "ListUsableSubnetworksRequest", + "ListUsableSubnetworksResponse", + "LocalNvmeSsdBlockConfig", + "LoggingComponentConfig", + "LoggingConfig", + "LoggingVariantConfig", + "MaintenanceExclusionOptions", + "MaintenancePolicy", + "MaintenanceWindow", + "ManagedPrometheusConfig", + "MasterAuth", + "MasterAuthorizedNetworksConfig", + "MaxPodsConstraint", + "MeshCertificates", + "MonitoringComponentConfig", + "MonitoringConfig", + "NetworkConfig", + "NetworkPolicy", + "NetworkPolicyConfig", + "NetworkTags", + "NodeConfig", + "NodeConfigDefaults", + "NodeKubeletConfig", + "NodeLabels", + "NodeManagement", + "NodeNetworkConfig", + "NodePool", + "NodePoolAutoConfig", + "NodePoolAutoscaling", + "NodePoolDefaults", + "NodePoolLoggingConfig", + "NodeTaint", + "NodeTaints", + "NotificationConfig", + "Operation", + "OperationProgress", + "PodCIDROverprovisionConfig", + "PrivateClusterConfig", + "PrivateClusterMasterGlobalAccessConfig", + "RangeInfo", + "RecurringTimeWindow", + "ReleaseChannel", + "ReservationAffinity", + "ResourceLabels", + "ResourceLimit", + "ResourceUsageExportConfig", + "RollbackNodePoolUpgradeRequest", + "SandboxConfig", + "SecurityBulletinEvent", + "SecurityPostureConfig", + "ServerConfig", + "ServiceExternalIPsConfig", + "SetAddonsConfigRequest", + "SetLabelsRequest", + "SetLegacyAbacRequest", + "SetLocationsRequest", + 
"SetLoggingServiceRequest", + "SetMaintenancePolicyRequest", + "SetMasterAuthRequest", + "SetMonitoringServiceRequest", + "SetNetworkPolicyRequest", + "SetNodePoolAutoscalingRequest", + "SetNodePoolManagementRequest", + "SetNodePoolSizeRequest", + "ShieldedInstanceConfig", + "ShieldedNodes", + "SoleTenantConfig", + "StartIPRotationRequest", + "StatusCondition", + "TimeWindow", + "UpdateClusterRequest", + "UpdateMasterRequest", + "UpdateNodePoolRequest", + "UpgradeAvailableEvent", + "UpgradeEvent", + "UsableSubnetwork", + "UsableSubnetworkSecondaryRange", + "VerticalPodAutoscaling", + "VirtualNIC", + "WindowsNodeConfig", + "WorkloadIdentityConfig", + "WorkloadMetadataConfig", + "WorkloadPolicyConfig", + "DatapathProvider", + "IPv6AccessType", + "NodePoolUpdateStrategy", + "PrivateIPv6GoogleAccess", + "StackType", + "UpgradeResourceType", +) diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py new file mode 100644 index 000000000000..2e9bb4448a46 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py @@ -0,0 +1,9080 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.container.v1", + manifest={ + "PrivateIPv6GoogleAccess", + "UpgradeResourceType", + "DatapathProvider", + "NodePoolUpdateStrategy", + "StackType", + "IPv6AccessType", + "LinuxNodeConfig", + "WindowsNodeConfig", + "NodeKubeletConfig", + "NodeConfig", + "AdvancedMachineFeatures", + "NodeNetworkConfig", + "AdditionalNodeNetworkConfig", + "AdditionalPodNetworkConfig", + "ShieldedInstanceConfig", + "SandboxConfig", + "GcfsConfig", + "ReservationAffinity", + "SoleTenantConfig", + "NodeTaint", + "NodeTaints", + "NodeLabels", + "ResourceLabels", + "NetworkTags", + "MasterAuth", + "ClientCertificateConfig", + "AddonsConfig", + "HttpLoadBalancing", + "HorizontalPodAutoscaling", + "KubernetesDashboard", + "NetworkPolicyConfig", + "DnsCacheConfig", + "PrivateClusterMasterGlobalAccessConfig", + "PrivateClusterConfig", + "AuthenticatorGroupsConfig", + "CloudRunConfig", + "ConfigConnectorConfig", + "GcePersistentDiskCsiDriverConfig", + "GcpFilestoreCsiDriverConfig", + "GcsFuseCsiDriverConfig", + "GkeBackupAgentConfig", + "MasterAuthorizedNetworksConfig", + "LegacyAbac", + "NetworkPolicy", + "BinaryAuthorization", + "PodCIDROverprovisionConfig", + "IPAllocationPolicy", + "Cluster", + "K8sBetaAPIConfig", + "SecurityPostureConfig", + "NodePoolAutoConfig", + "NodePoolDefaults", + "NodeConfigDefaults", + "ClusterUpdate", + "AdditionalPodRangesConfig", + "RangeInfo", + "Operation", + "OperationProgress", + "CreateClusterRequest", + "GetClusterRequest", + "UpdateClusterRequest", + "UpdateNodePoolRequest", + "SetNodePoolAutoscalingRequest", + "SetLoggingServiceRequest", + "SetMonitoringServiceRequest", + "SetAddonsConfigRequest", + "SetLocationsRequest", + "UpdateMasterRequest", + "SetMasterAuthRequest", + "DeleteClusterRequest", + "ListClustersRequest", + "ListClustersResponse", + "GetOperationRequest", + "ListOperationsRequest", + "CancelOperationRequest", + "ListOperationsResponse", + "GetServerConfigRequest", + "ServerConfig", + "CreateNodePoolRequest", + "DeleteNodePoolRequest", + "ListNodePoolsRequest", + "GetNodePoolRequest", + "BlueGreenSettings", + "NodePool", + "NodeManagement", + "BestEffortProvisioning", + "AutoUpgradeOptions", + "MaintenancePolicy", + "MaintenanceWindow", + "TimeWindow", + "MaintenanceExclusionOptions", + "RecurringTimeWindow", + "DailyMaintenanceWindow", + "SetNodePoolManagementRequest", + "SetNodePoolSizeRequest", + "CompleteNodePoolUpgradeRequest", + "RollbackNodePoolUpgradeRequest", + "ListNodePoolsResponse", + "ClusterAutoscaling", + "AutoprovisioningNodePoolDefaults", + "ResourceLimit", + "NodePoolAutoscaling", + "SetLabelsRequest", + "SetLegacyAbacRequest", + "StartIPRotationRequest", + "CompleteIPRotationRequest", + "AcceleratorConfig", + "GPUSharingConfig", + "GPUDriverInstallationConfig", + "WorkloadMetadataConfig", + "SetNetworkPolicyRequest", + "SetMaintenancePolicyRequest", + "StatusCondition", + "NetworkConfig", + "GatewayAPIConfig", + "ServiceExternalIPsConfig", + "GetOpenIDConfigRequest", + "GetOpenIDConfigResponse", + "GetJSONWebKeysRequest", + "Jwk", + "GetJSONWebKeysResponse", + "CheckAutopilotCompatibilityRequest", + 
"AutopilotCompatibilityIssue", + "CheckAutopilotCompatibilityResponse", + "ReleaseChannel", + "CostManagementConfig", + "IntraNodeVisibilityConfig", + "ILBSubsettingConfig", + "DNSConfig", + "MaxPodsConstraint", + "WorkloadIdentityConfig", + "IdentityServiceConfig", + "MeshCertificates", + "DatabaseEncryption", + "ListUsableSubnetworksRequest", + "ListUsableSubnetworksResponse", + "UsableSubnetworkSecondaryRange", + "UsableSubnetwork", + "ResourceUsageExportConfig", + "VerticalPodAutoscaling", + "DefaultSnatStatus", + "ShieldedNodes", + "VirtualNIC", + "FastSocket", + "NotificationConfig", + "ConfidentialNodes", + "UpgradeEvent", + "UpgradeAvailableEvent", + "SecurityBulletinEvent", + "Autopilot", + "WorkloadPolicyConfig", + "LoggingConfig", + "LoggingComponentConfig", + "MonitoringConfig", + "AdvancedDatapathObservabilityConfig", + "NodePoolLoggingConfig", + "LoggingVariantConfig", + "MonitoringComponentConfig", + "ManagedPrometheusConfig", + "Fleet", + "LocalNvmeSsdBlockConfig", + "EphemeralStorageLocalSsdConfig", + }, +) + + +class PrivateIPv6GoogleAccess(proto.Enum): + r"""PrivateIPv6GoogleAccess controls whether and how the pods can + communicate with Google Services through gRPC over IPv6. + + Values: + PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): + Default value. Same as DISABLED + PRIVATE_IPV6_GOOGLE_ACCESS_DISABLED (1): + No private access to or from Google Services + PRIVATE_IPV6_GOOGLE_ACCESS_TO_GOOGLE (2): + Enables private IPv6 access to Google + Services from GKE + PRIVATE_IPV6_GOOGLE_ACCESS_BIDIRECTIONAL (3): + Enables private IPv6 access to and from + Google Services + """ + PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 + PRIVATE_IPV6_GOOGLE_ACCESS_DISABLED = 1 + PRIVATE_IPV6_GOOGLE_ACCESS_TO_GOOGLE = 2 + PRIVATE_IPV6_GOOGLE_ACCESS_BIDIRECTIONAL = 3 + + +class UpgradeResourceType(proto.Enum): + r"""UpgradeResourceType is the resource type that is upgrading. + It is used in upgrade notifications. + + Values: + UPGRADE_RESOURCE_TYPE_UNSPECIFIED (0): + Default value. This shouldn't be used. + MASTER (1): + Master / control plane + NODE_POOL (2): + Node pool + """ + UPGRADE_RESOURCE_TYPE_UNSPECIFIED = 0 + MASTER = 1 + NODE_POOL = 2 + + +class DatapathProvider(proto.Enum): + r"""The datapath provider selects the implementation of the + Kubernetes networking model for service resolution and network + policy enforcement. + + Values: + DATAPATH_PROVIDER_UNSPECIFIED (0): + Default value. + LEGACY_DATAPATH (1): + Use the IPTables implementation based on + kube-proxy. + ADVANCED_DATAPATH (2): + Use the eBPF based GKE Dataplane V2 with additional + features. See the `GKE Dataplane V2 + documentation `__ + for more. + """ + DATAPATH_PROVIDER_UNSPECIFIED = 0 + LEGACY_DATAPATH = 1 + ADVANCED_DATAPATH = 2 + + +class NodePoolUpdateStrategy(proto.Enum): + r"""Strategy used for node pool update. + + Values: + NODE_POOL_UPDATE_STRATEGY_UNSPECIFIED (0): + Default value if unset. GKE internally + defaults the update strategy to SURGE for + unspecified strategies. + BLUE_GREEN (2): + blue-green upgrade. + SURGE (3): + SURGE is the traditional way of upgrade a node pool. + max_surge and max_unavailable determines the level of + upgrade parallelism. 
+ """ + NODE_POOL_UPDATE_STRATEGY_UNSPECIFIED = 0 + BLUE_GREEN = 2 + SURGE = 3 + + +class StackType(proto.Enum): + r"""Possible values for IP stack type + + Values: + STACK_TYPE_UNSPECIFIED (0): + Default value, will be defaulted as IPV4 only + IPV4 (1): + Cluster is IPV4 only + IPV4_IPV6 (2): + Cluster can use both IPv4 and IPv6 + """ + STACK_TYPE_UNSPECIFIED = 0 + IPV4 = 1 + IPV4_IPV6 = 2 + + +class IPv6AccessType(proto.Enum): + r"""Possible values for IPv6 access type + + Values: + IPV6_ACCESS_TYPE_UNSPECIFIED (0): + Default value, will be defaulted as type + external. + INTERNAL (1): + Access type internal (all v6 addresses are + internal IPs) + EXTERNAL (2): + Access type external (all v6 addresses are + external IPs) + """ + IPV6_ACCESS_TYPE_UNSPECIFIED = 0 + INTERNAL = 1 + EXTERNAL = 2 + + +class LinuxNodeConfig(proto.Message): + r"""Parameters that can be configured on Linux nodes. + + Attributes: + sysctls (MutableMapping[str, str]): + The Linux kernel parameters to be applied to the nodes and + all pods running on the nodes. + + The following parameters are supported. + + net.core.busy_poll net.core.busy_read + net.core.netdev_max_backlog net.core.rmem_max + net.core.wmem_default net.core.wmem_max net.core.optmem_max + net.core.somaxconn net.ipv4.tcp_rmem net.ipv4.tcp_wmem + net.ipv4.tcp_tw_reuse + cgroup_mode (google.cloud.container_v1.types.LinuxNodeConfig.CgroupMode): + cgroup_mode specifies the cgroup mode to be used on the + node. + """ + + class CgroupMode(proto.Enum): + r"""Possible cgroup modes that can be used. + + Values: + CGROUP_MODE_UNSPECIFIED (0): + CGROUP_MODE_UNSPECIFIED is when unspecified cgroup + configuration is used. The default for the GKE node OS image + will be used. + CGROUP_MODE_V1 (1): + CGROUP_MODE_V1 specifies to use cgroupv1 for the cgroup + configuration on the node image. + CGROUP_MODE_V2 (2): + CGROUP_MODE_V2 specifies to use cgroupv2 for the cgroup + configuration on the node image. + """ + CGROUP_MODE_UNSPECIFIED = 0 + CGROUP_MODE_V1 = 1 + CGROUP_MODE_V2 = 2 + + sysctls: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + cgroup_mode: CgroupMode = proto.Field( + proto.ENUM, + number=2, + enum=CgroupMode, + ) + + +class WindowsNodeConfig(proto.Message): + r"""Parameters that can be configured on Windows nodes. + Windows Node Config that define the parameters that will be used + to configure the Windows node pool settings + + Attributes: + os_version (google.cloud.container_v1.types.WindowsNodeConfig.OSVersion): + OSVersion specifies the Windows node config + to be used on the node + """ + + class OSVersion(proto.Enum): + r"""Possible OS version that can be used. + + Values: + OS_VERSION_UNSPECIFIED (0): + When OSVersion is not specified + OS_VERSION_LTSC2019 (1): + LTSC2019 specifies to use LTSC2019 as the + Windows Servercore Base Image + OS_VERSION_LTSC2022 (2): + LTSC2022 specifies to use LTSC2022 as the + Windows Servercore Base Image + """ + OS_VERSION_UNSPECIFIED = 0 + OS_VERSION_LTSC2019 = 1 + OS_VERSION_LTSC2022 = 2 + + os_version: OSVersion = proto.Field( + proto.ENUM, + number=1, + enum=OSVersion, + ) + + +class NodeKubeletConfig(proto.Message): + r"""Node kubelet configs. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cpu_manager_policy (str): + Control the CPU management policy on the node. See + https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/ + + The following values are allowed. 
+ + - "none": the default, which represents the existing + scheduling behavior. + - "static": allows pods with certain resource + characteristics to be granted increased CPU affinity and + exclusivity on the node. The default value is 'none' if + unspecified. + cpu_cfs_quota (google.protobuf.wrappers_pb2.BoolValue): + Enable CPU CFS quota enforcement for + containers that specify CPU limits. + This option is enabled by default which makes + kubelet use CFS quota + (https://www.kernel.org/doc/Documentation/scheduler/sched-bwc.txt) + to enforce container CPU limits. Otherwise, CPU + limits will not be enforced at all. + + Disable this option to mitigate CPU throttling + problems while still having your pods to be in + Guaranteed QoS class by specifying the CPU + limits. + + The default value is 'true' if unspecified. + cpu_cfs_quota_period (str): + Set the CPU CFS quota period value 'cpu.cfs_period_us'. + + The string must be a sequence of decimal numbers, each with + optional fraction and a unit suffix, such as "300ms". Valid + time units are "ns", "us" (or "µs"), "ms", "s", "m", "h". + The value must be a positive duration. + pod_pids_limit (int): + Set the Pod PID limits. See + https://kubernetes.io/docs/concepts/policy/pid-limiting/#pod-pid-limits + + Controls the maximum number of processes allowed + to run in a pod. The value must be greater than + or equal to 1024 and less than 4194304. + insecure_kubelet_readonly_port_enabled (bool): + Enable or disable Kubelet read only port. + + This field is a member of `oneof`_ ``_insecure_kubelet_readonly_port_enabled``. + """ + + cpu_manager_policy: str = proto.Field( + proto.STRING, + number=1, + ) + cpu_cfs_quota: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=2, + message=wrappers_pb2.BoolValue, + ) + cpu_cfs_quota_period: str = proto.Field( + proto.STRING, + number=3, + ) + pod_pids_limit: int = proto.Field( + proto.INT64, + number=4, + ) + insecure_kubelet_readonly_port_enabled: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) + + +class NodeConfig(proto.Message): + r"""Parameters that describe the nodes in a cluster. + + GKE Autopilot clusters do not recognize parameters in + ``NodeConfig``. Use + [AutoprovisioningNodePoolDefaults][google.container.v1.AutoprovisioningNodePoolDefaults] + instead. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + machine_type (str): + The name of a Google Compute Engine `machine + type `__ + + If unspecified, the default machine type is ``e2-medium``. + disk_size_gb (int): + Size of the disk attached to each node, + specified in GB. The smallest allowed disk size + is 10GB. + + If unspecified, the default disk size is 100GB. + oauth_scopes (MutableSequence[str]): + The set of Google API scopes to be made available on all of + the node VMs under the "default" service account. + + The following scopes are recommended, but not required, and + by default are not included: + + - ``https://www.googleapis.com/auth/compute`` is required + for mounting persistent storage on your nodes. + - ``https://www.googleapis.com/auth/devstorage.read_only`` + is required for communicating with **gcr.io** (the + `Google Container + Registry `__). + + If unspecified, no scopes are added, unless Cloud Logging or + Cloud Monitoring are enabled, in which case their required + scopes will be added. + service_account (str): + The Google Cloud Platform Service Account to + be used by the node VMs. 
Specify the email
+            address of the Service Account; otherwise, if no
+            Service Account is specified, the "default"
+            service account is used.
+        metadata (MutableMapping[str, str]):
+            The metadata key/value pairs assigned to instances in the
+            cluster.
+
+            Keys must conform to the regexp ``[a-zA-Z0-9-_]+`` and be
+            less than 128 bytes in length. These are reflected as part
+            of a URL in the metadata server. Additionally, to avoid
+            ambiguity, keys must not conflict with any other metadata
+            keys for the project or be one of the reserved keys:
+
+            -  "cluster-location"
+            -  "cluster-name"
+            -  "cluster-uid"
+            -  "configure-sh"
+            -  "containerd-configure-sh"
+            -  "enable-os-login"
+            -  "gci-ensure-gke-docker"
+            -  "gci-metrics-enabled"
+            -  "gci-update-strategy"
+            -  "instance-template"
+            -  "kube-env"
+            -  "startup-script"
+            -  "user-data"
+            -  "disable-address-manager"
+            -  "windows-startup-script-ps1"
+            -  "common-psm1"
+            -  "k8s-node-setup-psm1"
+            -  "install-ssh-psm1"
+            -  "user-profile-psm1"
+
+            Values are free-form strings, and only have meaning as
+            interpreted by the image running in the instance. The only
+            restriction placed on them is that each value's size must be
+            less than or equal to 32 KB.
+
+            The total size of all keys and values must be less than 512
+            KB.
+        image_type (str):
+            The image type to use for this node. Note
+            that for a given image type, the latest version
+            of it will be used. Please see
+            https://cloud.google.com/kubernetes-engine/docs/concepts/node-images
+            for available image types.
+        labels (MutableMapping[str, str]):
+            The map of Kubernetes labels (key/value
+            pairs) to be applied to each node. These will
+            be added in addition to any default label(s)
+            that Kubernetes may apply to the node.
+            In case of conflict in label keys, the applied
+            set may differ depending on the Kubernetes
+            version -- it's best to assume the behavior is
+            undefined and conflicts should be avoided.
+            For more information, including usage and the
+            valid values, see:
+
+            https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/
+        local_ssd_count (int):
+            The number of local SSD disks to be attached
+            to the node.
+            The limit for this value is dependent upon the
+            maximum number of disks available on a machine
+            per zone. See:
+
+            https://cloud.google.com/compute/docs/disks/local-ssd
+            for more information.
+        tags (MutableSequence[str]):
+            The list of instance tags applied to all
+            nodes. Tags are used to identify valid sources
+            or targets for network firewalls and are
+            specified by the client during cluster or node
+            pool creation. Each tag within the list must
+            comply with RFC1035.
+        preemptible (bool):
+            Whether the nodes are created as preemptible
+            VM instances. See:
+            https://cloud.google.com/compute/docs/instances/preemptible
+            for more information about preemptible VM
+            instances.
+        accelerators (MutableSequence[google.cloud.container_v1.types.AcceleratorConfig]):
+            A list of hardware accelerators to be
+            attached to each node. See
+            https://cloud.google.com/compute/docs/gpus for
+            more information about support for GPUs.
+        disk_type (str):
+            Type of the disk attached to each node (e.g.
+            'pd-standard', 'pd-ssd' or 'pd-balanced')
+
+            If unspecified, the default disk type is
+            'pd-standard'
+        min_cpu_platform (str):
+            Minimum CPU platform to be used by this instance. The
+            instance may be scheduled on the specified or newer CPU
+            platform.
Applicable values are the friendly names of CPU + platforms, such as ``minCpuPlatform: "Intel Haswell"`` or + ``minCpuPlatform: "Intel Sandy Bridge"``. For more + information, read `how to specify min CPU + platform `__ + workload_metadata_config (google.cloud.container_v1.types.WorkloadMetadataConfig): + The workload metadata configuration for this + node. + taints (MutableSequence[google.cloud.container_v1.types.NodeTaint]): + List of kubernetes taints to be applied to + each node. + For more information, including usage and the + valid values, see: + + https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + sandbox_config (google.cloud.container_v1.types.SandboxConfig): + Sandbox configuration for this node. + node_group (str): + Setting this field will assign instances of this pool to run + on the specified node group. This is useful for running + workloads on `sole tenant + nodes `__. + reservation_affinity (google.cloud.container_v1.types.ReservationAffinity): + The optional reservation affinity. Setting this field will + apply the specified `Zonal Compute + Reservation `__ + to this node pool. + shielded_instance_config (google.cloud.container_v1.types.ShieldedInstanceConfig): + Shielded Instance options. + linux_node_config (google.cloud.container_v1.types.LinuxNodeConfig): + Parameters that can be configured on Linux + nodes. + kubelet_config (google.cloud.container_v1.types.NodeKubeletConfig): + Node kubelet configs. + boot_disk_kms_key (str): + The Customer Managed Encryption Key used to encrypt the boot + disk attached to each node in the node pool. This should be + of the form + projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. + For more information about protecting resources with Cloud + KMS Keys please see: + https://cloud.google.com/compute/docs/disks/customer-managed-encryption + gcfs_config (google.cloud.container_v1.types.GcfsConfig): + Google Container File System (image + streaming) configs. + advanced_machine_features (google.cloud.container_v1.types.AdvancedMachineFeatures): + Advanced features for the Compute Engine VM. + gvnic (google.cloud.container_v1.types.VirtualNIC): + Enable or disable gvnic in the node pool. + spot (bool): + Spot flag for enabling Spot VM, which is a + rebrand of the existing preemptible flag. + confidential_nodes (google.cloud.container_v1.types.ConfidentialNodes): + Confidential nodes config. + All the nodes in the node pool will be + Confidential VM once enabled. + fast_socket (google.cloud.container_v1.types.FastSocket): + Enable or disable NCCL fast socket for the + node pool. + + This field is a member of `oneof`_ ``_fast_socket``. + resource_labels (MutableMapping[str, str]): + The resource labels for the node pool to use + to annotate any related Google Compute Engine + resources. + logging_config (google.cloud.container_v1.types.NodePoolLoggingConfig): + Logging configuration. + windows_node_config (google.cloud.container_v1.types.WindowsNodeConfig): + Parameters that can be configured on Windows + nodes. + local_nvme_ssd_block_config (google.cloud.container_v1.types.LocalNvmeSsdBlockConfig): + Parameters for using raw-block Local NVMe + SSDs. + ephemeral_storage_local_ssd_config (google.cloud.container_v1.types.EphemeralStorageLocalSsdConfig): + Parameters for the node ephemeral storage + using Local SSDs. If unspecified, ephemeral + storage is backed by the boot disk. 
+ sole_tenant_config (google.cloud.container_v1.types.SoleTenantConfig): + Parameters for node pools to be backed by + shared sole tenant node groups. + """ + + machine_type: str = proto.Field( + proto.STRING, + number=1, + ) + disk_size_gb: int = proto.Field( + proto.INT32, + number=2, + ) + oauth_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + service_account: str = proto.Field( + proto.STRING, + number=9, + ) + metadata: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + image_type: str = proto.Field( + proto.STRING, + number=5, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + local_ssd_count: int = proto.Field( + proto.INT32, + number=7, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + preemptible: bool = proto.Field( + proto.BOOL, + number=10, + ) + accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="AcceleratorConfig", + ) + disk_type: str = proto.Field( + proto.STRING, + number=12, + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=13, + ) + workload_metadata_config: "WorkloadMetadataConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="WorkloadMetadataConfig", + ) + taints: MutableSequence["NodeTaint"] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message="NodeTaint", + ) + sandbox_config: "SandboxConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="SandboxConfig", + ) + node_group: str = proto.Field( + proto.STRING, + number=18, + ) + reservation_affinity: "ReservationAffinity" = proto.Field( + proto.MESSAGE, + number=19, + message="ReservationAffinity", + ) + shielded_instance_config: "ShieldedInstanceConfig" = proto.Field( + proto.MESSAGE, + number=20, + message="ShieldedInstanceConfig", + ) + linux_node_config: "LinuxNodeConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="LinuxNodeConfig", + ) + kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=22, + message="NodeKubeletConfig", + ) + boot_disk_kms_key: str = proto.Field( + proto.STRING, + number=23, + ) + gcfs_config: "GcfsConfig" = proto.Field( + proto.MESSAGE, + number=25, + message="GcfsConfig", + ) + advanced_machine_features: "AdvancedMachineFeatures" = proto.Field( + proto.MESSAGE, + number=26, + message="AdvancedMachineFeatures", + ) + gvnic: "VirtualNIC" = proto.Field( + proto.MESSAGE, + number=29, + message="VirtualNIC", + ) + spot: bool = proto.Field( + proto.BOOL, + number=32, + ) + confidential_nodes: "ConfidentialNodes" = proto.Field( + proto.MESSAGE, + number=35, + message="ConfidentialNodes", + ) + fast_socket: "FastSocket" = proto.Field( + proto.MESSAGE, + number=36, + optional=True, + message="FastSocket", + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=37, + ) + logging_config: "NodePoolLoggingConfig" = proto.Field( + proto.MESSAGE, + number=38, + message="NodePoolLoggingConfig", + ) + windows_node_config: "WindowsNodeConfig" = proto.Field( + proto.MESSAGE, + number=39, + message="WindowsNodeConfig", + ) + local_nvme_ssd_block_config: "LocalNvmeSsdBlockConfig" = proto.Field( + proto.MESSAGE, + number=40, + message="LocalNvmeSsdBlockConfig", + ) + ephemeral_storage_local_ssd_config: "EphemeralStorageLocalSsdConfig" = proto.Field( + proto.MESSAGE, + number=41, + message="EphemeralStorageLocalSsdConfig", + ) + sole_tenant_config: 
"SoleTenantConfig" = proto.Field( + proto.MESSAGE, + number=42, + message="SoleTenantConfig", + ) + + +class AdvancedMachineFeatures(proto.Message): + r"""Specifies options for controlling advanced machine features. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + threads_per_core (int): + The number of threads per physical core. To + disable simultaneous multithreading (SMT) set + this to 1. If unset, the maximum number of + threads supported per core by the underlying + processor is assumed. + + This field is a member of `oneof`_ ``_threads_per_core``. + """ + + threads_per_core: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + + +class NodeNetworkConfig(proto.Message): + r"""Parameters for node pool-level network config. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + create_pod_range (bool): + Input only. Whether to create a new range for pod IPs in + this node pool. Defaults are provided for ``pod_range`` and + ``pod_ipv4_cidr_block`` if they are not specified. + + If neither ``create_pod_range`` or ``pod_range`` are + specified, the cluster-level default + (``ip_allocation_policy.cluster_ipv4_cidr_block``) is used. + + Only applicable if ``ip_allocation_policy.use_ip_aliases`` + is true. + + This field cannot be changed after the node pool has been + created. + pod_range (str): + The ID of the secondary range for pod IPs. If + ``create_pod_range`` is true, this ID is used for the new + range. If ``create_pod_range`` is false, uses an existing + secondary range with this ID. + + Only applicable if ``ip_allocation_policy.use_ip_aliases`` + is true. + + This field cannot be changed after the node pool has been + created. + pod_ipv4_cidr_block (str): + The IP address range for pod IPs in this node pool. + + Only applicable if ``create_pod_range`` is true. + + Set to blank to have a range chosen with the default size. + + Set to /netmask (e.g. ``/14``) to have a range chosen with a + specific netmask. + + Set to a + `CIDR `__ + notation (e.g. ``10.96.0.0/14``) to pick a specific range to + use. + + Only applicable if ``ip_allocation_policy.use_ip_aliases`` + is true. + + This field cannot be changed after the node pool has been + created. + enable_private_nodes (bool): + Whether nodes have internal IP addresses only. If + enable_private_nodes is not specified, then the value is + derived from + [cluster.privateClusterConfig.enablePrivateNodes][google.container.v1beta1.PrivateClusterConfig.enablePrivateNodes] + + This field is a member of `oneof`_ ``_enable_private_nodes``. + network_performance_config (google.cloud.container_v1.types.NodeNetworkConfig.NetworkPerformanceConfig): + Network bandwidth tier configuration. + + This field is a member of `oneof`_ ``_network_performance_config``. + pod_cidr_overprovision_config (google.cloud.container_v1.types.PodCIDROverprovisionConfig): + [PRIVATE FIELD] Pod CIDR size overprovisioning config for + the nodepool. + + Pod CIDR size per node depends on max_pods_per_node. By + default, the value of max_pods_per_node is rounded off to + next power of 2 and we then double that to get the size of + pod CIDR block per node. Example: max_pods_per_node of 30 + would result in 64 IPs (/26). + + This config can disable the doubling of IPs (we still round + off to next power of 2) Example: max_pods_per_node of 30 + will result in 32 IPs (/27) when overprovisioning is + disabled. 
+        additional_node_network_configs (MutableSequence[google.cloud.container_v1.types.AdditionalNodeNetworkConfig]):
+            We specify the additional node networks for
+            this node pool using this list. Each node
+            network corresponds to an additional interface
+        additional_pod_network_configs (MutableSequence[google.cloud.container_v1.types.AdditionalPodNetworkConfig]):
+            We specify the additional pod networks for
+            this node pool using this list. Each pod network
+            corresponds to an additional alias IP range for
+            the node
+        pod_ipv4_range_utilization (float):
+            Output only. The utilization of the IPv4 range for the pod.
+            The ratio is Usage / [total number of IPs in the secondary
+            range], where Usage = numNodes * numZones * podIPsPerNode.
+    """
+
+    class NetworkPerformanceConfig(proto.Message):
+        r"""Configuration of all network bandwidth tiers
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            total_egress_bandwidth_tier (google.cloud.container_v1.types.NodeNetworkConfig.NetworkPerformanceConfig.Tier):
+                Specifies the total network bandwidth tier
+                for the NodePool.
+
+                This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``.
+        """
+
+        class Tier(proto.Enum):
+            r"""Node network tier
+
+            Values:
+                TIER_UNSPECIFIED (0):
+                    Default value
+                TIER_1 (1):
+                    Higher bandwidth, actual values based on VM
+                    size.
+            """
+            TIER_UNSPECIFIED = 0
+            TIER_1 = 1
+
+        total_egress_bandwidth_tier: "NodeNetworkConfig.NetworkPerformanceConfig.Tier" = proto.Field(
+            proto.ENUM,
+            number=1,
+            optional=True,
+            enum="NodeNetworkConfig.NetworkPerformanceConfig.Tier",
+        )
+
+    create_pod_range: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    pod_range: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    pod_ipv4_cidr_block: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    enable_private_nodes: bool = proto.Field(
+        proto.BOOL,
+        number=9,
+        optional=True,
+    )
+    network_performance_config: NetworkPerformanceConfig = proto.Field(
+        proto.MESSAGE,
+        number=11,
+        optional=True,
+        message=NetworkPerformanceConfig,
+    )
+    pod_cidr_overprovision_config: "PodCIDROverprovisionConfig" = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message="PodCIDROverprovisionConfig",
+    )
+    additional_node_network_configs: MutableSequence[
+        "AdditionalNodeNetworkConfig"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=14,
+        message="AdditionalNodeNetworkConfig",
+    )
+    additional_pod_network_configs: MutableSequence[
+        "AdditionalPodNetworkConfig"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=15,
+        message="AdditionalPodNetworkConfig",
+    )
+    pod_ipv4_range_utilization: float = proto.Field(
+        proto.DOUBLE,
+        number=16,
+    )
+
+
+class AdditionalNodeNetworkConfig(proto.Message):
+    r"""AdditionalNodeNetworkConfig is the configuration for
+    additional node networks within the NodeNetworkConfig message
+
+    Attributes:
+        network (str):
+            Name of the VPC where the additional
+            interface belongs
+        subnetwork (str):
+            Name of the subnetwork where the additional
+            interface belongs
+    """
+
+    network: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    subnetwork: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class AdditionalPodNetworkConfig(proto.Message):
+    r"""AdditionalPodNetworkConfig is the configuration for
+    additional pod networks within the NodeNetworkConfig message
+
+
+    ..
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + subnetwork (str): + Name of the subnetwork where the additional + pod network belongs + secondary_pod_range (str): + The name of the secondary range on the subnet + which provides IP address for this pod range + max_pods_per_node (google.cloud.container_v1.types.MaxPodsConstraint): + The maximum number of pods per node which use + this pod network + + This field is a member of `oneof`_ ``_max_pods_per_node``. + """ + + subnetwork: str = proto.Field( + proto.STRING, + number=1, + ) + secondary_pod_range: str = proto.Field( + proto.STRING, + number=2, + ) + max_pods_per_node: "MaxPodsConstraint" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="MaxPodsConstraint", + ) + + +class ShieldedInstanceConfig(proto.Message): + r"""A set of Shielded Instance options. + + Attributes: + enable_secure_boot (bool): + Defines whether the instance has Secure Boot + enabled. + Secure Boot helps ensure that the system only + runs authentic software by verifying the digital + signature of all boot components, and halting + the boot process if signature verification + fails. + enable_integrity_monitoring (bool): + Defines whether the instance has integrity + monitoring enabled. + Enables monitoring and attestation of the boot + integrity of the instance. The attestation is + performed against the integrity policy baseline. + This baseline is initially derived from the + implicitly trusted boot image when the instance + is created. + """ + + enable_secure_boot: bool = proto.Field( + proto.BOOL, + number=1, + ) + enable_integrity_monitoring: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class SandboxConfig(proto.Message): + r"""SandboxConfig contains configurations of the sandbox to use + for the node. + + Attributes: + type_ (google.cloud.container_v1.types.SandboxConfig.Type): + Type of the sandbox to use for the node. + """ + + class Type(proto.Enum): + r"""Possible types of sandboxes. + + Values: + UNSPECIFIED (0): + Default value. This should not be used. + GVISOR (1): + Run sandbox using gvisor. + """ + UNSPECIFIED = 0 + GVISOR = 1 + + type_: Type = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + + +class GcfsConfig(proto.Message): + r"""GcfsConfig contains configurations of Google Container File + System (image streaming). + + Attributes: + enabled (bool): + Whether to use GCFS. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ReservationAffinity(proto.Message): + r"""`ReservationAffinity `__ + is the configuration of desired reservation which instances could + take capacity from. + + Attributes: + consume_reservation_type (google.cloud.container_v1.types.ReservationAffinity.Type): + Corresponds to the type of reservation + consumption. + key (str): + Corresponds to the label key of a reservation resource. To + target a SPECIFIC_RESERVATION by name, specify + "compute.googleapis.com/reservation-name" as the key and + specify the name of your reservation as its value. + values (MutableSequence[str]): + Corresponds to the label value(s) of + reservation resource(s). + """ + + class Type(proto.Enum): + r"""Indicates whether to consume capacity from a reservation or + not. + + Values: + UNSPECIFIED (0): + Default value. This should not be used. + NO_RESERVATION (1): + Do not consume from any reserved capacity. + ANY_RESERVATION (2): + Consume any reservation available. 
+ SPECIFIC_RESERVATION (3): + Must consume from a specific reservation. + Must specify key value fields for specifying the + reservations. + """ + UNSPECIFIED = 0 + NO_RESERVATION = 1 + ANY_RESERVATION = 2 + SPECIFIC_RESERVATION = 3 + + consume_reservation_type: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + key: str = proto.Field( + proto.STRING, + number=2, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class SoleTenantConfig(proto.Message): + r"""SoleTenantConfig contains the NodeAffinities to specify what + shared sole tenant node groups should back the node pool. + + Attributes: + node_affinities (MutableSequence[google.cloud.container_v1.types.SoleTenantConfig.NodeAffinity]): + NodeAffinities used to match to a shared sole + tenant node group. + """ + + class NodeAffinity(proto.Message): + r"""Specifies the NodeAffinity key, values, and affinity operator + according to `shared sole tenant node group + affinities `__. + + Attributes: + key (str): + Key for NodeAffinity. + operator (google.cloud.container_v1.types.SoleTenantConfig.NodeAffinity.Operator): + Operator for NodeAffinity. + values (MutableSequence[str]): + Values for NodeAffinity. + """ + + class Operator(proto.Enum): + r"""Operator allows user to specify affinity or anti-affinity for + the given key values. + + Values: + OPERATOR_UNSPECIFIED (0): + Invalid or unspecified affinity operator. + IN (1): + Affinity operator. + NOT_IN (2): + Anti-affinity operator. + """ + OPERATOR_UNSPECIFIED = 0 + IN = 1 + NOT_IN = 2 + + key: str = proto.Field( + proto.STRING, + number=1, + ) + operator: "SoleTenantConfig.NodeAffinity.Operator" = proto.Field( + proto.ENUM, + number=2, + enum="SoleTenantConfig.NodeAffinity.Operator", + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + node_affinities: MutableSequence[NodeAffinity] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=NodeAffinity, + ) + + +class NodeTaint(proto.Message): + r"""Kubernetes taint is composed of three fields: key, value, and + effect. Effect can only be one of three types: NoSchedule, + PreferNoSchedule or NoExecute. + + See + `here `__ + for more information, including usage and the valid values. + + Attributes: + key (str): + Key for taint. + value (str): + Value for taint. + effect (google.cloud.container_v1.types.NodeTaint.Effect): + Effect for taint. + """ + + class Effect(proto.Enum): + r"""Possible values for Effect in taint. + + Values: + EFFECT_UNSPECIFIED (0): + Not set + NO_SCHEDULE (1): + NoSchedule + PREFER_NO_SCHEDULE (2): + PreferNoSchedule + NO_EXECUTE (3): + NoExecute + """ + EFFECT_UNSPECIFIED = 0 + NO_SCHEDULE = 1 + PREFER_NO_SCHEDULE = 2 + NO_EXECUTE = 3 + + key: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + effect: Effect = proto.Field( + proto.ENUM, + number=3, + enum=Effect, + ) + + +class NodeTaints(proto.Message): + r"""Collection of Kubernetes `node + taints `__. + + Attributes: + taints (MutableSequence[google.cloud.container_v1.types.NodeTaint]): + List of node taints. + """ + + taints: MutableSequence["NodeTaint"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NodeTaint", + ) + + +class NodeLabels(proto.Message): + r"""Collection of node-level `Kubernetes + labels `__. + + Attributes: + labels (MutableMapping[str, str]): + Map of node label keys and node label values. 
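As a usage sketch (not part of the generated file), the reservation and taint messages above compose into a node configuration like the following; the machine type, reservation name, and taint key/value are illustrative placeholders, not values from this diff.

```python
from google.cloud.container_v1 import types

# Target a specific Compute Engine reservation and taint the nodes so
# that only pods with a matching toleration schedule onto them.
# All concrete values here are illustrative placeholders.
node_config = types.NodeConfig(
    machine_type="n1-standard-4",
    reservation_affinity=types.ReservationAffinity(
        consume_reservation_type=types.ReservationAffinity.Type.SPECIFIC_RESERVATION,
        key="compute.googleapis.com/reservation-name",
        values=["my-reservation"],
    ),
    taints=[
        types.NodeTaint(
            key="reserved-pool",
            value="true",
            effect=types.NodeTaint.Effect.NO_SCHEDULE,
        )
    ],
)
```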
+ """ + + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +class ResourceLabels(proto.Message): + r"""Collection of `GCP + labels `__. + + Attributes: + labels (MutableMapping[str, str]): + Map of node label keys and node label values. + """ + + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +class NetworkTags(proto.Message): + r"""Collection of Compute Engine network tags that can be applied + to a node's underlying VM instance. + + Attributes: + tags (MutableSequence[str]): + List of network tags. + """ + + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class MasterAuth(proto.Message): + r"""The authentication information for accessing the master + endpoint. Authentication can be done using HTTP basic auth or + using client certificates. + + Attributes: + username (str): + The username to use for HTTP basic + authentication to the master endpoint. For + clusters v1.6.0 and later, basic authentication + can be disabled by leaving username unspecified + (or setting it to the empty string). + + Warning: basic authentication is deprecated, and + will be removed in GKE control plane versions + 1.19 and newer. For a list of recommended + authentication methods, see: + + https://cloud.google.com/kubernetes-engine/docs/how-to/api-server-authentication + password (str): + The password to use for HTTP basic + authentication to the master endpoint. Because + the master endpoint is open to the Internet, you + should create a strong password. If a password + is provided for cluster creation, username must + be non-empty. + + Warning: basic authentication is deprecated, and + will be removed in GKE control plane versions + 1.19 and newer. For a list of recommended + authentication methods, see: + + https://cloud.google.com/kubernetes-engine/docs/how-to/api-server-authentication + client_certificate_config (google.cloud.container_v1.types.ClientCertificateConfig): + Configuration for client certificate + authentication on the cluster. For clusters + before v1.12, if no configuration is specified, + a client certificate is issued. + cluster_ca_certificate (str): + [Output only] Base64-encoded public certificate that is the + root of trust for the cluster. + client_certificate (str): + [Output only] Base64-encoded public certificate used by + clients to authenticate to the cluster endpoint. + client_key (str): + [Output only] Base64-encoded private key used by clients to + authenticate to the cluster endpoint. + """ + + username: str = proto.Field( + proto.STRING, + number=1, + ) + password: str = proto.Field( + proto.STRING, + number=2, + ) + client_certificate_config: "ClientCertificateConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="ClientCertificateConfig", + ) + cluster_ca_certificate: str = proto.Field( + proto.STRING, + number=100, + ) + client_certificate: str = proto.Field( + proto.STRING, + number=101, + ) + client_key: str = proto.Field( + proto.STRING, + number=102, + ) + + +class ClientCertificateConfig(proto.Message): + r"""Configuration for client certificates on the cluster. + + Attributes: + issue_client_certificate (bool): + Issue a client certificate. + """ + + issue_client_certificate: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class AddonsConfig(proto.Message): + r"""Configuration for the addons that can be automatically spun + up in the cluster, enabling additional functionality. 
+
+    Attributes:
+        http_load_balancing (google.cloud.container_v1.types.HttpLoadBalancing):
+            Configuration for the HTTP (L7) load
+            balancing controller addon, which makes it easy
+            to set up HTTP load balancers for services in a
+            cluster.
+        horizontal_pod_autoscaling (google.cloud.container_v1.types.HorizontalPodAutoscaling):
+            Configuration for the horizontal pod
+            autoscaling feature, which increases or
+            decreases the number of replica pods a
+            replication controller has based on the resource
+            usage of the existing pods.
+        kubernetes_dashboard (google.cloud.container_v1.types.KubernetesDashboard):
+            Configuration for the Kubernetes Dashboard.
+            This addon is deprecated, and will be disabled
+            in 1.15. It is recommended to use the Cloud
+            Console to manage and monitor your Kubernetes
+            clusters, workloads and applications. For more
+            information, see:
+
+            https://cloud.google.com/kubernetes-engine/docs/concepts/dashboards
+        network_policy_config (google.cloud.container_v1.types.NetworkPolicyConfig):
+            Configuration for NetworkPolicy. This only
+            tracks whether the addon is enabled or not on
+            the Master; it does not track whether network
+            policy is enabled for the nodes.
+        cloud_run_config (google.cloud.container_v1.types.CloudRunConfig):
+            Configuration for the Cloud Run addon, which
+            allows the user to use a managed Knative
+            service.
+        dns_cache_config (google.cloud.container_v1.types.DnsCacheConfig):
+            Configuration for NodeLocalDNS, a DNS cache
+            running on cluster nodes.
+        config_connector_config (google.cloud.container_v1.types.ConfigConnectorConfig):
+            Configuration for the ConfigConnector add-on,
+            a Kubernetes extension to manage hosted GCP
+            services through the Kubernetes API.
+        gce_persistent_disk_csi_driver_config (google.cloud.container_v1.types.GcePersistentDiskCsiDriverConfig):
+            Configuration for the Compute Engine
+            Persistent Disk CSI driver.
+        gcp_filestore_csi_driver_config (google.cloud.container_v1.types.GcpFilestoreCsiDriverConfig):
+            Configuration for the GCP Filestore CSI
+            driver.
+        gke_backup_agent_config (google.cloud.container_v1.types.GkeBackupAgentConfig):
+            Configuration for the Backup for GKE agent
+            addon.
+        gcs_fuse_csi_driver_config (google.cloud.container_v1.types.GcsFuseCsiDriverConfig):
+            Configuration for the Cloud Storage Fuse CSI
+            driver.
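For illustration, the addon messages above compose into a single AddonsConfig; note that some addon messages carry a `disabled` flag while others carry `enabled`. This is a sketch, not code from this diff.

```python
from google.cloud.container_v1 import types

# Enable HTTP load balancing, NodeLocal DNSCache, and the Compute
# Engine PD CSI driver; keep the deprecated Kubernetes Dashboard off.
addons = types.AddonsConfig(
    http_load_balancing=types.HttpLoadBalancing(disabled=False),
    kubernetes_dashboard=types.KubernetesDashboard(disabled=True),
    dns_cache_config=types.DnsCacheConfig(enabled=True),
    gce_persistent_disk_csi_driver_config=types.GcePersistentDiskCsiDriverConfig(
        enabled=True
    ),
)
```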
+ """ + + http_load_balancing: "HttpLoadBalancing" = proto.Field( + proto.MESSAGE, + number=1, + message="HttpLoadBalancing", + ) + horizontal_pod_autoscaling: "HorizontalPodAutoscaling" = proto.Field( + proto.MESSAGE, + number=2, + message="HorizontalPodAutoscaling", + ) + kubernetes_dashboard: "KubernetesDashboard" = proto.Field( + proto.MESSAGE, + number=3, + message="KubernetesDashboard", + ) + network_policy_config: "NetworkPolicyConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="NetworkPolicyConfig", + ) + cloud_run_config: "CloudRunConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="CloudRunConfig", + ) + dns_cache_config: "DnsCacheConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="DnsCacheConfig", + ) + config_connector_config: "ConfigConnectorConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="ConfigConnectorConfig", + ) + gce_persistent_disk_csi_driver_config: "GcePersistentDiskCsiDriverConfig" = ( + proto.Field( + proto.MESSAGE, + number=11, + message="GcePersistentDiskCsiDriverConfig", + ) + ) + gcp_filestore_csi_driver_config: "GcpFilestoreCsiDriverConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="GcpFilestoreCsiDriverConfig", + ) + gke_backup_agent_config: "GkeBackupAgentConfig" = proto.Field( + proto.MESSAGE, + number=16, + message="GkeBackupAgentConfig", + ) + gcs_fuse_csi_driver_config: "GcsFuseCsiDriverConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="GcsFuseCsiDriverConfig", + ) + + +class HttpLoadBalancing(proto.Message): + r"""Configuration options for the HTTP (L7) load balancing + controller addon, which makes it easy to set up HTTP load + balancers for services in a cluster. + + Attributes: + disabled (bool): + Whether the HTTP Load Balancing controller is + enabled in the cluster. When enabled, it runs a + small pod in the cluster that manages the load + balancers. + """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class HorizontalPodAutoscaling(proto.Message): + r"""Configuration options for the horizontal pod autoscaling + feature, which increases or decreases the number of replica pods + a replication controller has based on the resource usage of the + existing pods. + + Attributes: + disabled (bool): + Whether the Horizontal Pod Autoscaling + feature is enabled in the cluster. When enabled, + it ensures that metrics are collected into + Stackdriver Monitoring. + """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class KubernetesDashboard(proto.Message): + r"""Configuration for the Kubernetes Dashboard. + + Attributes: + disabled (bool): + Whether the Kubernetes Dashboard is enabled + for this cluster. + """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class NetworkPolicyConfig(proto.Message): + r"""Configuration for NetworkPolicy. This only tracks whether the + addon is enabled or not on the Master, it does not track whether + network policy is enabled for the nodes. + + Attributes: + disabled (bool): + Whether NetworkPolicy is enabled for this + cluster. + """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class DnsCacheConfig(proto.Message): + r"""Configuration for NodeLocal DNSCache + + Attributes: + enabled (bool): + Whether NodeLocal DNSCache is enabled for + this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class PrivateClusterMasterGlobalAccessConfig(proto.Message): + r"""Configuration for controlling master global access settings. 
+ + Attributes: + enabled (bool): + Whenever master is accessible globally or + not. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class PrivateClusterConfig(proto.Message): + r"""Configuration options for private clusters. + + Attributes: + enable_private_nodes (bool): + Whether nodes have internal IP addresses + only. If enabled, all nodes are given only RFC + 1918 private addresses and communicate with the + master via private networking. + enable_private_endpoint (bool): + Whether the master's internal IP address is + used as the cluster endpoint. + master_ipv4_cidr_block (str): + The IP range in CIDR notation to use for the + hosted master network. This range will be used + for assigning internal IP addresses to the + master or set of masters, as well as the ILB + VIP. This range must not overlap with any other + ranges in use within the cluster's network. + private_endpoint (str): + Output only. The internal IP address of this + cluster's master endpoint. + public_endpoint (str): + Output only. The external IP address of this + cluster's master endpoint. + peering_name (str): + Output only. The peering name in the customer + VPC used by this cluster. + master_global_access_config (google.cloud.container_v1.types.PrivateClusterMasterGlobalAccessConfig): + Controls master global access settings. + private_endpoint_subnetwork (str): + Subnet to provision the master's private endpoint during + cluster creation. Specified in + projects/\ */regions/*/subnetworks/\* format. + """ + + enable_private_nodes: bool = proto.Field( + proto.BOOL, + number=1, + ) + enable_private_endpoint: bool = proto.Field( + proto.BOOL, + number=2, + ) + master_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=3, + ) + private_endpoint: str = proto.Field( + proto.STRING, + number=4, + ) + public_endpoint: str = proto.Field( + proto.STRING, + number=5, + ) + peering_name: str = proto.Field( + proto.STRING, + number=7, + ) + master_global_access_config: "PrivateClusterMasterGlobalAccessConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="PrivateClusterMasterGlobalAccessConfig", + ) + private_endpoint_subnetwork: str = proto.Field( + proto.STRING, + number=10, + ) + + +class AuthenticatorGroupsConfig(proto.Message): + r"""Configuration for returning group information from + authenticators. + + Attributes: + enabled (bool): + Whether this cluster should return group + membership lookups during authentication using a + group of security groups. + security_group (str): + The name of the security group-of-groups to + be used. Only relevant if enabled = true. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + security_group: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CloudRunConfig(proto.Message): + r"""Configuration options for the Cloud Run feature. + + Attributes: + disabled (bool): + Whether Cloud Run addon is enabled for this + cluster. + load_balancer_type (google.cloud.container_v1.types.CloudRunConfig.LoadBalancerType): + Which load balancer type is installed for + Cloud Run. + """ + + class LoadBalancerType(proto.Enum): + r"""Load balancer type of ingress service of Cloud Run. + + Values: + LOAD_BALANCER_TYPE_UNSPECIFIED (0): + Load balancer type for Cloud Run is + unspecified. + LOAD_BALANCER_TYPE_EXTERNAL (1): + Install external load balancer for Cloud Run. + LOAD_BALANCER_TYPE_INTERNAL (2): + Install internal load balancer for Cloud Run. 
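A hedged sketch of the private-cluster settings described above; the master CIDR is an arbitrary example and must not overlap other ranges in the cluster's network.

```python
from google.cloud.container_v1 import types

# Private nodes with a publicly reachable control-plane endpoint and
# global access to the master enabled. Values are placeholders.
private_config = types.PrivateClusterConfig(
    enable_private_nodes=True,
    enable_private_endpoint=False,
    master_ipv4_cidr_block="172.16.0.32/28",
    master_global_access_config=types.PrivateClusterMasterGlobalAccessConfig(
        enabled=True
    ),
)
```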
+ """ + LOAD_BALANCER_TYPE_UNSPECIFIED = 0 + LOAD_BALANCER_TYPE_EXTERNAL = 1 + LOAD_BALANCER_TYPE_INTERNAL = 2 + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + load_balancer_type: LoadBalancerType = proto.Field( + proto.ENUM, + number=3, + enum=LoadBalancerType, + ) + + +class ConfigConnectorConfig(proto.Message): + r"""Configuration options for the Config Connector add-on. + + Attributes: + enabled (bool): + Whether Cloud Connector is enabled for this + cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GcePersistentDiskCsiDriverConfig(proto.Message): + r"""Configuration for the Compute Engine PD CSI driver. + + Attributes: + enabled (bool): + Whether the Compute Engine PD CSI driver is + enabled for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GcpFilestoreCsiDriverConfig(proto.Message): + r"""Configuration for the GCP Filestore CSI driver. + + Attributes: + enabled (bool): + Whether the GCP Filestore CSI driver is + enabled for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GcsFuseCsiDriverConfig(proto.Message): + r"""Configuration for the Cloud Storage Fuse CSI driver. + + Attributes: + enabled (bool): + Whether the Cloud Storage Fuse CSI driver is + enabled for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GkeBackupAgentConfig(proto.Message): + r"""Configuration for the Backup for GKE Agent. + + Attributes: + enabled (bool): + Whether the Backup for GKE agent is enabled + for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class MasterAuthorizedNetworksConfig(proto.Message): + r"""Configuration options for the master authorized networks + feature. Enabled master authorized networks will disallow all + external traffic to access Kubernetes master through HTTPS + except traffic from the given CIDR blocks, Google Compute Engine + Public IPs and Google Prod IPs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Whether or not master authorized networks is + enabled. + cidr_blocks (MutableSequence[google.cloud.container_v1.types.MasterAuthorizedNetworksConfig.CidrBlock]): + cidr_blocks define up to 50 external networks that could + access Kubernetes master through HTTPS. + gcp_public_cidrs_access_enabled (bool): + Whether master is accessbile via Google + Compute Engine Public IP addresses. + + This field is a member of `oneof`_ ``_gcp_public_cidrs_access_enabled``. + """ + + class CidrBlock(proto.Message): + r"""CidrBlock contains an optional name and one CIDR block. + + Attributes: + display_name (str): + display_name is an optional field for users to identify CIDR + blocks. + cidr_block (str): + cidr_block must be specified in CIDR notation. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + cidr_block: str = proto.Field( + proto.STRING, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + cidr_blocks: MutableSequence[CidrBlock] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=CidrBlock, + ) + gcp_public_cidrs_access_enabled: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + + +class LegacyAbac(proto.Message): + r"""Configuration for the legacy Attribute Based Access Control + authorization mode. 
+ + Attributes: + enabled (bool): + Whether the ABAC authorizer is enabled for + this cluster. When enabled, identities in the + system, including service accounts, nodes, and + controllers, will have statically granted + permissions beyond those provided by the RBAC + configuration or IAM. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class NetworkPolicy(proto.Message): + r"""Configuration options for the NetworkPolicy feature. + https://kubernetes.io/docs/concepts/services-networking/networkpolicies/ + + Attributes: + provider (google.cloud.container_v1.types.NetworkPolicy.Provider): + The selected network policy provider. + enabled (bool): + Whether network policy is enabled on the + cluster. + """ + + class Provider(proto.Enum): + r"""Allowed Network Policy providers. + + Values: + PROVIDER_UNSPECIFIED (0): + Not set + CALICO (1): + Tigera (Calico Felix). + """ + PROVIDER_UNSPECIFIED = 0 + CALICO = 1 + + provider: Provider = proto.Field( + proto.ENUM, + number=1, + enum=Provider, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class BinaryAuthorization(proto.Message): + r"""Configuration for Binary Authorization. + + Attributes: + enabled (bool): + This field is deprecated. Leave this unset and instead + configure BinaryAuthorization using evaluation_mode. If + evaluation_mode is set to anything other than + EVALUATION_MODE_UNSPECIFIED, this field is ignored. + evaluation_mode (google.cloud.container_v1.types.BinaryAuthorization.EvaluationMode): + Mode of operation for binauthz policy + evaluation. If unspecified, defaults to + DISABLED. + """ + + class EvaluationMode(proto.Enum): + r"""Binary Authorization mode of operation. + + Values: + EVALUATION_MODE_UNSPECIFIED (0): + Default value + DISABLED (1): + Disable BinaryAuthorization + PROJECT_SINGLETON_POLICY_ENFORCE (2): + Enforce Kubernetes admission requests with + BinaryAuthorization using the project's + singleton policy. This is equivalent to setting + the enabled boolean to true. + """ + EVALUATION_MODE_UNSPECIFIED = 0 + DISABLED = 1 + PROJECT_SINGLETON_POLICY_ENFORCE = 2 + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + evaluation_mode: EvaluationMode = proto.Field( + proto.ENUM, + number=2, + enum=EvaluationMode, + ) + + +class PodCIDROverprovisionConfig(proto.Message): + r"""[PRIVATE FIELD] Config for pod CIDR size overprovisioning. + + Attributes: + disable (bool): + Whether Pod CIDR overprovisioning is + disabled. Note: Pod CIDR overprovisioning is + enabled by default. + """ + + disable: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class IPAllocationPolicy(proto.Message): + r"""Configuration for controlling how IPs are allocated in the + cluster. + + Attributes: + use_ip_aliases (bool): + Whether alias IPs will be used for pod IPs in the cluster. + This is used in conjunction with use_routes. It cannot be + true if use_routes is true. If both use_ip_aliases and + use_routes are false, then the server picks the default IP + allocation mode + create_subnetwork (bool): + Whether a new subnetwork will be created automatically for + the cluster. + + This field is only applicable when ``use_ip_aliases`` is + true. + subnetwork_name (str): + A custom subnetwork name to be used if ``create_subnetwork`` + is true. If this field is empty, then an automatic name will + be chosen for the new subnetwork. + cluster_ipv4_cidr (str): + This field is deprecated, use cluster_ipv4_cidr_block. 
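Two short construction sketches for the messages above: BinaryAuthorization driven by `evaluation_mode` (the `enabled` boolean is deprecated, as noted), and a Calico-backed NetworkPolicy. Both are illustrative, not part of the generated file.

```python
from google.cloud.container_v1 import types

# Prefer evaluation_mode over the deprecated "enabled" boolean.
binauthz = types.BinaryAuthorization(
    evaluation_mode=(
        types.BinaryAuthorization.EvaluationMode.PROJECT_SINGLETON_POLICY_ENFORCE
    ),
)

# Enable the NetworkPolicy feature with the Calico provider.
network_policy = types.NetworkPolicy(
    provider=types.NetworkPolicy.Provider.CALICO,
    enabled=True,
)
```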
+ node_ipv4_cidr (str): + This field is deprecated, use node_ipv4_cidr_block. + services_ipv4_cidr (str): + This field is deprecated, use services_ipv4_cidr_block. + cluster_secondary_range_name (str): + The name of the secondary range to be used for the cluster + CIDR block. The secondary range will be used for pod IP + addresses. This must be an existing secondary range + associated with the cluster subnetwork. + + This field is only applicable with use_ip_aliases is true + and create_subnetwork is false. + services_secondary_range_name (str): + The name of the secondary range to be used as for the + services CIDR block. The secondary range will be used for + service ClusterIPs. This must be an existing secondary range + associated with the cluster subnetwork. + + This field is only applicable with use_ip_aliases is true + and create_subnetwork is false. + cluster_ipv4_cidr_block (str): + The IP address range for the cluster pod IPs. If this field + is set, then ``cluster.cluster_ipv4_cidr`` must be left + blank. + + This field is only applicable when ``use_ip_aliases`` is + true. + + Set to blank to have a range chosen with the default size. + + Set to /netmask (e.g. ``/14``) to have a range chosen with a + specific netmask. + + Set to a + `CIDR `__ + notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private + networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, + ``192.168.0.0/16``) to pick a specific range to use. + node_ipv4_cidr_block (str): + The IP address range of the instance IPs in this cluster. + + This is applicable only if ``create_subnetwork`` is true. + + Set to blank to have a range chosen with the default size. + + Set to /netmask (e.g. ``/14``) to have a range chosen with a + specific netmask. + + Set to a + `CIDR `__ + notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private + networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, + ``192.168.0.0/16``) to pick a specific range to use. + services_ipv4_cidr_block (str): + The IP address range of the services IPs in this cluster. If + blank, a range will be automatically chosen with the default + size. + + This field is only applicable when ``use_ip_aliases`` is + true. + + Set to blank to have a range chosen with the default size. + + Set to /netmask (e.g. ``/14``) to have a range chosen with a + specific netmask. + + Set to a + `CIDR `__ + notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private + networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, + ``192.168.0.0/16``) to pick a specific range to use. + tpu_ipv4_cidr_block (str): + The IP address range of the Cloud TPUs in this cluster. If + unspecified, a range will be automatically chosen with the + default size. + + This field is only applicable when ``use_ip_aliases`` is + true. + + If unspecified, the range will use the default size. + + Set to /netmask (e.g. ``/14``) to have a range chosen with a + specific netmask. + + Set to a + `CIDR `__ + notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private + networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, + ``192.168.0.0/16``) to pick a specific range to use. + use_routes (bool): + Whether routes will be used for pod IPs in the cluster. This + is used in conjunction with use_ip_aliases. It cannot be + true if use_ip_aliases is true. 
If both use_ip_aliases and
+            use_routes are false, then the server picks the default IP
+            allocation mode.
+        stack_type (google.cloud.container_v1.types.StackType):
+            The IP stack type of the cluster.
+        ipv6_access_type (google.cloud.container_v1.types.IPv6AccessType):
+            The IPv6 access type (internal or external) when
+            create_subnetwork is true.
+        pod_cidr_overprovision_config (google.cloud.container_v1.types.PodCIDROverprovisionConfig):
+            [PRIVATE FIELD] Pod CIDR size overprovisioning config for
+            the cluster.
+
+            Pod CIDR size per node depends on max_pods_per_node. By
+            default, the value of max_pods_per_node is doubled and then
+            rounded off to the next power of 2 to get the size of the
+            pod CIDR block per node. Example: max_pods_per_node of 30
+            would result in 64 IPs (/26).
+
+            This config can disable the doubling of IPs (we still round
+            off to the next power of 2). Example: max_pods_per_node of
+            30 will result in 32 IPs (/27) when overprovisioning is
+            disabled.
+        subnet_ipv6_cidr_block (str):
+            Output only. [Output only] The subnet's IPv6 CIDR block used
+            by nodes and pods.
+        services_ipv6_cidr_block (str):
+            Output only. [Output only] The services IPv6 CIDR block for
+            the cluster.
+        additional_pod_ranges_config (google.cloud.container_v1.types.AdditionalPodRangesConfig):
+            Output only. [Output only] The additional pod ranges that
+            are added to the cluster. These pod ranges can be used by
+            new node pools to allocate pod IPs automatically. Once the
+            range is removed it will not show up in IPAllocationPolicy.
+        default_pod_ipv4_range_utilization (float):
+            Output only. [Output only] The utilization of the cluster
+            default IPv4 range for the pod. The ratio is Usage/[Total
+            number of IPs in the secondary range], where
+            ``Usage = numNodes * numZones * podIPsPerNode``.
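An illustrative VPC-native allocation policy built from the fields above; the secondary range names are placeholders and must already exist on the cluster subnetwork, since `create_subnetwork` is left false here.

```python
from google.cloud.container_v1 import types

# Alias-IP (VPC-native) allocation reusing existing secondary ranges.
ip_policy = types.IPAllocationPolicy(
    use_ip_aliases=True,
    cluster_secondary_range_name="pods-range",       # placeholder
    services_secondary_range_name="services-range",  # placeholder
)
```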
+ """ + + use_ip_aliases: bool = proto.Field( + proto.BOOL, + number=1, + ) + create_subnetwork: bool = proto.Field( + proto.BOOL, + number=2, + ) + subnetwork_name: str = proto.Field( + proto.STRING, + number=3, + ) + cluster_ipv4_cidr: str = proto.Field( + proto.STRING, + number=4, + ) + node_ipv4_cidr: str = proto.Field( + proto.STRING, + number=5, + ) + services_ipv4_cidr: str = proto.Field( + proto.STRING, + number=6, + ) + cluster_secondary_range_name: str = proto.Field( + proto.STRING, + number=7, + ) + services_secondary_range_name: str = proto.Field( + proto.STRING, + number=8, + ) + cluster_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=9, + ) + node_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=10, + ) + services_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=11, + ) + tpu_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=13, + ) + use_routes: bool = proto.Field( + proto.BOOL, + number=15, + ) + stack_type: "StackType" = proto.Field( + proto.ENUM, + number=16, + enum="StackType", + ) + ipv6_access_type: "IPv6AccessType" = proto.Field( + proto.ENUM, + number=17, + enum="IPv6AccessType", + ) + pod_cidr_overprovision_config: "PodCIDROverprovisionConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="PodCIDROverprovisionConfig", + ) + subnet_ipv6_cidr_block: str = proto.Field( + proto.STRING, + number=22, + ) + services_ipv6_cidr_block: str = proto.Field( + proto.STRING, + number=23, + ) + additional_pod_ranges_config: "AdditionalPodRangesConfig" = proto.Field( + proto.MESSAGE, + number=24, + message="AdditionalPodRangesConfig", + ) + default_pod_ipv4_range_utilization: float = proto.Field( + proto.DOUBLE, + number=25, + ) + + +class Cluster(proto.Message): + r"""A Google Kubernetes Engine cluster. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of this cluster. The name must be unique within + this project and location (e.g. zone or region), and can be + up to 40 characters with the following restrictions: + + - Lowercase letters, numbers, and hyphens only. + - Must start with a letter. + - Must end with a number or a letter. + description (str): + An optional description of this cluster. + initial_node_count (int): + The number of nodes to create in this cluster. You must + ensure that your Compute Engine `resource + quota `__ is + sufficient for this number of instances. You must also have + available firewall and routes quota. For requests, this + field should only be used in lieu of a "node_pool" object, + since this configuration (along with the "node_config") will + be used to create a "NodePool" object with an auto-generated + name. Do not use this and a node_pool at the same time. + + This field is deprecated, use node_pool.initial_node_count + instead. + node_config (google.cloud.container_v1.types.NodeConfig): + Parameters used in creating the cluster's nodes. For + requests, this field should only be used in lieu of a + "node_pool" object, since this configuration (along with the + "initial_node_count") will be used to create a "NodePool" + object with an auto-generated name. Do not use this and a + node_pool at the same time. For responses, this field will + be populated with the node configuration of the first node + pool. (For configuration of each node pool, see + ``node_pool.config``) + + If unspecified, the defaults are used. This field is + deprecated, use node_pool.config instead. 
+ master_auth (google.cloud.container_v1.types.MasterAuth): + The authentication information for accessing the master + endpoint. If unspecified, the defaults are used: For + clusters before v1.12, if master_auth is unspecified, + ``username`` will be set to "admin", a random password will + be generated, and a client certificate will be issued. + logging_service (str): + The logging service the cluster should use to write logs. + Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud Logging + service with a Kubernetes-native resource model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be used + for GKE 1.14+ or ``logging.googleapis.com`` for earlier + versions. + monitoring_service (str): + The monitoring service the cluster should use to write + metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE 1.15). + - ``none`` - No metrics will be exported from the cluster. + + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``monitoring.googleapis.com`` for + earlier versions. + network (str): + The name of the Google Compute Engine + `network `__ + to which the cluster is connected. If left unspecified, the + ``default`` network will be used. + cluster_ipv4_cidr (str): + The IP address range of the container pods in this cluster, + in + `CIDR `__ + notation (e.g. ``10.96.0.0/14``). Leave blank to have one + automatically chosen or specify a ``/14`` block in + ``10.0.0.0/8``. + addons_config (google.cloud.container_v1.types.AddonsConfig): + Configurations for the various addons + available to run in the cluster. + subnetwork (str): + The name of the Google Compute Engine + `subnetwork `__ + to which the cluster is connected. + node_pools (MutableSequence[google.cloud.container_v1.types.NodePool]): + The node pools associated with this cluster. This field + should not be set if "node_config" or "initial_node_count" + are specified. + locations (MutableSequence[str]): + The list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. + + This field provides a default value if + `NodePool.Locations `__ + are not specified during node pool creation. + + Warning: changing cluster locations will update the + `NodePool.Locations `__ + of all node pools and will result in nodes being added + and/or removed. + enable_kubernetes_alpha (bool): + Kubernetes alpha features are enabled on this + cluster. This includes alpha API groups (e.g. + v1alpha1) and features that may not be + production ready in the kubernetes version of + the master and nodes. The cluster has no SLA for + uptime and master/node upgrades are disabled. + Alpha enabled clusters are automatically deleted + thirty days after creation. + resource_labels (MutableMapping[str, str]): + The resource labels for the cluster to use to + annotate any related Google Compute Engine + resources. + label_fingerprint (str): + The fingerprint of the set of labels for this + cluster. + legacy_abac (google.cloud.container_v1.types.LegacyAbac): + Configuration for the legacy ABAC + authorization mode. 
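A minimal Cluster sketch wiring up the Kubernetes-native logging and monitoring services named above; the cluster and node pool names are placeholders.

```python
from google.cloud.container_v1 import types

cluster = types.Cluster(
    name="example-cluster",
    logging_service="logging.googleapis.com/kubernetes",
    monitoring_service="monitoring.googleapis.com/kubernetes",
    node_pools=[
        types.NodePool(name="default-pool", initial_node_count=3),
    ],
)
```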
+ network_policy (google.cloud.container_v1.types.NetworkPolicy): + Configuration options for the NetworkPolicy + feature. + ip_allocation_policy (google.cloud.container_v1.types.IPAllocationPolicy): + Configuration for cluster IP allocation. + master_authorized_networks_config (google.cloud.container_v1.types.MasterAuthorizedNetworksConfig): + The configuration options for master + authorized networks feature. + maintenance_policy (google.cloud.container_v1.types.MaintenancePolicy): + Configure the maintenance policy for this + cluster. + binary_authorization (google.cloud.container_v1.types.BinaryAuthorization): + Configuration for Binary Authorization. + autoscaling (google.cloud.container_v1.types.ClusterAutoscaling): + Cluster-level autoscaling configuration. + network_config (google.cloud.container_v1.types.NetworkConfig): + Configuration for cluster networking. + default_max_pods_constraint (google.cloud.container_v1.types.MaxPodsConstraint): + The default constraint on the maximum number + of pods that can be run simultaneously on a node + in the node pool of this cluster. Only honored + if cluster created with IP Alias support. + resource_usage_export_config (google.cloud.container_v1.types.ResourceUsageExportConfig): + Configuration for exporting resource usages. + Resource usage export is disabled when this + config is unspecified. + authenticator_groups_config (google.cloud.container_v1.types.AuthenticatorGroupsConfig): + Configuration controlling RBAC group + membership information. + private_cluster_config (google.cloud.container_v1.types.PrivateClusterConfig): + Configuration for private cluster. + database_encryption (google.cloud.container_v1.types.DatabaseEncryption): + Configuration of etcd encryption. + vertical_pod_autoscaling (google.cloud.container_v1.types.VerticalPodAutoscaling): + Cluster-level Vertical Pod Autoscaling + configuration. + shielded_nodes (google.cloud.container_v1.types.ShieldedNodes): + Shielded Nodes configuration. + release_channel (google.cloud.container_v1.types.ReleaseChannel): + Release channel configuration. If left + unspecified on cluster creation and a version is + specified, the cluster is enrolled in the most + mature release channel where the version is + available (first checking STABLE, then REGULAR, + and finally RAPID). Otherwise, if no release + channel configuration and no version is + specified, the cluster is enrolled in the + REGULAR channel with its default version. + workload_identity_config (google.cloud.container_v1.types.WorkloadIdentityConfig): + Configuration for the use of Kubernetes + Service Accounts in GCP IAM policies. + mesh_certificates (google.cloud.container_v1.types.MeshCertificates): + Configuration for issuance of mTLS keys and + certificates to Kubernetes pods. + cost_management_config (google.cloud.container_v1.types.CostManagementConfig): + Configuration for the fine-grained cost + management feature. + notification_config (google.cloud.container_v1.types.NotificationConfig): + Notification configuration of the cluster. + confidential_nodes (google.cloud.container_v1.types.ConfidentialNodes): + Configuration of Confidential Nodes. + All the nodes in the cluster will be + Confidential VM once enabled. + identity_service_config (google.cloud.container_v1.types.IdentityServiceConfig): + Configuration for Identity Service component. + self_link (str): + [Output only] Server-defined URL for the resource. 
+ zone (str): + [Output only] The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field is deprecated, use + location instead. + endpoint (str): + [Output only] The IP address of this cluster's master + endpoint. The endpoint can be accessed from the internet at + ``https://username:password@endpoint/``. + + See the ``masterAuth`` property of this resource for + username and password information. + initial_cluster_version (str): + The initial Kubernetes version for this + cluster. Valid versions are those found in + validMasterVersions returned by getServerConfig. + The version can be upgraded over time; such + upgrades are reflected in currentMasterVersion + and currentNodeVersion. + + Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "","-": picks the default Kubernetes version + current_master_version (str): + [Output only] The current software version of the master + endpoint. + current_node_version (str): + [Output only] Deprecated, use + `NodePools.version `__ + instead. The current version of the node software + components. If they are currently at multiple versions + because they're in the process of being upgraded, this + reflects the minimum version of all nodes. + create_time (str): + [Output only] The time the cluster was created, in + `RFC3339 `__ text + format. + status (google.cloud.container_v1.types.Cluster.Status): + [Output only] The current status of this cluster. + status_message (str): + [Output only] Deprecated. Use conditions instead. Additional + information about the current status of this cluster, if + available. + node_ipv4_cidr_size (int): + [Output only] The size of the address space on each node for + hosting containers. This is provisioned from within the + ``container_ipv4_cidr`` range. This field will only be set + when cluster is in route-based network mode. + services_ipv4_cidr (str): + [Output only] The IP address range of the Kubernetes + services in this cluster, in + `CIDR `__ + notation (e.g. ``1.2.3.4/29``). Service addresses are + typically put in the last ``/16`` from the container CIDR. + instance_group_urls (MutableSequence[str]): + Deprecated. Use node_pools.instance_group_urls. + current_node_count (int): + [Output only] The number of nodes currently in the cluster. + Deprecated. Call Kubernetes API directly to retrieve node + information. + expire_time (str): + [Output only] The time the cluster will be automatically + deleted in + `RFC3339 `__ text + format. + location (str): + [Output only] The name of the Google Compute Engine + `zone `__ + or + `region `__ + in which the cluster resides. + enable_tpu (bool): + Enable the ability to use Cloud TPUs in this + cluster. + tpu_ipv4_cidr_block (str): + [Output only] The IP address range of the Cloud TPUs in this + cluster, in + `CIDR `__ + notation (e.g. ``1.2.3.4/29``). + conditions (MutableSequence[google.cloud.container_v1.types.StatusCondition]): + Which conditions caused the current cluster + state. + autopilot (google.cloud.container_v1.types.Autopilot): + Autopilot configuration for the cluster. + id (str): + Output only. Unique id for the cluster. 
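Assuming the standard ClusterManagerClient, a sketch of cluster creation using one of the version aliases described above; the project and location are placeholders.

```python
from google.cloud import container_v1

client = container_v1.ClusterManagerClient()
cluster = container_v1.Cluster(
    name="example-cluster",
    initial_cluster_version="latest",  # version alias, see above
    node_pools=[
        container_v1.NodePool(name="default-pool", initial_node_count=1),
    ],
)
# Returns a container_v1.Operation describing the create in progress.
operation = client.create_cluster(
    parent="projects/my-project/locations/us-central1",
    cluster=cluster,
)
```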
+ node_pool_defaults (google.cloud.container_v1.types.NodePoolDefaults): + Default NodePool settings for the entire + cluster. These settings are overridden if + specified on the specific NodePool object. + + This field is a member of `oneof`_ ``_node_pool_defaults``. + logging_config (google.cloud.container_v1.types.LoggingConfig): + Logging configuration for the cluster. + monitoring_config (google.cloud.container_v1.types.MonitoringConfig): + Monitoring configuration for the cluster. + node_pool_auto_config (google.cloud.container_v1.types.NodePoolAutoConfig): + Node pool configs that apply to all + auto-provisioned node pools in autopilot + clusters and node auto-provisioning enabled + clusters. + etag (str): + This checksum is computed by the server based + on the value of cluster fields, and may be sent + on update requests to ensure the client has an + up-to-date value before proceeding. + fleet (google.cloud.container_v1.types.Fleet): + Fleet information for the cluster. + security_posture_config (google.cloud.container_v1.types.SecurityPostureConfig): + Enable/Disable Security Posture API features + for the cluster. + enable_k8s_beta_apis (google.cloud.container_v1.types.K8sBetaAPIConfig): + Beta APIs Config + """ + + class Status(proto.Enum): + r"""The current status of the cluster. + + Values: + STATUS_UNSPECIFIED (0): + Not set. + PROVISIONING (1): + The PROVISIONING state indicates the cluster + is being created. + RUNNING (2): + The RUNNING state indicates the cluster has + been created and is fully usable. + RECONCILING (3): + The RECONCILING state indicates that some work is actively + being done on the cluster, such as upgrading the master or + node software. Details can be found in the ``statusMessage`` + field. + STOPPING (4): + The STOPPING state indicates the cluster is + being deleted. + ERROR (5): + The ERROR state indicates the cluster is unusable. It will + be automatically deleted. Details can be found in the + ``statusMessage`` field. + DEGRADED (6): + The DEGRADED state indicates the cluster requires user + action to restore full functionality. Details can be found + in the ``statusMessage`` field. 
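A sketch of reading the Status enum back from a live cluster; the resource name is a placeholder.

```python
from google.cloud import container_v1

client = container_v1.ClusterManagerClient()
cluster = client.get_cluster(
    name="projects/my-project/locations/us-central1/clusters/example-cluster"
)
if cluster.status == container_v1.Cluster.Status.RUNNING:
    print("cluster is ready")
elif cluster.status in (
    container_v1.Cluster.Status.ERROR,
    container_v1.Cluster.Status.DEGRADED,
):
    # Details for ERROR/DEGRADED surface in status_message.
    print(cluster.status_message)
```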
+ """ + STATUS_UNSPECIFIED = 0 + PROVISIONING = 1 + RUNNING = 2 + RECONCILING = 3 + STOPPING = 4 + ERROR = 5 + DEGRADED = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + initial_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + node_config: "NodeConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="NodeConfig", + ) + master_auth: "MasterAuth" = proto.Field( + proto.MESSAGE, + number=5, + message="MasterAuth", + ) + logging_service: str = proto.Field( + proto.STRING, + number=6, + ) + monitoring_service: str = proto.Field( + proto.STRING, + number=7, + ) + network: str = proto.Field( + proto.STRING, + number=8, + ) + cluster_ipv4_cidr: str = proto.Field( + proto.STRING, + number=9, + ) + addons_config: "AddonsConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="AddonsConfig", + ) + subnetwork: str = proto.Field( + proto.STRING, + number=11, + ) + node_pools: MutableSequence["NodePool"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="NodePool", + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + enable_kubernetes_alpha: bool = proto.Field( + proto.BOOL, + number=14, + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=15, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=16, + ) + legacy_abac: "LegacyAbac" = proto.Field( + proto.MESSAGE, + number=18, + message="LegacyAbac", + ) + network_policy: "NetworkPolicy" = proto.Field( + proto.MESSAGE, + number=19, + message="NetworkPolicy", + ) + ip_allocation_policy: "IPAllocationPolicy" = proto.Field( + proto.MESSAGE, + number=20, + message="IPAllocationPolicy", + ) + master_authorized_networks_config: "MasterAuthorizedNetworksConfig" = proto.Field( + proto.MESSAGE, + number=22, + message="MasterAuthorizedNetworksConfig", + ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=23, + message="MaintenancePolicy", + ) + binary_authorization: "BinaryAuthorization" = proto.Field( + proto.MESSAGE, + number=24, + message="BinaryAuthorization", + ) + autoscaling: "ClusterAutoscaling" = proto.Field( + proto.MESSAGE, + number=26, + message="ClusterAutoscaling", + ) + network_config: "NetworkConfig" = proto.Field( + proto.MESSAGE, + number=27, + message="NetworkConfig", + ) + default_max_pods_constraint: "MaxPodsConstraint" = proto.Field( + proto.MESSAGE, + number=30, + message="MaxPodsConstraint", + ) + resource_usage_export_config: "ResourceUsageExportConfig" = proto.Field( + proto.MESSAGE, + number=33, + message="ResourceUsageExportConfig", + ) + authenticator_groups_config: "AuthenticatorGroupsConfig" = proto.Field( + proto.MESSAGE, + number=34, + message="AuthenticatorGroupsConfig", + ) + private_cluster_config: "PrivateClusterConfig" = proto.Field( + proto.MESSAGE, + number=37, + message="PrivateClusterConfig", + ) + database_encryption: "DatabaseEncryption" = proto.Field( + proto.MESSAGE, + number=38, + message="DatabaseEncryption", + ) + vertical_pod_autoscaling: "VerticalPodAutoscaling" = proto.Field( + proto.MESSAGE, + number=39, + message="VerticalPodAutoscaling", + ) + shielded_nodes: "ShieldedNodes" = proto.Field( + proto.MESSAGE, + number=40, + message="ShieldedNodes", + ) + release_channel: "ReleaseChannel" = proto.Field( + proto.MESSAGE, + number=41, + message="ReleaseChannel", + ) + workload_identity_config: "WorkloadIdentityConfig" = proto.Field( + proto.MESSAGE, + 
number=43, + message="WorkloadIdentityConfig", + ) + mesh_certificates: "MeshCertificates" = proto.Field( + proto.MESSAGE, + number=67, + message="MeshCertificates", + ) + cost_management_config: "CostManagementConfig" = proto.Field( + proto.MESSAGE, + number=45, + message="CostManagementConfig", + ) + notification_config: "NotificationConfig" = proto.Field( + proto.MESSAGE, + number=49, + message="NotificationConfig", + ) + confidential_nodes: "ConfidentialNodes" = proto.Field( + proto.MESSAGE, + number=50, + message="ConfidentialNodes", + ) + identity_service_config: "IdentityServiceConfig" = proto.Field( + proto.MESSAGE, + number=54, + message="IdentityServiceConfig", + ) + self_link: str = proto.Field( + proto.STRING, + number=100, + ) + zone: str = proto.Field( + proto.STRING, + number=101, + ) + endpoint: str = proto.Field( + proto.STRING, + number=102, + ) + initial_cluster_version: str = proto.Field( + proto.STRING, + number=103, + ) + current_master_version: str = proto.Field( + proto.STRING, + number=104, + ) + current_node_version: str = proto.Field( + proto.STRING, + number=105, + ) + create_time: str = proto.Field( + proto.STRING, + number=106, + ) + status: Status = proto.Field( + proto.ENUM, + number=107, + enum=Status, + ) + status_message: str = proto.Field( + proto.STRING, + number=108, + ) + node_ipv4_cidr_size: int = proto.Field( + proto.INT32, + number=109, + ) + services_ipv4_cidr: str = proto.Field( + proto.STRING, + number=110, + ) + instance_group_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=111, + ) + current_node_count: int = proto.Field( + proto.INT32, + number=112, + ) + expire_time: str = proto.Field( + proto.STRING, + number=113, + ) + location: str = proto.Field( + proto.STRING, + number=114, + ) + enable_tpu: bool = proto.Field( + proto.BOOL, + number=115, + ) + tpu_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=116, + ) + conditions: MutableSequence["StatusCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=118, + message="StatusCondition", + ) + autopilot: "Autopilot" = proto.Field( + proto.MESSAGE, + number=128, + message="Autopilot", + ) + id: str = proto.Field( + proto.STRING, + number=129, + ) + node_pool_defaults: "NodePoolDefaults" = proto.Field( + proto.MESSAGE, + number=131, + optional=True, + message="NodePoolDefaults", + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=132, + message="LoggingConfig", + ) + monitoring_config: "MonitoringConfig" = proto.Field( + proto.MESSAGE, + number=133, + message="MonitoringConfig", + ) + node_pool_auto_config: "NodePoolAutoConfig" = proto.Field( + proto.MESSAGE, + number=136, + message="NodePoolAutoConfig", + ) + etag: str = proto.Field( + proto.STRING, + number=139, + ) + fleet: "Fleet" = proto.Field( + proto.MESSAGE, + number=140, + message="Fleet", + ) + security_posture_config: "SecurityPostureConfig" = proto.Field( + proto.MESSAGE, + number=145, + message="SecurityPostureConfig", + ) + enable_k8s_beta_apis: "K8sBetaAPIConfig" = proto.Field( + proto.MESSAGE, + number=143, + message="K8sBetaAPIConfig", + ) + + +class K8sBetaAPIConfig(proto.Message): + r"""K8sBetaAPIConfig , configuration for beta APIs + + Attributes: + enabled_apis (MutableSequence[str]): + Enabled k8s beta APIs. 
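A construction sketch for the beta-API config above; the API identifier string is purely illustrative and not taken from this diff.

```python
from google.cloud.container_v1 import types

# Enable a Kubernetes beta API on the cluster. The string below is an
# illustrative placeholder for an enabled beta API group/version.
cluster = types.Cluster(
    name="example-cluster",
    enable_k8s_beta_apis=types.K8sBetaAPIConfig(
        enabled_apis=["storage.k8s.io/v1beta1/csistoragecapacities"],
    ),
)
```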
+ """ + + enabled_apis: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class SecurityPostureConfig(proto.Message): + r"""SecurityPostureConfig defines the flags needed to + enable/disable features for the Security Posture API. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (google.cloud.container_v1.types.SecurityPostureConfig.Mode): + Sets which mode to use for Security Posture + features. + + This field is a member of `oneof`_ ``_mode``. + vulnerability_mode (google.cloud.container_v1.types.SecurityPostureConfig.VulnerabilityMode): + Sets which mode to use for vulnerability + scanning. + + This field is a member of `oneof`_ ``_vulnerability_mode``. + """ + + class Mode(proto.Enum): + r"""Mode defines enablement mode for GKE Security posture + features. + + Values: + MODE_UNSPECIFIED (0): + Default value not specified. + DISABLED (1): + Disables Security Posture features on the + cluster. + BASIC (2): + Applies Security Posture features on the + cluster. + """ + MODE_UNSPECIFIED = 0 + DISABLED = 1 + BASIC = 2 + + class VulnerabilityMode(proto.Enum): + r"""VulnerabilityMode defines enablement mode for vulnerability + scanning. + + Values: + VULNERABILITY_MODE_UNSPECIFIED (0): + Default value not specified. + VULNERABILITY_DISABLED (1): + Disables vulnerability scanning on the + cluster. + VULNERABILITY_BASIC (2): + Applies basic vulnerability scanning on the + cluster. + """ + VULNERABILITY_MODE_UNSPECIFIED = 0 + VULNERABILITY_DISABLED = 1 + VULNERABILITY_BASIC = 2 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Mode, + ) + vulnerability_mode: VulnerabilityMode = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=VulnerabilityMode, + ) + + +class NodePoolAutoConfig(proto.Message): + r"""Node pool configs that apply to all auto-provisioned node + pools in autopilot clusters and node auto-provisioning enabled + clusters. + + Attributes: + network_tags (google.cloud.container_v1.types.NetworkTags): + The list of instance tags applied to all + nodes. Tags are used to identify valid sources + or targets for network firewalls and are + specified by the client during cluster creation. + Each tag within the list must comply with + RFC1035. + """ + + network_tags: "NetworkTags" = proto.Field( + proto.MESSAGE, + number=1, + message="NetworkTags", + ) + + +class NodePoolDefaults(proto.Message): + r"""Subset of Nodepool message that has defaults. + + Attributes: + node_config_defaults (google.cloud.container_v1.types.NodeConfigDefaults): + Subset of NodeConfig message that has + defaults. + """ + + node_config_defaults: "NodeConfigDefaults" = proto.Field( + proto.MESSAGE, + number=1, + message="NodeConfigDefaults", + ) + + +class NodeConfigDefaults(proto.Message): + r"""Subset of NodeConfig message that has defaults. + + Attributes: + gcfs_config (google.cloud.container_v1.types.GcfsConfig): + GCFS (Google Container File System, also + known as Riptide) options. + logging_config (google.cloud.container_v1.types.NodePoolLoggingConfig): + Logging configuration for node pools. + """ + + gcfs_config: "GcfsConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="GcfsConfig", + ) + logging_config: "NodePoolLoggingConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="NodePoolLoggingConfig", + ) + + +class ClusterUpdate(proto.Message): + r"""ClusterUpdate describes an update to the cluster. 
Exactly one + update can be applied to a cluster with each request, so at most + one field can be provided. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + desired_node_version (str): + The Kubernetes version to change the nodes to + (typically an upgrade). + + Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "-": picks the Kubernetes master version + desired_monitoring_service (str): + The monitoring service the cluster should use to write + metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE 1.15). + - ``none`` - No metrics will be exported from the cluster. + + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``monitoring.googleapis.com`` for + earlier versions. + desired_addons_config (google.cloud.container_v1.types.AddonsConfig): + Configurations for the various addons + available to run in the cluster. + desired_node_pool_id (str): + The node pool to be upgraded. This field is mandatory if + "desired_node_version", "desired_image_family" or + "desired_node_pool_autoscaling" is specified and there is + more than one node pool on the cluster. + desired_image_type (str): + The desired image type for the node pool. NOTE: Set the + "desired_node_pool" field as well. + desired_database_encryption (google.cloud.container_v1.types.DatabaseEncryption): + Configuration of etcd encryption. + desired_workload_identity_config (google.cloud.container_v1.types.WorkloadIdentityConfig): + Configuration for Workload Identity. + desired_mesh_certificates (google.cloud.container_v1.types.MeshCertificates): + Configuration for issuance of mTLS keys and + certificates to Kubernetes pods. + desired_shielded_nodes (google.cloud.container_v1.types.ShieldedNodes): + Configuration for Shielded Nodes. + desired_cost_management_config (google.cloud.container_v1.types.CostManagementConfig): + The desired configuration for the + fine-grained cost management feature. + desired_dns_config (google.cloud.container_v1.types.DNSConfig): + DNSConfig contains clusterDNS config for this + cluster. + desired_node_pool_autoscaling (google.cloud.container_v1.types.NodePoolAutoscaling): + Autoscaler configuration for the node pool specified in + desired_node_pool_id. If there is only one pool in the + cluster and desired_node_pool_id is not provided then the + change applies to that single node pool. + desired_locations (MutableSequence[str]): + The desired list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. + + This list must always include the cluster's primary zone. + + Warning: changing cluster locations will update the + locations of all node pools and will result in nodes being + added and/or removed. + desired_master_authorized_networks_config (google.cloud.container_v1.types.MasterAuthorizedNetworksConfig): + The desired configuration options for master + authorized networks feature. 
+ desired_cluster_autoscaling (google.cloud.container_v1.types.ClusterAutoscaling): + Cluster-level autoscaling configuration. + desired_binary_authorization (google.cloud.container_v1.types.BinaryAuthorization): + The desired configuration options for the + Binary Authorization feature. + desired_logging_service (str): + The logging service the cluster should use to write logs. + Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud Logging + service with a Kubernetes-native resource model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be used + for GKE 1.14+ or ``logging.googleapis.com`` for earlier + versions. + desired_resource_usage_export_config (google.cloud.container_v1.types.ResourceUsageExportConfig): + The desired configuration for exporting + resource usage. + desired_vertical_pod_autoscaling (google.cloud.container_v1.types.VerticalPodAutoscaling): + Cluster-level Vertical Pod Autoscaling + configuration. + desired_private_cluster_config (google.cloud.container_v1.types.PrivateClusterConfig): + The desired private cluster configuration. + desired_intra_node_visibility_config (google.cloud.container_v1.types.IntraNodeVisibilityConfig): + The desired config of Intra-node visibility. + desired_default_snat_status (google.cloud.container_v1.types.DefaultSnatStatus): + The desired status of whether to disable + default sNAT for this cluster. + desired_release_channel (google.cloud.container_v1.types.ReleaseChannel): + The desired release channel configuration. + desired_l4ilb_subsetting_config (google.cloud.container_v1.types.ILBSubsettingConfig): + The desired L4 Internal Load Balancer + Subsetting configuration. + desired_datapath_provider (google.cloud.container_v1.types.DatapathProvider): + The desired datapath provider for the + cluster. + desired_private_ipv6_google_access (google.cloud.container_v1.types.PrivateIPv6GoogleAccess): + The desired state of IPv6 connectivity to + Google Services. + desired_notification_config (google.cloud.container_v1.types.NotificationConfig): + The desired notification configuration. + desired_authenticator_groups_config (google.cloud.container_v1.types.AuthenticatorGroupsConfig): + The desired authenticator groups config for + the cluster. + desired_logging_config (google.cloud.container_v1.types.LoggingConfig): + The desired logging configuration. + desired_monitoring_config (google.cloud.container_v1.types.MonitoringConfig): + The desired monitoring configuration. + desired_identity_service_config (google.cloud.container_v1.types.IdentityServiceConfig): + The desired Identity Service component + configuration. + desired_service_external_ips_config (google.cloud.container_v1.types.ServiceExternalIPsConfig): + ServiceExternalIPsConfig specifies the config + for the use of Services with ExternalIPs field. + desired_enable_private_endpoint (bool): + Enable/Disable private endpoint for the + cluster's master. + + This field is a member of `oneof`_ ``_desired_enable_private_endpoint``. + desired_master_version (str): + The Kubernetes version to change the master + to. 
+ Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "-": picks the default Kubernetes version + desired_gcfs_config (google.cloud.container_v1.types.GcfsConfig): + The desired GCFS config for the cluster + desired_node_pool_auto_config_network_tags (google.cloud.container_v1.types.NetworkTags): + The desired network tags that apply to all + auto-provisioned node pools in autopilot + clusters and node auto-provisioning enabled + clusters. + desired_gateway_api_config (google.cloud.container_v1.types.GatewayAPIConfig): + The desired config of Gateway API on this + cluster. + etag (str): + The current etag of the cluster. + If an etag is provided and does not match the + current etag of the cluster, update will be + blocked and an ABORTED error will be returned. + desired_node_pool_logging_config (google.cloud.container_v1.types.NodePoolLoggingConfig): + The desired node pool logging configuration + defaults for the cluster. + desired_fleet (google.cloud.container_v1.types.Fleet): + The desired fleet configuration for the + cluster. + desired_stack_type (google.cloud.container_v1.types.StackType): + The desired stack type of the cluster. + If a stack type is provided and does not match + the current stack type of the cluster, update + will attempt to change the stack type to the new + type. + additional_pod_ranges_config (google.cloud.container_v1.types.AdditionalPodRangesConfig): + The additional pod ranges to be added to the + cluster. These pod ranges can be used by node + pools to allocate pod IPs. + removed_additional_pod_ranges_config (google.cloud.container_v1.types.AdditionalPodRangesConfig): + The additional pod ranges that are to be removed from the + cluster. The pod ranges specified here must have been + specified earlier in the 'additional_pod_ranges_config' + argument. + enable_k8s_beta_apis (google.cloud.container_v1.types.K8sBetaAPIConfig): + Kubernetes open source beta apis enabled on + the cluster. Only beta apis + desired_security_posture_config (google.cloud.container_v1.types.SecurityPostureConfig): + Enable/Disable Security Posture API features + for the cluster. + desired_network_performance_config (google.cloud.container_v1.types.NetworkConfig.ClusterNetworkPerformanceConfig): + The desired network performance config. + desired_enable_fqdn_network_policy (bool): + Enable/Disable FQDN Network Policy for the + cluster. + + This field is a member of `oneof`_ ``_desired_enable_fqdn_network_policy``. + desired_autopilot_workload_policy_config (google.cloud.container_v1.types.WorkloadPolicyConfig): + The desired workload policy configuration for + the autopilot cluster. + desired_k8s_beta_apis (google.cloud.container_v1.types.K8sBetaAPIConfig): + Desired Beta APIs to be enabled for cluster. 
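+
+    Example:
+        A minimal sketch of sending a single-field update through
+        ``ClusterManagerClient.update_cluster``; the project, location,
+        and cluster names below are placeholders, and application
+        default credentials are assumed::
+
+            from google.cloud import container_v1
+
+            client = container_v1.ClusterManagerClient()
+
+            # Exactly one update per request: only the Security Posture
+            # configuration is changed here.
+            update = container_v1.ClusterUpdate(
+                desired_security_posture_config=container_v1.SecurityPostureConfig(
+                    mode=container_v1.SecurityPostureConfig.Mode.BASIC,
+                ),
+            )
+            request = container_v1.UpdateClusterRequest(
+                name="projects/my-project/locations/us-central1/clusters/my-cluster",
+                update=update,
+            )
+            operation = client.update_cluster(request=request)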
+ """ + + desired_node_version: str = proto.Field( + proto.STRING, + number=4, + ) + desired_monitoring_service: str = proto.Field( + proto.STRING, + number=5, + ) + desired_addons_config: "AddonsConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="AddonsConfig", + ) + desired_node_pool_id: str = proto.Field( + proto.STRING, + number=7, + ) + desired_image_type: str = proto.Field( + proto.STRING, + number=8, + ) + desired_database_encryption: "DatabaseEncryption" = proto.Field( + proto.MESSAGE, + number=46, + message="DatabaseEncryption", + ) + desired_workload_identity_config: "WorkloadIdentityConfig" = proto.Field( + proto.MESSAGE, + number=47, + message="WorkloadIdentityConfig", + ) + desired_mesh_certificates: "MeshCertificates" = proto.Field( + proto.MESSAGE, + number=67, + message="MeshCertificates", + ) + desired_shielded_nodes: "ShieldedNodes" = proto.Field( + proto.MESSAGE, + number=48, + message="ShieldedNodes", + ) + desired_cost_management_config: "CostManagementConfig" = proto.Field( + proto.MESSAGE, + number=49, + message="CostManagementConfig", + ) + desired_dns_config: "DNSConfig" = proto.Field( + proto.MESSAGE, + number=53, + message="DNSConfig", + ) + desired_node_pool_autoscaling: "NodePoolAutoscaling" = proto.Field( + proto.MESSAGE, + number=9, + message="NodePoolAutoscaling", + ) + desired_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + desired_master_authorized_networks_config: "MasterAuthorizedNetworksConfig" = ( + proto.Field( + proto.MESSAGE, + number=12, + message="MasterAuthorizedNetworksConfig", + ) + ) + desired_cluster_autoscaling: "ClusterAutoscaling" = proto.Field( + proto.MESSAGE, + number=15, + message="ClusterAutoscaling", + ) + desired_binary_authorization: "BinaryAuthorization" = proto.Field( + proto.MESSAGE, + number=16, + message="BinaryAuthorization", + ) + desired_logging_service: str = proto.Field( + proto.STRING, + number=19, + ) + desired_resource_usage_export_config: "ResourceUsageExportConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="ResourceUsageExportConfig", + ) + desired_vertical_pod_autoscaling: "VerticalPodAutoscaling" = proto.Field( + proto.MESSAGE, + number=22, + message="VerticalPodAutoscaling", + ) + desired_private_cluster_config: "PrivateClusterConfig" = proto.Field( + proto.MESSAGE, + number=25, + message="PrivateClusterConfig", + ) + desired_intra_node_visibility_config: "IntraNodeVisibilityConfig" = proto.Field( + proto.MESSAGE, + number=26, + message="IntraNodeVisibilityConfig", + ) + desired_default_snat_status: "DefaultSnatStatus" = proto.Field( + proto.MESSAGE, + number=28, + message="DefaultSnatStatus", + ) + desired_release_channel: "ReleaseChannel" = proto.Field( + proto.MESSAGE, + number=31, + message="ReleaseChannel", + ) + desired_l4ilb_subsetting_config: "ILBSubsettingConfig" = proto.Field( + proto.MESSAGE, + number=39, + message="ILBSubsettingConfig", + ) + desired_datapath_provider: "DatapathProvider" = proto.Field( + proto.ENUM, + number=50, + enum="DatapathProvider", + ) + desired_private_ipv6_google_access: "PrivateIPv6GoogleAccess" = proto.Field( + proto.ENUM, + number=51, + enum="PrivateIPv6GoogleAccess", + ) + desired_notification_config: "NotificationConfig" = proto.Field( + proto.MESSAGE, + number=55, + message="NotificationConfig", + ) + desired_authenticator_groups_config: "AuthenticatorGroupsConfig" = proto.Field( + proto.MESSAGE, + number=63, + message="AuthenticatorGroupsConfig", + ) + desired_logging_config: "LoggingConfig" = 
proto.Field( + proto.MESSAGE, + number=64, + message="LoggingConfig", + ) + desired_monitoring_config: "MonitoringConfig" = proto.Field( + proto.MESSAGE, + number=65, + message="MonitoringConfig", + ) + desired_identity_service_config: "IdentityServiceConfig" = proto.Field( + proto.MESSAGE, + number=66, + message="IdentityServiceConfig", + ) + desired_service_external_ips_config: "ServiceExternalIPsConfig" = proto.Field( + proto.MESSAGE, + number=60, + message="ServiceExternalIPsConfig", + ) + desired_enable_private_endpoint: bool = proto.Field( + proto.BOOL, + number=71, + optional=True, + ) + desired_master_version: str = proto.Field( + proto.STRING, + number=100, + ) + desired_gcfs_config: "GcfsConfig" = proto.Field( + proto.MESSAGE, + number=109, + message="GcfsConfig", + ) + desired_node_pool_auto_config_network_tags: "NetworkTags" = proto.Field( + proto.MESSAGE, + number=110, + message="NetworkTags", + ) + desired_gateway_api_config: "GatewayAPIConfig" = proto.Field( + proto.MESSAGE, + number=114, + message="GatewayAPIConfig", + ) + etag: str = proto.Field( + proto.STRING, + number=115, + ) + desired_node_pool_logging_config: "NodePoolLoggingConfig" = proto.Field( + proto.MESSAGE, + number=116, + message="NodePoolLoggingConfig", + ) + desired_fleet: "Fleet" = proto.Field( + proto.MESSAGE, + number=117, + message="Fleet", + ) + desired_stack_type: "StackType" = proto.Field( + proto.ENUM, + number=119, + enum="StackType", + ) + additional_pod_ranges_config: "AdditionalPodRangesConfig" = proto.Field( + proto.MESSAGE, + number=120, + message="AdditionalPodRangesConfig", + ) + removed_additional_pod_ranges_config: "AdditionalPodRangesConfig" = proto.Field( + proto.MESSAGE, + number=121, + message="AdditionalPodRangesConfig", + ) + enable_k8s_beta_apis: "K8sBetaAPIConfig" = proto.Field( + proto.MESSAGE, + number=122, + message="K8sBetaAPIConfig", + ) + desired_security_posture_config: "SecurityPostureConfig" = proto.Field( + proto.MESSAGE, + number=124, + message="SecurityPostureConfig", + ) + desired_network_performance_config: "NetworkConfig.ClusterNetworkPerformanceConfig" = proto.Field( + proto.MESSAGE, + number=125, + message="NetworkConfig.ClusterNetworkPerformanceConfig", + ) + desired_enable_fqdn_network_policy: bool = proto.Field( + proto.BOOL, + number=126, + optional=True, + ) + desired_autopilot_workload_policy_config: "WorkloadPolicyConfig" = proto.Field( + proto.MESSAGE, + number=128, + message="WorkloadPolicyConfig", + ) + desired_k8s_beta_apis: "K8sBetaAPIConfig" = proto.Field( + proto.MESSAGE, + number=131, + message="K8sBetaAPIConfig", + ) + + +class AdditionalPodRangesConfig(proto.Message): + r"""AdditionalPodRangesConfig is the configuration for additional + pod secondary ranges supporting the ClusterUpdate message. + + Attributes: + pod_range_names (MutableSequence[str]): + Name for pod secondary ipv4 range which has + the actual range defined ahead. + pod_range_info (MutableSequence[google.cloud.container_v1.types.RangeInfo]): + Output only. [Output only] Information for additional pod + range. + """ + + pod_range_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + pod_range_info: MutableSequence["RangeInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RangeInfo", + ) + + +class RangeInfo(proto.Message): + r"""RangeInfo contains the range name and the range utilization + by this cluster. + + Attributes: + range_name (str): + Output only. [Output only] Name of a range. + utilization (float): + Output only. 
[Output only] The utilization of the range.
+    """
+
+    range_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    utilization: float = proto.Field(
+        proto.DOUBLE,
+        number=2,
+    )
+
+
+class Operation(proto.Message):
+    r"""This operation resource represents operations that may have
+    happened or are happening on the cluster. All fields are output
+    only.
+
+    Attributes:
+        name (str):
+            The server-assigned ID for the operation.
+        zone (str):
+            The name of the Google Compute Engine
+            `zone `__
+            in which the operation is taking place. This field is
+            deprecated, use location instead.
+        operation_type (google.cloud.container_v1.types.Operation.Type):
+            The operation type.
+        status (google.cloud.container_v1.types.Operation.Status):
+            The current status of the operation.
+        detail (str):
+            Detailed operation progress, if available.
+        status_message (str):
+            Output only. If an error has occurred, a
+            textual description of the error. Deprecated.
+            Use the field error instead.
+        self_link (str):
+            Server-defined URI for the operation. Example:
+            ``https://container.googleapis.com/v1alpha1/projects/123/locations/us-central1/operations/operation-123``.
+        target_link (str):
+            Server-defined URI for the target of the operation. The
+            format of this is a URI to the resource being modified (such
+            as a cluster, node pool, or node). For node pool repairs,
+            there may be multiple nodes being repaired, but only one
+            will be the target.
+
+            Examples:
+
+            - ``https://container.googleapis.com/v1/projects/123/locations/us-central1/clusters/my-cluster``
+            - ``https://container.googleapis.com/v1/projects/123/zones/us-central1-c/clusters/my-cluster/nodePools/my-np``
+            - ``https://container.googleapis.com/v1/projects/123/zones/us-central1-c/clusters/my-cluster/nodePools/my-np/node/my-node``
+        location (str):
+            [Output only] The name of the Google Compute Engine
+            `zone `__
+            or
+            `region `__
+            in which the cluster resides.
+        start_time (str):
+            [Output only] The time the operation started, in
+            `RFC3339 `__ text
+            format.
+        end_time (str):
+            [Output only] The time the operation completed, in
+            `RFC3339 `__ text
+            format.
+        progress (google.cloud.container_v1.types.OperationProgress):
+            Output only. [Output only] Progress information for an
+            operation.
+        cluster_conditions (MutableSequence[google.cloud.container_v1.types.StatusCondition]):
+            Which conditions caused the current cluster
+            state. Deprecated. Use field error instead.
+        nodepool_conditions (MutableSequence[google.cloud.container_v1.types.StatusCondition]):
+            Which conditions caused the current node pool
+            state. Deprecated. Use field error instead.
+        error (google.rpc.status_pb2.Status):
+            The error result of the operation in case of
+            failure.
+    """
+
+    class Status(proto.Enum):
+        r"""Current status of the operation.
+
+        Values:
+            STATUS_UNSPECIFIED (0):
+                Not set.
+            PENDING (1):
+                The operation has been created.
+            RUNNING (2):
+                The operation is currently running.
+            DONE (3):
+                The operation is done, either cancelled or
+                completed.
+            ABORTING (4):
+                The operation is aborting.
+        """
+        STATUS_UNSPECIFIED = 0
+        PENDING = 1
+        RUNNING = 2
+        DONE = 3
+        ABORTING = 4
+
+    class Type(proto.Enum):
+        r"""Operation type categorizes the operation.
+
+        Values:
+            TYPE_UNSPECIFIED (0):
+                Not set.
+            CREATE_CLUSTER (1):
+                The cluster is being created. The cluster should be assumed
+                to be unusable until the operation finishes.
+
+                In the event of the operation failing, the cluster will
+                enter the [ERROR state][Cluster.Status.ERROR] and eventually
+                be deleted.
+            DELETE_CLUSTER (2):
+                The cluster is being deleted. The cluster should be assumed
+                to be unusable as soon as this operation starts.
+
+                In the event of the operation failing, the cluster will
+                enter the [ERROR state][Cluster.Status.ERROR] and the
+                deletion will be automatically retried until completed.
+            UPGRADE_MASTER (3):
+                The [cluster
+                version][google.container.v1.ClusterUpdate.desired_master_version]
+                is being updated. Note that this includes "upgrades" to the
+                same version, which are simply a recreation. This also
+                includes
+                `auto-upgrades `__.
+                For more details, see `documentation on cluster
+                upgrades `__.
+            UPGRADE_NODES (4):
+                A node pool is being updated. Despite calling this an
+                "upgrade", this includes most forms of updates to node
+                pools. This also includes
+                `auto-upgrades `__.
+
+                This operation sets the
+                [progress][google.container.v1.Operation.progress] field and
+                may be
+                [canceled][google.container.v1.ClusterManager.CancelOperation].
+
+                The upgrade strategy depends on `node pool
+                configuration `__.
+                The nodes are generally still usable during this operation.
+            REPAIR_CLUSTER (5):
+                A problem has been detected with the control plane and is
+                being repaired. This operation type is initiated by GKE. For
+                more details, see `documentation on
+                repairs `__.
+            UPDATE_CLUSTER (6):
+                The cluster is being updated. This is a broad category of
+                operations and includes operations that only change metadata
+                as well as those that must recreate the entire cluster. If
+                the control plane must be recreated, this will cause
+                temporary downtime for zonal clusters.
+
+                Some features require recreating the nodes as well. Those
+                will be recreated as separate operations and the update may
+                not be completely functional until the node pool
+                recreations finish. Node recreations will generally follow
+                `maintenance
+                policies `__.
+
+                Some GKE-initiated operations use this type. This includes
+                certain types of auto-upgrades and incident mitigations.
+            CREATE_NODE_POOL (7):
+                A node pool is being created. The node pool should be
+                assumed to be unusable until this operation finishes. In the
+                event of an error, the node pool may be partially created.
+
+                If enabled, `node
+                autoprovisioning `__
+                may have automatically initiated such operations.
+            DELETE_NODE_POOL (8):
+                The node pool is being deleted. The node pool
+                should be assumed to be unusable as soon as this
+                operation starts.
+            SET_NODE_POOL_MANAGEMENT (9):
+                The node pool's
+                [management][google.container.v1.NodePool.management] field
+                is being updated. These operations only update metadata and
+                may be concurrent with most other operations.
+            AUTO_REPAIR_NODES (10):
+                A problem has been detected with nodes and `they are being
+                repaired `__.
+                This operation type is initiated by GKE, typically
+                automatically. This operation may be concurrent with other
+                operations and there may be multiple repairs occurring on
+                the same node pool.
+            AUTO_UPGRADE_NODES (11):
+                Unused. Automatic node upgrade uses
+                [UPGRADE_NODES][google.container.v1.Operation.Type.UPGRADE_NODES].
+            SET_LABELS (12):
+                Unused. Updating labels uses
+                [UPDATE_CLUSTER][google.container.v1.Operation.Type.UPDATE_CLUSTER].
+            SET_MASTER_AUTH (13):
+                Unused. Updating master auth uses
+                [UPDATE_CLUSTER][google.container.v1.Operation.Type.UPDATE_CLUSTER].
+            SET_NODE_POOL_SIZE (14):
+                The node pool is being resized.
With the + exception of resizing to or from size zero, the + node pool is generally usable during this + operation. + SET_NETWORK_POLICY (15): + Unused. Updating network policy uses + [UPDATE_CLUSTER][google.container.v1.Operation.Type.UPDATE_CLUSTER]. + SET_MAINTENANCE_POLICY (16): + Unused. Updating maintenance policy uses + [UPDATE_CLUSTER][google.container.v1.Operation.Type.UPDATE_CLUSTER]. + RESIZE_CLUSTER (18): + The control plane is being resized. This operation type is + initiated by GKE. These operations are often performed + preemptively to ensure that the control plane has sufficient + resources and is not typically an indication of issues. For + more details, see `documentation on + resizes `__. + """ + TYPE_UNSPECIFIED = 0 + CREATE_CLUSTER = 1 + DELETE_CLUSTER = 2 + UPGRADE_MASTER = 3 + UPGRADE_NODES = 4 + REPAIR_CLUSTER = 5 + UPDATE_CLUSTER = 6 + CREATE_NODE_POOL = 7 + DELETE_NODE_POOL = 8 + SET_NODE_POOL_MANAGEMENT = 9 + AUTO_REPAIR_NODES = 10 + AUTO_UPGRADE_NODES = 11 + SET_LABELS = 12 + SET_MASTER_AUTH = 13 + SET_NODE_POOL_SIZE = 14 + SET_NETWORK_POLICY = 15 + SET_MAINTENANCE_POLICY = 16 + RESIZE_CLUSTER = 18 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + operation_type: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + status: Status = proto.Field( + proto.ENUM, + number=4, + enum=Status, + ) + detail: str = proto.Field( + proto.STRING, + number=8, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + self_link: str = proto.Field( + proto.STRING, + number=6, + ) + target_link: str = proto.Field( + proto.STRING, + number=7, + ) + location: str = proto.Field( + proto.STRING, + number=9, + ) + start_time: str = proto.Field( + proto.STRING, + number=10, + ) + end_time: str = proto.Field( + proto.STRING, + number=11, + ) + progress: "OperationProgress" = proto.Field( + proto.MESSAGE, + number=12, + message="OperationProgress", + ) + cluster_conditions: MutableSequence["StatusCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="StatusCondition", + ) + nodepool_conditions: MutableSequence["StatusCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="StatusCondition", + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=15, + message=status_pb2.Status, + ) + + +class OperationProgress(proto.Message): + r"""Information about operation (or operation stage) progress. + + Attributes: + name (str): + A non-parameterized string describing an + operation stage. Unset for single-stage + operations. + status (google.cloud.container_v1.types.Operation.Status): + Status of an operation stage. + Unset for single-stage operations. + metrics (MutableSequence[google.cloud.container_v1.types.OperationProgress.Metric]): + Progress metric bundle, for example: metrics: [{name: "nodes + done", int_value: 15}, {name: "nodes total", int_value: 32}] + or metrics: [{name: "progress", double_value: 0.56}, {name: + "progress scale", double_value: 1.0}] + stages (MutableSequence[google.cloud.container_v1.types.OperationProgress]): + Substages of an operation or a stage. + """ + + class Metric(proto.Message): + r"""Progress metric is (string, int|float|string) pair. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Metric name, e.g., "nodes total", + "percent done". + int_value (int): + For metrics with integer value. + + This field is a member of `oneof`_ ``value``. + double_value (float): + For metrics with floating point value. + + This field is a member of `oneof`_ ``value``. + string_value (str): + For metrics with custom values (ratios, + visual progress, etc.). + + This field is a member of `oneof`_ ``value``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + int_value: int = proto.Field( + proto.INT64, + number=2, + oneof="value", + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=3, + oneof="value", + ) + string_value: str = proto.Field( + proto.STRING, + number=4, + oneof="value", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + status: "Operation.Status" = proto.Field( + proto.ENUM, + number=2, + enum="Operation.Status", + ) + metrics: MutableSequence[Metric] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Metric, + ) + stages: MutableSequence["OperationProgress"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="OperationProgress", + ) + + +class CreateClusterRequest(proto.Message): + r"""CreateClusterRequest creates a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the parent + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the parent field. + cluster (google.cloud.container_v1.types.Cluster): + Required. A `cluster + resource `__ + parent (str): + The parent (project and location) where the cluster will be + created. Specified in the format ``projects/*/locations/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: "Cluster" = proto.Field( + proto.MESSAGE, + number=3, + message="Cluster", + ) + parent: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetClusterRequest(proto.Message): + r"""GetClusterRequest gets the settings of a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + retrieve. This field has been deprecated and + replaced by the name field. + name (str): + The name (project, location, cluster) of the cluster to + retrieve. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class UpdateClusterRequest(proto.Message): + r"""UpdateClusterRequest updates the settings of a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. 
+ zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + update (google.cloud.container_v1.types.ClusterUpdate): + Required. A description of the update. + name (str): + The name (project, location, cluster) of the cluster to + update. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + update: "ClusterUpdate" = proto.Field( + proto.MESSAGE, + number=4, + message="ClusterUpdate", + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class UpdateNodePoolRequest(proto.Message): + r"""UpdateNodePoolRequests update a node pool's image and/or + version. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Deprecated. The name of the node pool to + upgrade. This field has been deprecated and + replaced by the name field. + node_version (str): + Required. The Kubernetes version to change + the nodes to (typically an upgrade). + + Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "-": picks the Kubernetes master version + image_type (str): + Required. The desired image type for the node + pool. Please see + https://cloud.google.com/kubernetes-engine/docs/concepts/node-images + for available image types. + name (str): + The name (project, location, cluster, node pool) of the node + pool to update. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + locations (MutableSequence[str]): + The desired list of Google Compute Engine + `zones `__ + in which the node pool's nodes should be located. Changing + the locations for a node pool will result in nodes being + either created or removed from the node pool, depending on + whether locations are being added or removed. + workload_metadata_config (google.cloud.container_v1.types.WorkloadMetadataConfig): + The desired workload metadata config for the + node pool. + upgrade_settings (google.cloud.container_v1.types.NodePool.UpgradeSettings): + Upgrade settings control disruption and speed + of the upgrade. + tags (google.cloud.container_v1.types.NetworkTags): + The desired network tags to be applied to all nodes in the + node pool. If this field is not present, the tags will not + be changed. Otherwise, the existing network tags will be + *replaced* with the provided tags. 
+ taints (google.cloud.container_v1.types.NodeTaints): + The desired node taints to be applied to all nodes in the + node pool. If this field is not present, the taints will not + be changed. Otherwise, the existing node taints will be + *replaced* with the provided taints. + labels (google.cloud.container_v1.types.NodeLabels): + The desired node labels to be applied to all nodes in the + node pool. If this field is not present, the labels will not + be changed. Otherwise, the existing node labels will be + *replaced* with the provided labels. + linux_node_config (google.cloud.container_v1.types.LinuxNodeConfig): + Parameters that can be configured on Linux + nodes. + kubelet_config (google.cloud.container_v1.types.NodeKubeletConfig): + Node kubelet configs. + node_network_config (google.cloud.container_v1.types.NodeNetworkConfig): + Node network config. + gcfs_config (google.cloud.container_v1.types.GcfsConfig): + GCFS config. + confidential_nodes (google.cloud.container_v1.types.ConfidentialNodes): + Confidential nodes config. + All the nodes in the node pool will be + Confidential VM once enabled. + gvnic (google.cloud.container_v1.types.VirtualNIC): + Enable or disable gvnic on the node pool. + etag (str): + The current etag of the node pool. + If an etag is provided and does not match the + current etag of the node pool, update will be + blocked and an ABORTED error will be returned. + fast_socket (google.cloud.container_v1.types.FastSocket): + Enable or disable NCCL fast socket for the + node pool. + logging_config (google.cloud.container_v1.types.NodePoolLoggingConfig): + Logging configuration. + resource_labels (google.cloud.container_v1.types.ResourceLabels): + The resource labels for the node pool to use + to annotate any related Google Compute Engine + resources. + windows_node_config (google.cloud.container_v1.types.WindowsNodeConfig): + Parameters that can be configured on Windows + nodes. + machine_type (str): + Optional. The desired `Google Compute Engine machine + type `__ + for nodes in the node pool. Initiates an upgrade operation + that migrates the nodes in the node pool to the specified + machine type. + disk_type (str): + Optional. The desired disk type (e.g. + 'pd-standard', 'pd-ssd' or 'pd-balanced') for + nodes in the node pool. Initiates an upgrade + operation that migrates the nodes in the node + pool to the specified disk type. + disk_size_gb (int): + Optional. The desired disk size for nodes in + the node pool specified in GB. The smallest + allowed disk size is 10GB. Initiates an upgrade + operation that migrates the nodes in the node + pool to the specified disk size. 
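+
+    Example:
+        A minimal sketch of a node pool version upgrade built from this
+        request; the project, location, cluster, and node pool names are
+        placeholders, and application default credentials are assumed::
+
+            from google.cloud import container_v1
+
+            client = container_v1.ClusterManagerClient()
+
+            request = container_v1.UpdateNodePoolRequest(
+                name=(
+                    "projects/my-project/locations/us-central1"
+                    "/clusters/my-cluster/nodePools/my-pool"
+                ),
+                # "1.27" is a version alias: the highest valid 1.27 patch.
+                node_version="1.27",
+                image_type="COS_CONTAINERD",
+            )
+            operation = client.update_node_pool(request=request)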
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + node_version: str = proto.Field( + proto.STRING, + number=5, + ) + image_type: str = proto.Field( + proto.STRING, + number=6, + ) + name: str = proto.Field( + proto.STRING, + number=8, + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + workload_metadata_config: "WorkloadMetadataConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="WorkloadMetadataConfig", + ) + upgrade_settings: "NodePool.UpgradeSettings" = proto.Field( + proto.MESSAGE, + number=15, + message="NodePool.UpgradeSettings", + ) + tags: "NetworkTags" = proto.Field( + proto.MESSAGE, + number=16, + message="NetworkTags", + ) + taints: "NodeTaints" = proto.Field( + proto.MESSAGE, + number=17, + message="NodeTaints", + ) + labels: "NodeLabels" = proto.Field( + proto.MESSAGE, + number=18, + message="NodeLabels", + ) + linux_node_config: "LinuxNodeConfig" = proto.Field( + proto.MESSAGE, + number=19, + message="LinuxNodeConfig", + ) + kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=20, + message="NodeKubeletConfig", + ) + node_network_config: "NodeNetworkConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="NodeNetworkConfig", + ) + gcfs_config: "GcfsConfig" = proto.Field( + proto.MESSAGE, + number=22, + message="GcfsConfig", + ) + confidential_nodes: "ConfidentialNodes" = proto.Field( + proto.MESSAGE, + number=23, + message="ConfidentialNodes", + ) + gvnic: "VirtualNIC" = proto.Field( + proto.MESSAGE, + number=29, + message="VirtualNIC", + ) + etag: str = proto.Field( + proto.STRING, + number=30, + ) + fast_socket: "FastSocket" = proto.Field( + proto.MESSAGE, + number=31, + message="FastSocket", + ) + logging_config: "NodePoolLoggingConfig" = proto.Field( + proto.MESSAGE, + number=32, + message="NodePoolLoggingConfig", + ) + resource_labels: "ResourceLabels" = proto.Field( + proto.MESSAGE, + number=33, + message="ResourceLabels", + ) + windows_node_config: "WindowsNodeConfig" = proto.Field( + proto.MESSAGE, + number=34, + message="WindowsNodeConfig", + ) + machine_type: str = proto.Field( + proto.STRING, + number=36, + ) + disk_type: str = proto.Field( + proto.STRING, + number=37, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=38, + ) + + +class SetNodePoolAutoscalingRequest(proto.Message): + r"""SetNodePoolAutoscalingRequest sets the autoscaler settings of + a node pool. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Deprecated. The name of the node pool to + upgrade. This field has been deprecated and + replaced by the name field. + autoscaling (google.cloud.container_v1.types.NodePoolAutoscaling): + Required. Autoscaling configuration for the + node pool. + name (str): + The name (project, location, cluster, node pool) of the node + pool to set autoscaler settings. 
Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + autoscaling: "NodePoolAutoscaling" = proto.Field( + proto.MESSAGE, + number=5, + message="NodePoolAutoscaling", + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class SetLoggingServiceRequest(proto.Message): + r"""SetLoggingServiceRequest sets the logging service of a + cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + logging_service (str): + Required. The logging service the cluster should use to + write logs. Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud Logging + service with a Kubernetes-native resource model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be used + for GKE 1.14+ or ``logging.googleapis.com`` for earlier + versions. + name (str): + The name (project, location, cluster) of the cluster to set + logging. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + logging_service: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SetMonitoringServiceRequest(proto.Message): + r"""SetMonitoringServiceRequest sets the monitoring service of a + cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + monitoring_service (str): + Required. The monitoring service the cluster should use to + write metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE 1.15). + - ``none`` - No metrics will be exported from the cluster. + + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``monitoring.googleapis.com`` for + earlier versions. + name (str): + The name (project, location, cluster) of the cluster to set + monitoring. Specified in the format + ``projects/*/locations/*/clusters/*``. 
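+
+    Example:
+        A minimal sketch built from this request; the cluster name is a
+        placeholder, and application default credentials are assumed::
+
+            from google.cloud import container_v1
+
+            client = container_v1.ClusterManagerClient()
+
+            request = container_v1.SetMonitoringServiceRequest(
+                name="projects/my-project/locations/us-central1/clusters/my-cluster",
+                monitoring_service="monitoring.googleapis.com/kubernetes",
+            )
+            operation = client.set_monitoring_service(request=request)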
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + monitoring_service: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class SetAddonsConfigRequest(proto.Message): + r"""SetAddonsConfigRequest sets the addons associated with the + cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + addons_config (google.cloud.container_v1.types.AddonsConfig): + Required. The desired configurations for the + various addons available to run in the cluster. + name (str): + The name (project, location, cluster) of the cluster to set + addons. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + addons_config: "AddonsConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="AddonsConfig", + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class SetLocationsRequest(proto.Message): + r"""SetLocationsRequest sets the locations of the cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + locations (MutableSequence[str]): + Required. The desired list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. Changing the + locations a cluster is in will result in nodes being either + created or removed from the cluster, depending on whether + locations are being added or removed. + + This list must always include the cluster's primary zone. + name (str): + The name (project, location, cluster) of the cluster to set + locations. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class UpdateMasterRequest(proto.Message): + r"""UpdateMasterRequest updates the master of the cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. 
This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + master_version (str): + Required. The Kubernetes version to change + the master to. + Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "-": picks the default Kubernetes version + name (str): + The name (project, location, cluster) of the cluster to + update. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + master_version: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SetMasterAuthRequest(proto.Message): + r"""SetMasterAuthRequest updates the admin password of a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + upgrade. This field has been deprecated and + replaced by the name field. + action (google.cloud.container_v1.types.SetMasterAuthRequest.Action): + Required. The exact form of action to be + taken on the master auth. + update (google.cloud.container_v1.types.MasterAuth): + Required. A description of the update. + name (str): + The name (project, location, cluster) of the cluster to set + auth. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + class Action(proto.Enum): + r"""Operation type: what type update to perform. + + Values: + UNKNOWN (0): + Operation is unknown and will error out. + SET_PASSWORD (1): + Set the password to a user generated value. + GENERATE_PASSWORD (2): + Generate a new password and set it to that. + SET_USERNAME (3): + Set the username. If an empty username is + provided, basic authentication is disabled for + the cluster. If a non-empty username is + provided, basic authentication is enabled, with + either a provided password or a generated one. + """ + UNKNOWN = 0 + SET_PASSWORD = 1 + GENERATE_PASSWORD = 2 + SET_USERNAME = 3 + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + action: Action = proto.Field( + proto.ENUM, + number=4, + enum=Action, + ) + update: "MasterAuth" = proto.Field( + proto.MESSAGE, + number=5, + message="MasterAuth", + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DeleteClusterRequest(proto.Message): + r"""DeleteClusterRequest deletes a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. 
+        zone (str):
+            Deprecated. The name of the Google Compute Engine
+            `zone `__
+            in which the cluster resides. This field has been deprecated
+            and replaced by the name field.
+        cluster_id (str):
+            Deprecated. The name of the cluster to
+            delete. This field has been deprecated and
+            replaced by the name field.
+        name (str):
+            The name (project, location, cluster) of the cluster to
+            delete. Specified in the format
+            ``projects/*/locations/*/clusters/*``.
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    zone: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    cluster_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    name: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListClustersRequest(proto.Message):
+    r"""ListClustersRequest lists clusters.
+
+    Attributes:
+        project_id (str):
+            Deprecated. The Google Developers Console `project ID or
+            project
+            number `__.
+            This field has been deprecated and replaced by the parent
+            field.
+        zone (str):
+            Deprecated. The name of the Google Compute Engine
+            `zone `__
+            in which the cluster resides, or "-" for all zones. This
+            field has been deprecated and replaced by the parent field.
+        parent (str):
+            The parent (project and location) where the clusters will be
+            listed. Specified in the format ``projects/*/locations/*``.
+            Location "-" matches all zones and all regions.
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    zone: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    parent: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListClustersResponse(proto.Message):
+    r"""ListClustersResponse is the result of ListClustersRequest.
+
+    Attributes:
+        clusters (MutableSequence[google.cloud.container_v1.types.Cluster]):
+            A list of clusters in the project in the
+            specified zone, or across all zones.
+        missing_zones (MutableSequence[str]):
+            If any zones are listed here, the list of
+            clusters returned may be missing those zones.
+    """
+
+    clusters: MutableSequence["Cluster"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Cluster",
+    )
+    missing_zones: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+
+
+class GetOperationRequest(proto.Message):
+    r"""GetOperationRequest gets a single operation.
+
+    Attributes:
+        project_id (str):
+            Deprecated. The Google Developers Console `project ID or
+            project
+            number `__.
+            This field has been deprecated and replaced by the name
+            field.
+        zone (str):
+            Deprecated. The name of the Google Compute Engine
+            `zone `__
+            in which the cluster resides. This field has been deprecated
+            and replaced by the name field.
+        operation_id (str):
+            Deprecated. The server-assigned ``name`` of the operation.
+            This field has been deprecated and replaced by the name
+            field.
+        name (str):
+            The name (project, location, operation id) of the operation
+            to get. Specified in the format
+            ``projects/*/locations/*/operations/*``.
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    zone: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    operation_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    name: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListOperationsRequest(proto.Message):
+    r"""ListOperationsRequest lists operations.
+
+    Attributes:
+        project_id (str):
+            Deprecated. The Google Developers Console `project ID or
+            project
+            number `__.
+            This field has been deprecated and replaced by the parent
+            field.
+        zone (str):
+            Deprecated.
The name of the Google Compute Engine + `zone `__ + to return operations for, or ``-`` for all zones. This field + has been deprecated and replaced by the parent field. + parent (str): + The parent (project and location) where the operations will + be listed. Specified in the format + ``projects/*/locations/*``. Location "-" matches all zones + and all regions. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + parent: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CancelOperationRequest(proto.Message): + r"""CancelOperationRequest cancels a single operation. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the operation resides. This field has been + deprecated and replaced by the name field. + operation_id (str): + Deprecated. The server-assigned ``name`` of the operation. + This field has been deprecated and replaced by the name + field. + name (str): + The name (project, location, operation id) of the operation + to cancel. Specified in the format + ``projects/*/locations/*/operations/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + operation_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListOperationsResponse(proto.Message): + r"""ListOperationsResponse is the result of + ListOperationsRequest. + + Attributes: + operations (MutableSequence[google.cloud.container_v1.types.Operation]): + A list of operations in the project in the + specified zone. + missing_zones (MutableSequence[str]): + If any zones are listed here, the list of + operations returned may be missing the + operations from those zones. + """ + + operations: MutableSequence["Operation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Operation", + ) + missing_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class GetServerConfigRequest(proto.Message): + r"""Gets the current Kubernetes Engine service configuration. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + to return operations for. This field has been deprecated and + replaced by the name field. + name (str): + The name (project and location) of the server config to get, + specified in the format ``projects/*/locations/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ServerConfig(proto.Message): + r"""Kubernetes Engine service configuration. + + Attributes: + default_cluster_version (str): + Version of Kubernetes the service deploys by + default. + valid_node_versions (MutableSequence[str]): + List of valid node upgrade target versions, + in descending order. + default_image_type (str): + Default image type. + valid_image_types (MutableSequence[str]): + List of valid image types. 
+ valid_master_versions (MutableSequence[str]): + List of valid master versions, in descending + order. + channels (MutableSequence[google.cloud.container_v1.types.ServerConfig.ReleaseChannelConfig]): + List of release channel configurations. + """ + + class ReleaseChannelConfig(proto.Message): + r"""ReleaseChannelConfig exposes configuration for a release + channel. + + Attributes: + channel (google.cloud.container_v1.types.ReleaseChannel.Channel): + The release channel this configuration + applies to. + default_version (str): + The default version for newly created + clusters on the channel. + valid_versions (MutableSequence[str]): + List of valid versions for the channel. + """ + + channel: "ReleaseChannel.Channel" = proto.Field( + proto.ENUM, + number=1, + enum="ReleaseChannel.Channel", + ) + default_version: str = proto.Field( + proto.STRING, + number=2, + ) + valid_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + default_cluster_version: str = proto.Field( + proto.STRING, + number=1, + ) + valid_node_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + default_image_type: str = proto.Field( + proto.STRING, + number=4, + ) + valid_image_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + valid_master_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + channels: MutableSequence[ReleaseChannelConfig] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=ReleaseChannelConfig, + ) + + +class CreateNodePoolRequest(proto.Message): + r"""CreateNodePoolRequest creates a node pool for a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the parent + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the parent field. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and replaced by + the parent field. + node_pool (google.cloud.container_v1.types.NodePool): + Required. The node pool to create. + parent (str): + The parent (project, location, cluster name) where the node + pool will be created. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool: "NodePool" = proto.Field( + proto.MESSAGE, + number=4, + message="NodePool", + ) + parent: str = proto.Field( + proto.STRING, + number=6, + ) + + +class DeleteNodePoolRequest(proto.Message): + r"""DeleteNodePoolRequest deletes a node pool for a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and replaced by + the name field. + node_pool_id (str): + Deprecated. The name of the node pool to + delete. This field has been deprecated and + replaced by the name field. 
+ name (str): + The name (project, location, cluster, node pool id) of the + node pool to delete. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ListNodePoolsRequest(proto.Message): + r"""ListNodePoolsRequest lists the node pool(s) for a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the parent + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the parent field. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and replaced by + the parent field. + parent (str): + The parent (project, location, cluster name) where the node + pools will be listed. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + parent: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetNodePoolRequest(proto.Message): + r"""GetNodePoolRequest retrieves a node pool for a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and replaced by + the name field. + node_pool_id (str): + Deprecated. The name of the node pool. + This field has been deprecated and replaced by + the name field. + name (str): + The name (project, location, cluster, node pool id) of the + node pool to get. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class BlueGreenSettings(proto.Message): + r"""Settings for blue-green upgrade. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + standard_rollout_policy (google.cloud.container_v1.types.BlueGreenSettings.StandardRolloutPolicy): + Standard policy for the blue-green upgrade. + + This field is a member of `oneof`_ ``rollout_policy``. + node_pool_soak_duration (google.protobuf.duration_pb2.Duration): + Time needed after draining entire blue pool. + After this period, blue pool will be cleaned up. + + This field is a member of `oneof`_ ``_node_pool_soak_duration``. + """ + + class StandardRolloutPolicy(proto.Message): + r"""Standard rollout policy is the default policy for blue-green. 
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + batch_percentage (float): + Percentage of the blue pool nodes to drain in a batch. The + range of this field should be (0.0, 1.0]. + + This field is a member of `oneof`_ ``update_batch_size``. + batch_node_count (int): + Number of blue nodes to drain in a batch. + + This field is a member of `oneof`_ ``update_batch_size``. + batch_soak_duration (google.protobuf.duration_pb2.Duration): + Soak time after each batch gets drained. + Default to zero. + + This field is a member of `oneof`_ ``_batch_soak_duration``. + """ + + batch_percentage: float = proto.Field( + proto.FLOAT, + number=1, + oneof="update_batch_size", + ) + batch_node_count: int = proto.Field( + proto.INT32, + number=2, + oneof="update_batch_size", + ) + batch_soak_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=duration_pb2.Duration, + ) + + standard_rollout_policy: StandardRolloutPolicy = proto.Field( + proto.MESSAGE, + number=1, + oneof="rollout_policy", + message=StandardRolloutPolicy, + ) + node_pool_soak_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=duration_pb2.Duration, + ) + + +class NodePool(proto.Message): + r"""NodePool contains the name and configuration for a cluster's + node pool. Node pools are a set of nodes (i.e. VM's), with a + common configuration and specification, under the control of the + cluster master. They may have a set of Kubernetes labels applied + to them, which may be used to reference them during pod + scheduling. They may also be resized up or down, to accommodate + the workload. + + Attributes: + name (str): + The name of the node pool. + config (google.cloud.container_v1.types.NodeConfig): + The node configuration of the pool. + initial_node_count (int): + The initial node count for the pool. You must ensure that + your Compute Engine `resource + quota `__ is + sufficient for this number of instances. You must also have + available firewall and routes quota. + locations (MutableSequence[str]): + The list of Google Compute Engine + `zones `__ + in which the NodePool's nodes should be located. + + If this value is unspecified during node pool creation, the + `Cluster.Locations `__ + value will be used, instead. + + Warning: changing node pool locations will result in nodes + being added and/or removed. + network_config (google.cloud.container_v1.types.NodeNetworkConfig): + Networking configuration for this NodePool. + If specified, it overrides the cluster-level + defaults. + self_link (str): + [Output only] Server-defined URL for the resource. + version (str): + The version of Kubernetes running on this NodePool's nodes. + If unspecified, it defaults as described + `here `__. + instance_group_urls (MutableSequence[str]): + [Output only] The resource URLs of the `managed instance + groups `__ + associated with this node pool. During the node pool + blue-green upgrade operation, the URLs contain both blue and + green resources. + status (google.cloud.container_v1.types.NodePool.Status): + [Output only] The status of the nodes in this pool instance. + status_message (str): + [Output only] Deprecated. Use conditions instead. 
Additional + information about the current status of this node pool + instance, if available. + autoscaling (google.cloud.container_v1.types.NodePoolAutoscaling): + Autoscaler configuration for this NodePool. + Autoscaler is enabled only if a valid + configuration is present. + management (google.cloud.container_v1.types.NodeManagement): + NodeManagement configuration for this + NodePool. + max_pods_constraint (google.cloud.container_v1.types.MaxPodsConstraint): + The constraint on the maximum number of pods + that can be run simultaneously on a node in the + node pool. + conditions (MutableSequence[google.cloud.container_v1.types.StatusCondition]): + Which conditions caused the current node pool + state. + pod_ipv4_cidr_size (int): + [Output only] The pod CIDR block size per node in this node + pool. + upgrade_settings (google.cloud.container_v1.types.NodePool.UpgradeSettings): + Upgrade settings control disruption and speed + of the upgrade. + placement_policy (google.cloud.container_v1.types.NodePool.PlacementPolicy): + Specifies the node placement policy. + update_info (google.cloud.container_v1.types.NodePool.UpdateInfo): + Output only. [Output only] Update info contains relevant + information during a node pool update. + etag (str): + This checksum is computed by the server based + on the value of node pool fields, and may be + sent on update requests to ensure the client has + an up-to-date value before proceeding. + best_effort_provisioning (google.cloud.container_v1.types.BestEffortProvisioning): + Enable best effort provisioning for nodes + """ + + class Status(proto.Enum): + r"""The current status of the node pool instance. + + Values: + STATUS_UNSPECIFIED (0): + Not set. + PROVISIONING (1): + The PROVISIONING state indicates the node + pool is being created. + RUNNING (2): + The RUNNING state indicates the node pool has + been created and is fully usable. + RUNNING_WITH_ERROR (3): + The RUNNING_WITH_ERROR state indicates the node pool has + been created and is partially usable. Some error state has + occurred and some functionality may be impaired. Customer + may need to reissue a request or trigger a new update. + RECONCILING (4): + The RECONCILING state indicates that some work is actively + being done on the node pool, such as upgrading node + software. Details can be found in the ``statusMessage`` + field. + STOPPING (5): + The STOPPING state indicates the node pool is + being deleted. + ERROR (6): + The ERROR state indicates the node pool may be unusable. + Details can be found in the ``statusMessage`` field. + """ + STATUS_UNSPECIFIED = 0 + PROVISIONING = 1 + RUNNING = 2 + RUNNING_WITH_ERROR = 3 + RECONCILING = 4 + STOPPING = 5 + ERROR = 6 + + class UpgradeSettings(proto.Message): + r"""These upgrade settings control the level of parallelism and the + level of disruption caused by an upgrade. + + maxUnavailable controls the number of nodes that can be + simultaneously unavailable. + + maxSurge controls the number of additional nodes that can be added + to the node pool temporarily for the time of the upgrade to increase + the number of available nodes. + + (maxUnavailable + maxSurge) determines the level of parallelism (how + many nodes are being upgraded at the same time). + + Note: upgrades inevitably introduce some disruption since workloads + need to be moved from old nodes to new, upgraded ones. Even if + maxUnavailable=0, this holds true. (Disruption stays within the + limits of PodDisruptionBudget, if it is configured.) 
+ + Consider a hypothetical node pool with 5 nodes having maxSurge=2, + maxUnavailable=1. This means the upgrade process upgrades 3 nodes + simultaneously. It creates 2 additional (upgraded) nodes, then it + brings down 3 old (not yet upgraded) nodes at the same time. This + ensures that there are always at least 4 nodes available. + + These upgrade settings configure the upgrade strategy for the node + pool. Use strategy to switch between the strategies applied to the + node pool. + + If the strategy is ROLLING, use max_surge and max_unavailable to + control the level of parallelism and the level of disruption caused + by upgrade. + + 1. maxSurge controls the number of additional nodes that can be + added to the node pool temporarily for the time of the upgrade to + increase the number of available nodes. + 2. maxUnavailable controls the number of nodes that can be + simultaneously unavailable. + 3. (maxUnavailable + maxSurge) determines the level of parallelism + (how many nodes are being upgraded at the same time). + + If the strategy is BLUE_GREEN, use blue_green_settings to configure + the blue-green upgrade related settings. + + 1. standard_rollout_policy is the default policy. The policy is used + to control the way blue pool gets drained. The draining is + executed in the batch mode. The batch size could be specified as + either percentage of the node pool size or the number of nodes. + batch_soak_duration is the soak time after each batch gets + drained. + 2. node_pool_soak_duration is the soak time after all blue nodes are + drained. After this period, the blue pool nodes will be deleted. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_surge (int): + The maximum number of nodes that can be + created beyond the current size of the node pool + during the upgrade process. + max_unavailable (int): + The maximum number of nodes that can be + simultaneously unavailable during the upgrade + process. A node is considered available if its + status is Ready. + strategy (google.cloud.container_v1.types.NodePoolUpdateStrategy): + Update strategy of the node pool. + + This field is a member of `oneof`_ ``_strategy``. + blue_green_settings (google.cloud.container_v1.types.BlueGreenSettings): + Settings for blue-green upgrade strategy. + + This field is a member of `oneof`_ ``_blue_green_settings``. + """ + + max_surge: int = proto.Field( + proto.INT32, + number=1, + ) + max_unavailable: int = proto.Field( + proto.INT32, + number=2, + ) + strategy: "NodePoolUpdateStrategy" = proto.Field( + proto.ENUM, + number=3, + optional=True, + enum="NodePoolUpdateStrategy", + ) + blue_green_settings: "BlueGreenSettings" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="BlueGreenSettings", + ) + + class UpdateInfo(proto.Message): + r"""UpdateInfo contains resource (instance groups, etc), status + and other intermediate information relevant to a node pool + upgrade. + + Attributes: + blue_green_info (google.cloud.container_v1.types.NodePool.UpdateInfo.BlueGreenInfo): + Information of a blue-green upgrade. + """ + + class BlueGreenInfo(proto.Message): + r"""Information relevant to blue-green upgrade. + + Attributes: + phase (google.cloud.container_v1.types.NodePool.UpdateInfo.BlueGreenInfo.Phase): + Current blue-green upgrade phase. 
+ blue_instance_group_urls (MutableSequence[str]): + The resource URLs of the [managed instance groups] + (/compute/docs/instance-groups/creating-groups-of-managed-instances) + associated with blue pool. + green_instance_group_urls (MutableSequence[str]): + The resource URLs of the [managed instance groups] + (/compute/docs/instance-groups/creating-groups-of-managed-instances) + associated with green pool. + blue_pool_deletion_start_time (str): + Time to start deleting blue pool to complete blue-green + upgrade, in + `RFC3339 `__ text + format. + green_pool_version (str): + Version of green pool. + """ + + class Phase(proto.Enum): + r"""Phase represents the different stages blue-green upgrade is + running in. + + Values: + PHASE_UNSPECIFIED (0): + Unspecified phase. + UPDATE_STARTED (1): + blue-green upgrade has been initiated. + CREATING_GREEN_POOL (2): + Start creating green pool nodes. + CORDONING_BLUE_POOL (3): + Start cordoning blue pool nodes. + DRAINING_BLUE_POOL (4): + Start draining blue pool nodes. + NODE_POOL_SOAKING (5): + Start soaking time after draining entire blue + pool. + DELETING_BLUE_POOL (6): + Start deleting blue nodes. + ROLLBACK_STARTED (7): + Rollback has been initiated. + """ + PHASE_UNSPECIFIED = 0 + UPDATE_STARTED = 1 + CREATING_GREEN_POOL = 2 + CORDONING_BLUE_POOL = 3 + DRAINING_BLUE_POOL = 4 + NODE_POOL_SOAKING = 5 + DELETING_BLUE_POOL = 6 + ROLLBACK_STARTED = 7 + + phase: "NodePool.UpdateInfo.BlueGreenInfo.Phase" = proto.Field( + proto.ENUM, + number=1, + enum="NodePool.UpdateInfo.BlueGreenInfo.Phase", + ) + blue_instance_group_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + green_instance_group_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + blue_pool_deletion_start_time: str = proto.Field( + proto.STRING, + number=4, + ) + green_pool_version: str = proto.Field( + proto.STRING, + number=5, + ) + + blue_green_info: "NodePool.UpdateInfo.BlueGreenInfo" = proto.Field( + proto.MESSAGE, + number=1, + message="NodePool.UpdateInfo.BlueGreenInfo", + ) + + class PlacementPolicy(proto.Message): + r"""PlacementPolicy defines the placement policy used by the node + pool. + + Attributes: + type_ (google.cloud.container_v1.types.NodePool.PlacementPolicy.Type): + The type of placement. + tpu_topology (str): + Optional. TPU placement topology for pod slice node pool. + https://cloud.google.com/tpu/docs/types-topologies#tpu_topologies + policy_name (str): + If set, refers to the name of a custom + resource policy supplied by the user. The + resource policy must be in the same project and + region as the node pool. If not found, + InvalidArgument error is returned. + """ + + class Type(proto.Enum): + r"""Type defines the type of placement policy. + + Values: + TYPE_UNSPECIFIED (0): + TYPE_UNSPECIFIED specifies no requirements on nodes + placement. + COMPACT (1): + COMPACT specifies node placement in the same + availability domain to ensure low communication + latency. 
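As an illustration of the placement policy described above, a sketch only; ``COMPACT`` trades zone spreading for lower inter-node latency:

from google.cloud import container_v1

# `type` is a Python builtin, so the generated field is spelled `type_`.
policy = container_v1.NodePool.PlacementPolicy(
    type_=container_v1.NodePool.PlacementPolicy.Type.COMPACT,
)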
+ """ + TYPE_UNSPECIFIED = 0 + COMPACT = 1 + + type_: "NodePool.PlacementPolicy.Type" = proto.Field( + proto.ENUM, + number=1, + enum="NodePool.PlacementPolicy.Type", + ) + tpu_topology: str = proto.Field( + proto.STRING, + number=2, + ) + policy_name: str = proto.Field( + proto.STRING, + number=3, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: "NodeConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="NodeConfig", + ) + initial_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + network_config: "NodeNetworkConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="NodeNetworkConfig", + ) + self_link: str = proto.Field( + proto.STRING, + number=100, + ) + version: str = proto.Field( + proto.STRING, + number=101, + ) + instance_group_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=102, + ) + status: Status = proto.Field( + proto.ENUM, + number=103, + enum=Status, + ) + status_message: str = proto.Field( + proto.STRING, + number=104, + ) + autoscaling: "NodePoolAutoscaling" = proto.Field( + proto.MESSAGE, + number=4, + message="NodePoolAutoscaling", + ) + management: "NodeManagement" = proto.Field( + proto.MESSAGE, + number=5, + message="NodeManagement", + ) + max_pods_constraint: "MaxPodsConstraint" = proto.Field( + proto.MESSAGE, + number=6, + message="MaxPodsConstraint", + ) + conditions: MutableSequence["StatusCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=105, + message="StatusCondition", + ) + pod_ipv4_cidr_size: int = proto.Field( + proto.INT32, + number=7, + ) + upgrade_settings: UpgradeSettings = proto.Field( + proto.MESSAGE, + number=107, + message=UpgradeSettings, + ) + placement_policy: PlacementPolicy = proto.Field( + proto.MESSAGE, + number=108, + message=PlacementPolicy, + ) + update_info: UpdateInfo = proto.Field( + proto.MESSAGE, + number=109, + message=UpdateInfo, + ) + etag: str = proto.Field( + proto.STRING, + number=110, + ) + best_effort_provisioning: "BestEffortProvisioning" = proto.Field( + proto.MESSAGE, + number=113, + message="BestEffortProvisioning", + ) + + +class NodeManagement(proto.Message): + r"""NodeManagement defines the set of node management services + turned on for the node pool. + + Attributes: + auto_upgrade (bool): + A flag that specifies whether node + auto-upgrade is enabled for the node pool. If + enabled, node auto-upgrade helps keep the nodes + in your node pool up to date with the latest + release version of Kubernetes. + auto_repair (bool): + A flag that specifies whether the node + auto-repair is enabled for the node pool. If + enabled, the nodes in this node pool will be + monitored and, if they fail health checks too + many times, an automatic repair action will be + triggered. + upgrade_options (google.cloud.container_v1.types.AutoUpgradeOptions): + Specifies the Auto Upgrade knobs for the node + pool. + """ + + auto_upgrade: bool = proto.Field( + proto.BOOL, + number=1, + ) + auto_repair: bool = proto.Field( + proto.BOOL, + number=2, + ) + upgrade_options: "AutoUpgradeOptions" = proto.Field( + proto.MESSAGE, + number=10, + message="AutoUpgradeOptions", + ) + + +class BestEffortProvisioning(proto.Message): + r"""Best effort provisioning. 
+ + Attributes: + enabled (bool): + When this is enabled, cluster/node pool + creations will ignore non-fatal errors like + stockout and provision as many nodes as + possible right away, eventually bringing the + pool up to the target number of nodes. + min_provision_nodes (int): + Minimum number of nodes that must be + provisioned for creation to be considered + successful; the remaining nodes will be + provisioned gradually once the stockout issue + is resolved. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + min_provision_nodes: int = proto.Field( + proto.INT32, + number=2, + ) + + +class AutoUpgradeOptions(proto.Message): + r"""AutoUpgradeOptions defines the set of options for the user to + control how the Auto Upgrades will proceed. + + Attributes: + auto_upgrade_start_time (str): + [Output only] This field is set when upgrades are about to + commence with the approximate start time for the upgrades, + in `RFC3339 `__ text + format. + description (str): + [Output only] This field is set when upgrades are about to + commence with the description of the upgrade. + """ + + auto_upgrade_start_time: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MaintenancePolicy(proto.Message): + r"""MaintenancePolicy defines the maintenance policy to be used + for the cluster. + + Attributes: + window (google.cloud.container_v1.types.MaintenanceWindow): + Specifies the maintenance window in which + maintenance may be performed. + resource_version (str): + A hash identifying the version of this policy, so that + updates to fields of the policy won't accidentally undo + intermediate changes (and so that users of the API unaware + of some fields won't accidentally remove other fields). Make + a ``get()`` request to the cluster to get the current + resource version and include it with requests to set the + policy. + """ + + window: "MaintenanceWindow" = proto.Field( + proto.MESSAGE, + number=1, + message="MaintenanceWindow", + ) + resource_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class MaintenanceWindow(proto.Message): + r"""MaintenanceWindow defines the maintenance window to be used + for the cluster. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + daily_maintenance_window (google.cloud.container_v1.types.DailyMaintenanceWindow): + DailyMaintenanceWindow specifies a daily + maintenance operation window. + + This field is a member of `oneof`_ ``policy``. + recurring_window (google.cloud.container_v1.types.RecurringTimeWindow): + RecurringWindow specifies some number of + recurring time periods for maintenance to occur. + The time windows may be overlapping. If no + maintenance windows are set, maintenance can + occur at any time. + + This field is a member of `oneof`_ ``policy``. + maintenance_exclusions (MutableMapping[str, google.cloud.container_v1.types.TimeWindow]): + Exceptions to maintenance window. + Non-emergency maintenance should not occur in + these windows.
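A sketch of a complete maintenance policy using the ``policy`` oneof above; ``DailyMaintenanceWindow`` is defined further down in this module, and the start time is an arbitrary placeholder (GMT, per its documentation):

from google.cloud import container_v1

# Setting daily_maintenance_window leaves recurring_window unset,
# since both are members of the same `policy` oneof.
maintenance_policy = container_v1.MaintenancePolicy(
    window=container_v1.MaintenanceWindow(
        daily_maintenance_window=container_v1.DailyMaintenanceWindow(
            start_time="03:00",
        ),
    ),
)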
+ """ + + daily_maintenance_window: "DailyMaintenanceWindow" = proto.Field( + proto.MESSAGE, + number=2, + oneof="policy", + message="DailyMaintenanceWindow", + ) + recurring_window: "RecurringTimeWindow" = proto.Field( + proto.MESSAGE, + number=3, + oneof="policy", + message="RecurringTimeWindow", + ) + maintenance_exclusions: MutableMapping[str, "TimeWindow"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message="TimeWindow", + ) + + +class TimeWindow(proto.Message): + r"""Represents an arbitrary window of time. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + maintenance_exclusion_options (google.cloud.container_v1.types.MaintenanceExclusionOptions): + MaintenanceExclusionOptions provides + maintenance exclusion related options. + + This field is a member of `oneof`_ ``options``. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time that the window first starts. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time that the window ends. The end time + should take place after the start time. + """ + + maintenance_exclusion_options: "MaintenanceExclusionOptions" = proto.Field( + proto.MESSAGE, + number=3, + oneof="options", + message="MaintenanceExclusionOptions", + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class MaintenanceExclusionOptions(proto.Message): + r"""Represents the Maintenance exclusion option. + + Attributes: + scope (google.cloud.container_v1.types.MaintenanceExclusionOptions.Scope): + Scope specifies the upgrade scope which + upgrades are blocked by the exclusion. + """ + + class Scope(proto.Enum): + r"""Scope of exclusion. + + Values: + NO_UPGRADES (0): + NO_UPGRADES excludes all upgrades, including patch upgrades + and minor upgrades across control planes and nodes. This is + the default exclusion behavior. + NO_MINOR_UPGRADES (1): + NO_MINOR_UPGRADES excludes all minor upgrades for the + cluster, only patches are allowed. + NO_MINOR_OR_NODE_UPGRADES (2): + NO_MINOR_OR_NODE_UPGRADES excludes all minor upgrades for + the cluster, and also exclude all node pool upgrades. Only + control plane patches are allowed. + """ + NO_UPGRADES = 0 + NO_MINOR_UPGRADES = 1 + NO_MINOR_OR_NODE_UPGRADES = 2 + + scope: Scope = proto.Field( + proto.ENUM, + number=1, + enum=Scope, + ) + + +class RecurringTimeWindow(proto.Message): + r"""Represents an arbitrary window of time that recurs. + + Attributes: + window (google.cloud.container_v1.types.TimeWindow): + The window of the first recurrence. + recurrence (str): + An RRULE + (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for + how this window reccurs. They go on for the span of time + between the start and end time. + + For example, to have something repeat every weekday, you'd + use: ``FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`` + + To repeat some window daily (equivalent to the + DailyMaintenanceWindow): ``FREQ=DAILY`` + + For the first weekend of every month: + ``FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`` + + This specifies how frequently the window starts. Eg, if you + wanted to have a 9-5 UTC-4 window every weekday, you'd use + something like: + + :: + + start time = 2019-01-01T09:00:00-0400 + end time = 2019-01-01T17:00:00-0400 + recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR + + Windows can span multiple days. 
Eg, to make the window + encompass every weekend from midnight Saturday till the last + minute of Sunday UTC: + + :: + + start time = 2019-01-05T00:00:00Z + end time = 2019-01-07T23:59:00Z + recurrence = FREQ=WEEKLY;BYDAY=SA + + Note the start and end time's specific dates are largely + arbitrary except to specify duration of the window and when + it first starts. The FREQ values of HOURLY, MINUTELY, and + SECONDLY are not supported. + """ + + window: "TimeWindow" = proto.Field( + proto.MESSAGE, + number=1, + message="TimeWindow", + ) + recurrence: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DailyMaintenanceWindow(proto.Message): + r"""Time window specified for daily maintenance operations. + + Attributes: + start_time (str): + Time within the maintenance window to start the maintenance + operations. Time format should be in + `RFC3339 `__ format + "HH:MM", where HH : [00-23] and MM : [00-59] GMT. + duration (str): + [Output only] Duration of the time window, automatically + chosen to be smallest possible in the given scenario. + Duration will be in + `RFC3339 `__ format + "PTnHnMnS". + """ + + start_time: str = proto.Field( + proto.STRING, + number=2, + ) + duration: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SetNodePoolManagementRequest(proto.Message): + r"""SetNodePoolManagementRequest sets the node management + properties of a node pool. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + update. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Deprecated. The name of the node pool to + update. This field has been deprecated and + replaced by the name field. + management (google.cloud.container_v1.types.NodeManagement): + Required. NodeManagement configuration for + the node pool. + name (str): + The name (project, location, cluster, node pool id) of the + node pool to set management properties. Specified in the + format ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + management: "NodeManagement" = proto.Field( + proto.MESSAGE, + number=5, + message="NodeManagement", + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SetNodePoolSizeRequest(proto.Message): + r"""SetNodePoolSizeRequest sets the size of a node pool. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + update. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Deprecated. The name of the node pool to + update. This field has been deprecated and + replaced by the name field. 
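Tying the management knobs together, a hedged sketch of a ``SetNodePoolManagementRequest`` built from the ``NodeManagement`` message defined earlier in this module (resource names are placeholders):

from google.cloud import container_v1

# Enable both auto-repair and auto-upgrade for every node in the pool.
request = container_v1.SetNodePoolManagementRequest(
    name=(
        "projects/my-project/locations/us-central1"
        "/clusters/my-cluster/nodePools/pool-1"
    ),
    management=container_v1.NodeManagement(
        auto_upgrade=True,
        auto_repair=True,
    ),
)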
+ node_count (int): + Required. The desired node count for the + pool. + name (str): + The name (project, location, cluster, node pool id) of the + node pool to set size. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + node_count: int = proto.Field( + proto.INT32, + number=5, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CompleteNodePoolUpgradeRequest(proto.Message): + r"""CompleteNodePoolUpgradeRequest sets the name of the target node + pool to complete the upgrade. + + Attributes: + name (str): + The name (project, location, cluster, node pool id) of the + node pool to complete upgrade. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RollbackNodePoolUpgradeRequest(proto.Message): + r"""RollbackNodePoolUpgradeRequest rolls back the previously + Aborted or Failed NodePool upgrade. This will be a no-op if the + last upgrade successfully completed. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + roll back. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Deprecated. The name of the node pool to + roll back. This field has been deprecated and + replaced by the name field. + name (str): + The name (project, location, cluster, node pool id) of the + node pool whose upgrade should be rolled back. Specified in + the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + respect_pdb (bool): + Option for rollback to ignore the + PodDisruptionBudget. Default value is false. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + respect_pdb: bool = proto.Field( + proto.BOOL, + number=7, + ) + + +class ListNodePoolsResponse(proto.Message): + r"""ListNodePoolsResponse is the result of ListNodePoolsRequest. + + Attributes: + node_pools (MutableSequence[google.cloud.container_v1.types.NodePool]): + A list of node pools for a cluster. + """ + + node_pools: MutableSequence["NodePool"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NodePool", + ) + + +class ClusterAutoscaling(proto.Message): + r"""ClusterAutoscaling contains global, per-cluster information + required by Cluster Autoscaler to automatically adjust the size + of the cluster and create/delete + node pools based on the current needs. + + Attributes: + enable_node_autoprovisioning (bool): + Enables automatic node pool creation and + deletion. + resource_limits (MutableSequence[google.cloud.container_v1.types.ResourceLimit]): + Contains global constraints regarding minimum + and maximum amount of resources in the cluster.
+ autoscaling_profile (google.cloud.container_v1.types.ClusterAutoscaling.AutoscalingProfile): + Defines autoscaling behaviour. + autoprovisioning_node_pool_defaults (google.cloud.container_v1.types.AutoprovisioningNodePoolDefaults): + AutoprovisioningNodePoolDefaults contains + defaults for a node pool created by NAP. + autoprovisioning_locations (MutableSequence[str]): + The list of Google Compute Engine + `zones `__ + in which the NodePool's nodes can be created by NAP. + """ + + class AutoscalingProfile(proto.Enum): + r"""Defines possible options for autoscaling_profile field. + + Values: + PROFILE_UNSPECIFIED (0): + No change to autoscaling configuration. + OPTIMIZE_UTILIZATION (1): + Prioritize optimizing utilization of + resources. + BALANCED (2): + Use default (balanced) autoscaling + configuration. + """ + PROFILE_UNSPECIFIED = 0 + OPTIMIZE_UTILIZATION = 1 + BALANCED = 2 + + enable_node_autoprovisioning: bool = proto.Field( + proto.BOOL, + number=1, + ) + resource_limits: MutableSequence["ResourceLimit"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ResourceLimit", + ) + autoscaling_profile: AutoscalingProfile = proto.Field( + proto.ENUM, + number=3, + enum=AutoscalingProfile, + ) + autoprovisioning_node_pool_defaults: "AutoprovisioningNodePoolDefaults" = ( + proto.Field( + proto.MESSAGE, + number=4, + message="AutoprovisioningNodePoolDefaults", + ) + ) + autoprovisioning_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class AutoprovisioningNodePoolDefaults(proto.Message): + r"""AutoprovisioningNodePoolDefaults contains defaults for a node + pool created by NAP. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + oauth_scopes (MutableSequence[str]): + Scopes that are used by NAP when creating + node pools. + service_account (str): + The Google Cloud Platform Service Account to + be used by the node VMs. + upgrade_settings (google.cloud.container_v1.types.NodePool.UpgradeSettings): + Specifies the upgrade settings for NAP + created node pools + management (google.cloud.container_v1.types.NodeManagement): + Specifies the node management options for NAP + created node-pools. + min_cpu_platform (str): + Deprecated. Minimum CPU platform to be used for NAP created + node pools. The instance may be scheduled on the specified + or newer CPU platform. Applicable values are the friendly + names of CPU platforms, such as minCpuPlatform: Intel + Haswell or minCpuPlatform: Intel Sandy Bridge. For more + information, read `how to specify min CPU + platform `__. + This field is deprecated, min_cpu_platform should be + specified using + ``cloud.google.com/requested-min-cpu-platform`` label + selector on the pod. To unset the min cpu platform field + pass "automatic" as field value. + disk_size_gb (int): + Size of the disk attached to each node, + specified in GB. The smallest allowed disk size + is 10GB. + + If unspecified, the default disk size is 100GB. + disk_type (str): + Type of the disk attached to each node (e.g. + 'pd-standard', 'pd-ssd' or 'pd-balanced') + + If unspecified, the default disk type is + 'pd-standard' + shielded_instance_config (google.cloud.container_v1.types.ShieldedInstanceConfig): + Shielded Instance options. + boot_disk_kms_key (str): + The Customer Managed Encryption Key used to encrypt the boot + disk attached to each node in the node pool. 
This should be + of the form + projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. + For more information about protecting resources with Cloud + KMS Keys please see: + https://cloud.google.com/compute/docs/disks/customer-managed-encryption + image_type (str): + The image type to use for NAP created node. + Please see + https://cloud.google.com/kubernetes-engine/docs/concepts/node-images + for available image types. + insecure_kubelet_readonly_port_enabled (bool): + Enable or disable Kubelet read only port. + + This field is a member of `oneof`_ ``_insecure_kubelet_readonly_port_enabled``. + """ + + oauth_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + service_account: str = proto.Field( + proto.STRING, + number=2, + ) + upgrade_settings: "NodePool.UpgradeSettings" = proto.Field( + proto.MESSAGE, + number=3, + message="NodePool.UpgradeSettings", + ) + management: "NodeManagement" = proto.Field( + proto.MESSAGE, + number=4, + message="NodeManagement", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=5, + ) + disk_size_gb: int = proto.Field( + proto.INT32, + number=6, + ) + disk_type: str = proto.Field( + proto.STRING, + number=7, + ) + shielded_instance_config: "ShieldedInstanceConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="ShieldedInstanceConfig", + ) + boot_disk_kms_key: str = proto.Field( + proto.STRING, + number=9, + ) + image_type: str = proto.Field( + proto.STRING, + number=10, + ) + insecure_kubelet_readonly_port_enabled: bool = proto.Field( + proto.BOOL, + number=13, + optional=True, + ) + + +class ResourceLimit(proto.Message): + r"""Contains information about amount of some resource in the + cluster. For memory, value should be in GB. + + Attributes: + resource_type (str): + Resource name "cpu", "memory" or gpu-specific + string. + minimum (int): + Minimum amount of the resource in the + cluster. + maximum (int): + Maximum amount of the resource in the + cluster. + """ + + resource_type: str = proto.Field( + proto.STRING, + number=1, + ) + minimum: int = proto.Field( + proto.INT64, + number=2, + ) + maximum: int = proto.Field( + proto.INT64, + number=3, + ) + + +class NodePoolAutoscaling(proto.Message): + r"""NodePoolAutoscaling contains information required by cluster + autoscaler to adjust the size of the node pool to the current + cluster usage. + + Attributes: + enabled (bool): + Is autoscaling enabled for this node pool. + min_node_count (int): + Minimum number of nodes for one location in the NodePool. + Must be >= 1 and <= max_node_count. + max_node_count (int): + Maximum number of nodes for one location in the NodePool. + Must be >= min_node_count. There has to be enough quota to + scale up the cluster. + autoprovisioned (bool): + Can this node pool be deleted automatically. + location_policy (google.cloud.container_v1.types.NodePoolAutoscaling.LocationPolicy): + Location policy used when scaling up a + nodepool. + total_min_node_count (int): + Minimum number of nodes in the node pool. Must be greater + than 1 less than total_max_node_count. The + total_*_node_count fields are mutually exclusive with the + \*_node_count fields. + total_max_node_count (int): + Maximum number of nodes in the node pool. Must be greater + than total_min_node_count. There has to be enough quota to + scale up the cluster. The total_*_node_count fields are + mutually exclusive with the \*_node_count fields. 
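For the autoscaling message documented above, a minimal sketch; note the per-location semantics of ``min_node_count``/``max_node_count`` and their mutual exclusivity with the ``total_*_node_count`` fields:

from google.cloud import container_v1

# Per-location bounds: a pool spanning three zones with these limits
# can hold between 3 and 15 nodes in total.
autoscaling = container_v1.NodePoolAutoscaling(
    enabled=True,
    min_node_count=1,
    max_node_count=5,
)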
+ """ + + class LocationPolicy(proto.Enum): + r"""Location policy specifies how zones are picked when scaling + up the nodepool. + + Values: + LOCATION_POLICY_UNSPECIFIED (0): + Not set. + BALANCED (1): + BALANCED is a best effort policy that aims to + balance the sizes of different zones. + ANY (2): + ANY policy picks zones that have the highest + capacity available. + """ + LOCATION_POLICY_UNSPECIFIED = 0 + BALANCED = 1 + ANY = 2 + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + min_node_count: int = proto.Field( + proto.INT32, + number=2, + ) + max_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + autoprovisioned: bool = proto.Field( + proto.BOOL, + number=4, + ) + location_policy: LocationPolicy = proto.Field( + proto.ENUM, + number=5, + enum=LocationPolicy, + ) + total_min_node_count: int = proto.Field( + proto.INT32, + number=6, + ) + total_max_node_count: int = proto.Field( + proto.INT32, + number=7, + ) + + +class SetLabelsRequest(proto.Message): + r"""SetLabelsRequest sets the Google Cloud Platform labels on a + Google Container Engine cluster, which will in turn set them for + Google Compute Engine resources used by that cluster + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and replaced by + the name field. + resource_labels (MutableMapping[str, str]): + Required. The labels to set for that cluster. + label_fingerprint (str): + Required. The fingerprint of the previous set of labels for + this resource, used to detect conflicts. The fingerprint is + initially generated by Kubernetes Engine and changes after + every request to modify or update labels. You must always + provide an up-to-date fingerprint hash when updating or + changing labels. Make a ``get()`` request to the resource to + get the latest fingerprint. + name (str): + The name (project, location, cluster name) of the cluster to + set labels. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=5, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SetLegacyAbacRequest(proto.Message): + r"""SetLegacyAbacRequest enables or disables the ABAC + authorization mechanism for a cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster to + update. This field has been deprecated and + replaced by the name field. + enabled (bool): + Required. Whether ABAC authorization will be + enabled in the cluster. 
+ name (str): + The name (project, location, cluster name) of the cluster to + set legacy abac. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class StartIPRotationRequest(proto.Message): + r"""StartIPRotationRequest creates a new IP for the cluster and + then performs a node upgrade on each node pool to point to the + new IP. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and replaced by + the name field. + name (str): + The name (project, location, cluster name) of the cluster to + start IP rotation. Specified in the format + ``projects/*/locations/*/clusters/*``. + rotate_credentials (bool): + Whether to rotate credentials during IP + rotation. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + rotate_credentials: bool = proto.Field( + proto.BOOL, + number=7, + ) + + +class CompleteIPRotationRequest(proto.Message): + r"""CompleteIPRotationRequest moves the cluster master back into + single-IP mode. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and replaced by + the name field. + name (str): + The name (project, location, cluster name) of the cluster to + complete IP rotation. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class AcceleratorConfig(proto.Message): + r"""AcceleratorConfig represents a Hardware Accelerator request. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_count (int): + The number of the accelerator cards exposed + to an instance. + accelerator_type (str): + The accelerator type resource name. List of supported + accelerators + `here `__ + gpu_partition_size (str): + Size of partitions to create on the GPU. Valid values are + described in the NVIDIA `mig user + guide `__. + gpu_sharing_config (google.cloud.container_v1.types.GPUSharingConfig): + The configuration for GPU sharing options. + + This field is a member of `oneof`_ ``_gpu_sharing_config``. 
+ gpu_driver_installation_config (google.cloud.container_v1.types.GPUDriverInstallationConfig): + The configuration for auto installation of + GPU driver. + + This field is a member of `oneof`_ ``_gpu_driver_installation_config``. + """ + + accelerator_count: int = proto.Field( + proto.INT64, + number=1, + ) + accelerator_type: str = proto.Field( + proto.STRING, + number=2, + ) + gpu_partition_size: str = proto.Field( + proto.STRING, + number=3, + ) + gpu_sharing_config: "GPUSharingConfig" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="GPUSharingConfig", + ) + gpu_driver_installation_config: "GPUDriverInstallationConfig" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="GPUDriverInstallationConfig", + ) + + +class GPUSharingConfig(proto.Message): + r"""GPUSharingConfig represents the GPU sharing configuration for + Hardware Accelerators. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_shared_clients_per_gpu (int): + The max number of containers that can share a + physical GPU. + gpu_sharing_strategy (google.cloud.container_v1.types.GPUSharingConfig.GPUSharingStrategy): + The type of GPU sharing strategy to enable on + the GPU node. + + This field is a member of `oneof`_ ``_gpu_sharing_strategy``. + """ + + class GPUSharingStrategy(proto.Enum): + r"""The type of GPU sharing strategy currently provided. + + Values: + GPU_SHARING_STRATEGY_UNSPECIFIED (0): + Default value. + TIME_SHARING (1): + GPUs are time-shared between containers. + """ + GPU_SHARING_STRATEGY_UNSPECIFIED = 0 + TIME_SHARING = 1 + + max_shared_clients_per_gpu: int = proto.Field( + proto.INT64, + number=1, + ) + gpu_sharing_strategy: GPUSharingStrategy = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=GPUSharingStrategy, + ) + + +class GPUDriverInstallationConfig(proto.Message): + r"""GPUDriverInstallationConfig specifies the version of GPU + driver to be auto installed. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gpu_driver_version (google.cloud.container_v1.types.GPUDriverInstallationConfig.GPUDriverVersion): + Mode for how the GPU driver is installed. + + This field is a member of `oneof`_ ``_gpu_driver_version``. + """ + + class GPUDriverVersion(proto.Enum): + r"""The GPU driver version to install. + + Values: + GPU_DRIVER_VERSION_UNSPECIFIED (0): + Default value is to not install any GPU + driver. + INSTALLATION_DISABLED (1): + Disable GPU driver auto installation and + needs manual installation + DEFAULT (2): + "Default" GPU driver in COS and Ubuntu. + LATEST (3): + "Latest" GPU driver in COS. + """ + GPU_DRIVER_VERSION_UNSPECIFIED = 0 + INSTALLATION_DISABLED = 1 + DEFAULT = 2 + LATEST = 3 + + gpu_driver_version: GPUDriverVersion = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=GPUDriverVersion, + ) + + +class WorkloadMetadataConfig(proto.Message): + r"""WorkloadMetadataConfig defines the metadata configuration to + expose to workloads on the node pool. + + Attributes: + mode (google.cloud.container_v1.types.WorkloadMetadataConfig.Mode): + Mode is the configuration for how to expose + metadata to workloads running on the node pool. + """ + + class Mode(proto.Enum): + r"""Mode is the configuration for how to expose metadata to + workloads running on the node. + + Values: + MODE_UNSPECIFIED (0): + Not set. + GCE_METADATA (1): + Expose all Compute Engine metadata to pods. 
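A sketch combining the accelerator messages above; the accelerator type string is a placeholder for any type supported in the target zone:

from google.cloud import container_v1

# Time-share one physical GPU between at most two containers.
accelerator = container_v1.AcceleratorConfig(
    accelerator_count=1,
    accelerator_type="nvidia-tesla-t4",
    gpu_sharing_config=container_v1.GPUSharingConfig(
        max_shared_clients_per_gpu=2,
        gpu_sharing_strategy=(
            container_v1.GPUSharingConfig.GPUSharingStrategy.TIME_SHARING
        ),
    ),
)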
+ GKE_METADATA (2): + Run the GKE Metadata Server on this node. The + GKE Metadata Server exposes a metadata API to + workloads that is compatible with the V1 Compute + Metadata APIs exposed by the Compute Engine and + App Engine Metadata Servers. This feature can + only be enabled if Workload Identity is enabled + at the cluster level. + """ + MODE_UNSPECIFIED = 0 + GCE_METADATA = 1 + GKE_METADATA = 2 + + mode: Mode = proto.Field( + proto.ENUM, + number=2, + enum=Mode, + ) + + +class SetNetworkPolicyRequest(proto.Message): + r"""SetNetworkPolicyRequest enables/disables network policy for a + cluster. + + Attributes: + project_id (str): + Deprecated. The Google Developers Console `project ID or + project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Deprecated. The name of the cluster. + This field has been deprecated and replaced by + the name field. + network_policy (google.cloud.container_v1.types.NetworkPolicy): + Required. Configuration options for the + NetworkPolicy feature. + name (str): + The name (project, location, cluster name) of the cluster to + set networking policy. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + network_policy: "NetworkPolicy" = proto.Field( + proto.MESSAGE, + number=4, + message="NetworkPolicy", + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class SetMaintenancePolicyRequest(proto.Message): + r"""SetMaintenancePolicyRequest sets the maintenance policy for a + cluster. + + Attributes: + project_id (str): + Required. The Google Developers Console `project ID or + project + number `__. + zone (str): + Required. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. + cluster_id (str): + Required. The name of the cluster to update. + maintenance_policy (google.cloud.container_v1.types.MaintenancePolicy): + Required. The maintenance policy to be set + for the cluster. An empty field clears the + existing maintenance policy. + name (str): + The name (project, location, cluster name) of the cluster to + set maintenance policy. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=4, + message="MaintenancePolicy", + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class StatusCondition(proto.Message): + r"""StatusCondition describes why a cluster or a node pool has a + certain status (e.g., ERROR or DEGRADED). + + Attributes: + code (google.cloud.container_v1.types.StatusCondition.Code): + Machine-friendly representation of the condition Deprecated. + Use canonical_code instead. + message (str): + Human-friendly representation of the + condition + canonical_code (google.rpc.code_pb2.Code): + Canonical code of the condition. + """ + + class Code(proto.Enum): + r"""Code for each condition + + Values: + UNKNOWN (0): + UNKNOWN indicates a generic condition. 
+ GCE_STOCKOUT (1): + GCE_STOCKOUT indicates that Google Compute Engine resources + are temporarily unavailable. + GKE_SERVICE_ACCOUNT_DELETED (2): + GKE_SERVICE_ACCOUNT_DELETED indicates that the user deleted + their robot service account. + GCE_QUOTA_EXCEEDED (3): + Google Compute Engine quota was exceeded. + SET_BY_OPERATOR (4): + Cluster state was manually changed by an SRE + due to a system logic error. + CLOUD_KMS_KEY_ERROR (7): + Unable to perform an encrypt operation + against the CloudKMS key used for etcd level + encryption. + CA_EXPIRING (9): + Cluster CA is expiring soon. + """ + UNKNOWN = 0 + GCE_STOCKOUT = 1 + GKE_SERVICE_ACCOUNT_DELETED = 2 + GCE_QUOTA_EXCEEDED = 3 + SET_BY_OPERATOR = 4 + CLOUD_KMS_KEY_ERROR = 7 + CA_EXPIRING = 9 + + code: Code = proto.Field( + proto.ENUM, + number=1, + enum=Code, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + canonical_code: code_pb2.Code = proto.Field( + proto.ENUM, + number=3, + enum=code_pb2.Code, + ) + + +class NetworkConfig(proto.Message): + r"""NetworkConfig reports the relative names of network & + subnetwork. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Output only. The relative name of the Google Compute Engine + [network]`google.container.v1.NetworkConfig.network `__ + to which the cluster is connected. Example: + projects/my-project/global/networks/my-network + subnetwork (str): + Output only. The relative name of the Google Compute Engine + `subnetwork `__ + to which the cluster is connected. Example: + projects/my-project/regions/us-central1/subnetworks/my-subnet + enable_intra_node_visibility (bool): + Whether Intra-node visibility is enabled for + this cluster. This makes same node pod to pod + traffic visible for VPC network. + default_snat_status (google.cloud.container_v1.types.DefaultSnatStatus): + Whether the cluster disables default in-node sNAT rules. + In-node sNAT rules will be disabled when default_snat_status + is disabled. When disabled is set to false, default IP + masquerade rules will be applied to the nodes to prevent + sNAT on cluster internal traffic. + enable_l4ilb_subsetting (bool): + Whether L4ILB Subsetting is enabled for this + cluster. + datapath_provider (google.cloud.container_v1.types.DatapathProvider): + The desired datapath provider for this + cluster. By default, uses the IPTables-based + kube-proxy implementation. + private_ipv6_google_access (google.cloud.container_v1.types.PrivateIPv6GoogleAccess): + The desired state of IPv6 connectivity to + Google Services. By default, no private IPv6 + access to or from Google Services (all access + will be via IPv4) + dns_config (google.cloud.container_v1.types.DNSConfig): + DNSConfig contains clusterDNS config for this + cluster. + service_external_ips_config (google.cloud.container_v1.types.ServiceExternalIPsConfig): + ServiceExternalIPsConfig specifies if + services with externalIPs field are blocked or + not. + gateway_api_config (google.cloud.container_v1.types.GatewayAPIConfig): + GatewayAPIConfig contains the desired config + of Gateway API on this cluster. + enable_multi_networking (bool): + Whether multi-networking is enabled for this + cluster. + network_performance_config (google.cloud.container_v1.types.NetworkConfig.ClusterNetworkPerformanceConfig): + Network bandwidth tier configuration. + enable_fqdn_network_policy (bool): + Whether FQDN Network Policy is enabled on + this cluster. 
+ + This field is a member of `oneof`_ ``_enable_fqdn_network_policy``. + """ + + class ClusterNetworkPerformanceConfig(proto.Message): + r"""Configuration of network bandwidth tiers + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_egress_bandwidth_tier (google.cloud.container_v1.types.NetworkConfig.ClusterNetworkPerformanceConfig.Tier): + Specifies the total network bandwidth tier + for NodePools in the cluster. + + This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``. + """ + + class Tier(proto.Enum): + r"""Node network tier + + Values: + TIER_UNSPECIFIED (0): + Default value + TIER_1 (1): + Higher bandwidth, actual values based on VM + size. + """ + TIER_UNSPECIFIED = 0 + TIER_1 = 1 + + total_egress_bandwidth_tier: "NetworkConfig.ClusterNetworkPerformanceConfig.Tier" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="NetworkConfig.ClusterNetworkPerformanceConfig.Tier", + ) + + network: str = proto.Field( + proto.STRING, + number=1, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=2, + ) + enable_intra_node_visibility: bool = proto.Field( + proto.BOOL, + number=5, + ) + default_snat_status: "DefaultSnatStatus" = proto.Field( + proto.MESSAGE, + number=7, + message="DefaultSnatStatus", + ) + enable_l4ilb_subsetting: bool = proto.Field( + proto.BOOL, + number=10, + ) + datapath_provider: "DatapathProvider" = proto.Field( + proto.ENUM, + number=11, + enum="DatapathProvider", + ) + private_ipv6_google_access: "PrivateIPv6GoogleAccess" = proto.Field( + proto.ENUM, + number=12, + enum="PrivateIPv6GoogleAccess", + ) + dns_config: "DNSConfig" = proto.Field( + proto.MESSAGE, + number=13, + message="DNSConfig", + ) + service_external_ips_config: "ServiceExternalIPsConfig" = proto.Field( + proto.MESSAGE, + number=15, + message="ServiceExternalIPsConfig", + ) + gateway_api_config: "GatewayAPIConfig" = proto.Field( + proto.MESSAGE, + number=16, + message="GatewayAPIConfig", + ) + enable_multi_networking: bool = proto.Field( + proto.BOOL, + number=17, + ) + network_performance_config: ClusterNetworkPerformanceConfig = proto.Field( + proto.MESSAGE, + number=18, + message=ClusterNetworkPerformanceConfig, + ) + enable_fqdn_network_policy: bool = proto.Field( + proto.BOOL, + number=19, + optional=True, + ) + + +class GatewayAPIConfig(proto.Message): + r"""GatewayAPIConfig contains the desired config of Gateway API + on this cluster. + + Attributes: + channel (google.cloud.container_v1.types.GatewayAPIConfig.Channel): + The Gateway API release channel to use for + Gateway API. + """ + + class Channel(proto.Enum): + r"""Channel describes if/how Gateway API should be installed and + implemented in a cluster. + + Values: + CHANNEL_UNSPECIFIED (0): + Default value. + CHANNEL_DISABLED (1): + Gateway API support is disabled + CHANNEL_EXPERIMENTAL (3): + Gateway API support is enabled, experimental + CRDs are installed + CHANNEL_STANDARD (4): + Gateway API support is enabled, standard CRDs + are installed + """ + CHANNEL_UNSPECIFIED = 0 + CHANNEL_DISABLED = 1 + CHANNEL_EXPERIMENTAL = 3 + CHANNEL_STANDARD = 4 + + channel: Channel = proto.Field( + proto.ENUM, + number=1, + enum=Channel, + ) + + +class ServiceExternalIPsConfig(proto.Message): + r"""Config to block services with externalIPs field. + + Attributes: + enabled (bool): + Whether Services with ExternalIPs field are + allowed or not. 
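+
+    A minimal illustrative sketch of wiring this message into a cluster's
+    network configuration with proto-plus keyword arguments (the field
+    values here are hypothetical)::
+
+        from google.cloud import container_v1
+
+        # Disallow Services from using the externalIPs field.
+        network_config = container_v1.NetworkConfig(
+            service_external_ips_config=container_v1.ServiceExternalIPsConfig(
+                enabled=False,
+            ),
+        )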
+    """
+
+    enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class GetOpenIDConfigRequest(proto.Message):
+    r"""GetOpenIDConfigRequest gets the OIDC discovery document for
+    the cluster. See the OpenID Connect Discovery 1.0 specification
+    for details.
+
+    Attributes:
+        parent (str):
+            The cluster (project, location, cluster name) to get the
+            discovery document for. Specified in the format
+            ``projects/*/locations/*/clusters/*``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class GetOpenIDConfigResponse(proto.Message):
+    r"""GetOpenIDConfigResponse is an OIDC discovery document for the
+    cluster. See the OpenID Connect Discovery 1.0 specification for
+    details.
+
+    Attributes:
+        issuer (str):
+            OIDC Issuer.
+        jwks_uri (str):
+            JSON Web Key URI.
+        response_types_supported (MutableSequence[str]):
+            Supported response types.
+        subject_types_supported (MutableSequence[str]):
+            Supported subject types.
+        id_token_signing_alg_values_supported (MutableSequence[str]):
+            Supported ID token signing algorithms.
+        claims_supported (MutableSequence[str]):
+            Supported claims.
+        grant_types (MutableSequence[str]):
+            Supported grant types.
+    """
+
+    issuer: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    jwks_uri: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    response_types_supported: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+    subject_types_supported: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=4,
+    )
+    id_token_signing_alg_values_supported: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=5,
+    )
+    claims_supported: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=6,
+    )
+    grant_types: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=7,
+    )
+
+
+class GetJSONWebKeysRequest(proto.Message):
+    r"""GetJSONWebKeysRequest gets the public component of the keys used by
+    the cluster to sign token requests. This will be the jwks_uri for
+    the discovery document returned by getOpenIDConfig. See the OpenID
+    Connect Discovery 1.0 specification for details.
+
+    Attributes:
+        parent (str):
+            The cluster (project, location, cluster name) to get keys
+            for. Specified in the format
+            ``projects/*/locations/*/clusters/*``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class Jwk(proto.Message):
+    r"""Jwk is a JSON Web Key as specified in RFC 7517.
+
+    Attributes:
+        kty (str):
+            Key Type.
+        alg (str):
+            Algorithm.
+        use (str):
+            Permitted uses for the public keys.
+        kid (str):
+            Key ID.
+        n (str):
+            Used for RSA keys.
+        e (str):
+            Used for RSA keys.
+        x (str):
+            Used for ECDSA keys.
+        y (str):
+            Used for ECDSA keys.
+        crv (str):
+            Used for ECDSA keys.
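+
+    A minimal illustrative sketch of fetching and reading the cluster's
+    JWKs (the project and cluster names are hypothetical; the request is
+    passed in the standard GAPIC dict form)::
+
+        from google.cloud import container_v1
+
+        client = container_v1.ClusterManagerClient()
+        response = client.get_json_web_keys(
+            request={
+                "parent": "projects/my-project/locations/us-central1/clusters/my-cluster"
+            }
+        )
+        for jwk in response.keys:
+            print(jwk.kid, jwk.kty, jwk.alg)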
+    """
+
+    kty: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    alg: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    use: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    kid: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    n: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    e: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    x: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    y: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    crv: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+
+
+class GetJSONWebKeysResponse(proto.Message):
+    r"""GetJSONWebKeysResponse is a valid JSON Web Key Set as
+    specified in RFC 7517.
+
+    Attributes:
+        keys (MutableSequence[google.cloud.container_v1.types.Jwk]):
+            The public component of the keys used by the
+            cluster to sign token requests.
+    """
+
+    keys: MutableSequence["Jwk"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Jwk",
+    )
+
+
+class CheckAutopilotCompatibilityRequest(proto.Message):
+    r"""CheckAutopilotCompatibilityRequest requests getting the
+    blockers for the given operation in the cluster.
+
+    Attributes:
+        name (str):
+            The name (project, location, cluster) of the cluster to
+            retrieve. Specified in the format
+            ``projects/*/locations/*/clusters/*``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class AutopilotCompatibilityIssue(proto.Message):
+    r"""AutopilotCompatibilityIssue contains information about a
+    specific compatibility issue with Autopilot mode.
+
+    Attributes:
+        last_observation (google.protobuf.timestamp_pb2.Timestamp):
+            The last time when this issue was observed.
+        constraint_type (str):
+            The constraint type of the issue.
+        incompatibility_type (google.cloud.container_v1.types.AutopilotCompatibilityIssue.IssueType):
+            The incompatibility type of this issue.
+        subjects (MutableSequence[str]):
+            The names of the resources which are subject
+            to this issue.
+        documentation_url (str):
+            A URL to public documentation that
+            addresses resolving this issue.
+        description (str):
+            The description of the issue.
+    """
+
+    class IssueType(proto.Enum):
+        r"""The type of the reported issue.
+
+        Values:
+            UNSPECIFIED (0):
+                Default value, should not be used.
+            INCOMPATIBILITY (1):
+                Indicates that the issue is a known
+                incompatibility between the cluster and
+                Autopilot mode.
+            ADDITIONAL_CONFIG_REQUIRED (2):
+                Indicates the issue is an incompatibility if
+                customers take no further action to resolve it.
+            PASSED_WITH_OPTIONAL_CONFIG (3):
+                Indicates the issue is not an
+                incompatibility, but depending on the workload's
+                business logic, there is a potential that it
+                won't work on Autopilot.
+        """
+        UNSPECIFIED = 0
+        INCOMPATIBILITY = 1
+        ADDITIONAL_CONFIG_REQUIRED = 2
+        PASSED_WITH_OPTIONAL_CONFIG = 3
+
+    last_observation: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=timestamp_pb2.Timestamp,
+    )
+    constraint_type: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    incompatibility_type: IssueType = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=IssueType,
+    )
+    subjects: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=4,
+    )
+    documentation_url: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+
+
+class CheckAutopilotCompatibilityResponse(proto.Message):
+    r"""CheckAutopilotCompatibilityResponse has a list of
+    compatibility issues.
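+
+    A minimal illustrative sketch of requesting and reading this response
+    (the cluster name is hypothetical)::
+
+        from google.cloud import container_v1
+
+        client = container_v1.ClusterManagerClient()
+        response = client.check_autopilot_compatibility(
+            request={
+                "name": "projects/my-project/locations/us-central1/clusters/my-cluster"
+            }
+        )
+        for issue in response.issues:
+            print(issue.incompatibility_type, issue.description)
+        print(response.summary)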
+ + Attributes: + issues (MutableSequence[google.cloud.container_v1.types.AutopilotCompatibilityIssue]): + The list of issues for the given operation. + summary (str): + The summary of the autopilot compatibility + response. + """ + + issues: MutableSequence["AutopilotCompatibilityIssue"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AutopilotCompatibilityIssue", + ) + summary: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ReleaseChannel(proto.Message): + r"""ReleaseChannel indicates which release channel a cluster is + subscribed to. Release channels are arranged in order of risk. + + When a cluster is subscribed to a release channel, Google + maintains both the master version and the node version. Node + auto-upgrade defaults to true and cannot be disabled. + + Attributes: + channel (google.cloud.container_v1.types.ReleaseChannel.Channel): + channel specifies which release channel the + cluster is subscribed to. + """ + + class Channel(proto.Enum): + r"""Possible values for 'channel'. + + Values: + UNSPECIFIED (0): + No channel specified. + RAPID (1): + RAPID channel is offered on an early access + basis for customers who want to test new + releases. + + WARNING: Versions available in the RAPID Channel + may be subject to unresolved issues with no + known workaround and are not subject to any + SLAs. + REGULAR (2): + Clusters subscribed to REGULAR receive + versions that are considered GA quality. REGULAR + is intended for production users who want to + take advantage of new features. + STABLE (3): + Clusters subscribed to STABLE receive + versions that are known to be stable and + reliable in production. + """ + UNSPECIFIED = 0 + RAPID = 1 + REGULAR = 2 + STABLE = 3 + + channel: Channel = proto.Field( + proto.ENUM, + number=1, + enum=Channel, + ) + + +class CostManagementConfig(proto.Message): + r"""Configuration for fine-grained cost management feature. + + Attributes: + enabled (bool): + Whether the feature is enabled or not. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class IntraNodeVisibilityConfig(proto.Message): + r"""IntraNodeVisibilityConfig contains the desired config of the + intra-node visibility on this cluster. + + Attributes: + enabled (bool): + Enables intra node visibility for this + cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ILBSubsettingConfig(proto.Message): + r"""ILBSubsettingConfig contains the desired config of L4 + Internal LoadBalancer subsetting on this cluster. + + Attributes: + enabled (bool): + Enables l4 ILB subsetting for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class DNSConfig(proto.Message): + r"""DNSConfig contains the desired set of options for configuring + clusterDNS. + + Attributes: + cluster_dns (google.cloud.container_v1.types.DNSConfig.Provider): + cluster_dns indicates which in-cluster DNS provider should + be used. + cluster_dns_scope (google.cloud.container_v1.types.DNSConfig.DNSScope): + cluster_dns_scope indicates the scope of access to cluster + DNS records. + cluster_dns_domain (str): + cluster_dns_domain is the suffix used for all cluster + service records. + """ + + class Provider(proto.Enum): + r"""Provider lists the various in-cluster DNS providers. + + Values: + PROVIDER_UNSPECIFIED (0): + Default value + PLATFORM_DEFAULT (1): + Use GKE default DNS provider(kube-dns) for + DNS resolution. + CLOUD_DNS (2): + Use CloudDNS for DNS resolution. 
+ KUBE_DNS (3): + Use KubeDNS for DNS resolution. + """ + PROVIDER_UNSPECIFIED = 0 + PLATFORM_DEFAULT = 1 + CLOUD_DNS = 2 + KUBE_DNS = 3 + + class DNSScope(proto.Enum): + r"""DNSScope lists the various scopes of access to cluster DNS + records. + + Values: + DNS_SCOPE_UNSPECIFIED (0): + Default value, will be inferred as cluster + scope. + CLUSTER_SCOPE (1): + DNS records are accessible from within the + cluster. + VPC_SCOPE (2): + DNS records are accessible from within the + VPC. + """ + DNS_SCOPE_UNSPECIFIED = 0 + CLUSTER_SCOPE = 1 + VPC_SCOPE = 2 + + cluster_dns: Provider = proto.Field( + proto.ENUM, + number=1, + enum=Provider, + ) + cluster_dns_scope: DNSScope = proto.Field( + proto.ENUM, + number=2, + enum=DNSScope, + ) + cluster_dns_domain: str = proto.Field( + proto.STRING, + number=3, + ) + + +class MaxPodsConstraint(proto.Message): + r"""Constraints applied to pods. + + Attributes: + max_pods_per_node (int): + Constraint enforced on the max num of pods + per node. + """ + + max_pods_per_node: int = proto.Field( + proto.INT64, + number=1, + ) + + +class WorkloadIdentityConfig(proto.Message): + r"""Configuration for the use of Kubernetes Service Accounts in + GCP IAM policies. + + Attributes: + workload_pool (str): + The workload pool to attach all Kubernetes + service accounts to. + """ + + workload_pool: str = proto.Field( + proto.STRING, + number=2, + ) + + +class IdentityServiceConfig(proto.Message): + r"""IdentityServiceConfig is configuration for Identity Service + which allows customers to use external identity providers with + the K8S API + + Attributes: + enabled (bool): + Whether to enable the Identity Service + component + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class MeshCertificates(proto.Message): + r"""Configuration for issuance of mTLS keys and certificates to + Kubernetes pods. + + Attributes: + enable_certificates (google.protobuf.wrappers_pb2.BoolValue): + enable_certificates controls issuance of workload mTLS + certificates. + + If set, the GKE Workload Identity Certificates controller + and node agent will be deployed in the cluster, which can + then be configured by creating a WorkloadCertificateConfig + Custom Resource. + + Requires Workload Identity + ([workload_pool][google.container.v1.WorkloadIdentityConfig.workload_pool] + must be non-empty). + """ + + enable_certificates: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.BoolValue, + ) + + +class DatabaseEncryption(proto.Message): + r"""Configuration of etcd encryption. + + Attributes: + key_name (str): + Name of CloudKMS key to use for the + encryption of secrets in etcd. Ex. + projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key + state (google.cloud.container_v1.types.DatabaseEncryption.State): + The desired state of etcd encryption. + """ + + class State(proto.Enum): + r"""State of etcd encryption. + + Values: + UNKNOWN (0): + Should never be set + ENCRYPTED (1): + Secrets in etcd are encrypted. + DECRYPTED (2): + Secrets in etcd are stored in plain text (at + etcd level) - this is unrelated to Compute + Engine level full disk encryption. + """ + UNKNOWN = 0 + ENCRYPTED = 1 + DECRYPTED = 2 + + key_name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + + +class ListUsableSubnetworksRequest(proto.Message): + r"""ListUsableSubnetworksRequest requests the list of usable + subnetworks available to a user for creating clusters. 
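+
+    A minimal illustrative sketch of issuing this request (the project and
+    network-project IDs are hypothetical; the returned pager fetches
+    subsequent pages transparently)::
+
+        from google.cloud import container_v1
+
+        client = container_v1.ClusterManagerClient()
+        pager = client.list_usable_subnetworks(
+            request={
+                "parent": "projects/my-project",
+                "filter": "networkProjectId=my-network-project",
+                "page_size": 100,
+            }
+        )
+        for subnetwork in pager:
+            print(subnetwork.subnetwork, subnetwork.ip_cidr_range)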
+ + Attributes: + parent (str): + The parent project where subnetworks are usable. Specified + in the format ``projects/*``. + filter (str): + Filtering currently only supports equality on the + networkProjectId and must be in the form: + "networkProjectId=[PROJECTID]", where ``networkProjectId`` + is the project which owns the listed subnetworks. This + defaults to the parent project ID. + page_size (int): + The max number of results per page that should be returned. + If the number of available results is larger than + ``page_size``, a ``next_page_token`` is returned which can + be used to get the next page of results in subsequent + requests. Acceptable values are 0 to 500, inclusive. + (Default: 500) + page_token (str): + Specifies a page token to use. Set this to + the nextPageToken returned by previous list + requests to get the next page of results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListUsableSubnetworksResponse(proto.Message): + r"""ListUsableSubnetworksResponse is the response of + ListUsableSubnetworksRequest. + + Attributes: + subnetworks (MutableSequence[google.cloud.container_v1.types.UsableSubnetwork]): + A list of usable subnetworks in the specified + network project. + next_page_token (str): + This token allows you to get the next page of results for + list requests. If the number of results is larger than + ``page_size``, use the ``next_page_token`` as a value for + the query parameter ``page_token`` in the next request. The + value will become empty when there are no more pages. + """ + + @property + def raw_page(self): + return self + + subnetworks: MutableSequence["UsableSubnetwork"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="UsableSubnetwork", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UsableSubnetworkSecondaryRange(proto.Message): + r"""Secondary IP range of a usable subnetwork. + + Attributes: + range_name (str): + The name associated with this subnetwork + secondary range, used when adding an alias IP + range to a VM instance. + ip_cidr_range (str): + The range of IP addresses belonging to this + subnetwork secondary range. + status (google.cloud.container_v1.types.UsableSubnetworkSecondaryRange.Status): + This field is to determine the status of the + secondary range programmably. + """ + + class Status(proto.Enum): + r"""Status shows the current usage of a secondary IP range. + + Values: + UNKNOWN (0): + UNKNOWN is the zero value of the Status enum. + It's not a valid status. + UNUSED (1): + UNUSED denotes that this range is unclaimed + by any cluster. + IN_USE_SERVICE (2): + IN_USE_SERVICE denotes that this range is claimed by + cluster(s) for services. User-managed services range can be + shared between clusters within the same subnetwork. + IN_USE_SHAREABLE_POD (3): + IN_USE_SHAREABLE_POD denotes this range was created by the + network admin and is currently claimed by a cluster for + pods. It can only be used by other clusters as a pod range. + IN_USE_MANAGED_POD (4): + IN_USE_MANAGED_POD denotes this range was created by GKE and + is claimed for pods. It cannot be used for other clusters. 
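+
+        A minimal illustrative sketch of filtering ranges by status
+        (``subnet`` is assumed to be a ``UsableSubnetwork`` previously
+        fetched via ``list_usable_subnetworks``)::
+
+            from google.cloud import container_v1
+
+            Status = container_v1.UsableSubnetworkSecondaryRange.Status
+            unused = [
+                r for r in subnet.secondary_ip_ranges if r.status == Status.UNUSED
+            ]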
+ """ + UNKNOWN = 0 + UNUSED = 1 + IN_USE_SERVICE = 2 + IN_USE_SHAREABLE_POD = 3 + IN_USE_MANAGED_POD = 4 + + range_name: str = proto.Field( + proto.STRING, + number=1, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=2, + ) + status: Status = proto.Field( + proto.ENUM, + number=3, + enum=Status, + ) + + +class UsableSubnetwork(proto.Message): + r"""UsableSubnetwork resource returns the subnetwork name, its + associated network and the primary CIDR range. + + Attributes: + subnetwork (str): + Subnetwork Name. + Example: + projects/my-project/regions/us-central1/subnetworks/my-subnet + network (str): + Network Name. + Example: + projects/my-project/global/networks/my-network + ip_cidr_range (str): + The range of internal addresses that are + owned by this subnetwork. + secondary_ip_ranges (MutableSequence[google.cloud.container_v1.types.UsableSubnetworkSecondaryRange]): + Secondary IP ranges. + status_message (str): + A human readable status message representing the reasons for + cases where the caller cannot use the secondary ranges under + the subnet. For example if the secondary_ip_ranges is empty + due to a permission issue, an insufficient permission + message will be given by status_message. + """ + + subnetwork: str = proto.Field( + proto.STRING, + number=1, + ) + network: str = proto.Field( + proto.STRING, + number=2, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=3, + ) + secondary_ip_ranges: MutableSequence[ + "UsableSubnetworkSecondaryRange" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="UsableSubnetworkSecondaryRange", + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ResourceUsageExportConfig(proto.Message): + r"""Configuration for exporting cluster resource usages. + + Attributes: + bigquery_destination (google.cloud.container_v1.types.ResourceUsageExportConfig.BigQueryDestination): + Configuration to use BigQuery as usage export + destination. + enable_network_egress_metering (bool): + Whether to enable network egress metering for + this cluster. If enabled, a daemonset will be + created in the cluster to meter network egress + traffic. + consumption_metering_config (google.cloud.container_v1.types.ResourceUsageExportConfig.ConsumptionMeteringConfig): + Configuration to enable resource consumption + metering. + """ + + class BigQueryDestination(proto.Message): + r"""Parameters for using BigQuery as the destination of resource + usage export. + + Attributes: + dataset_id (str): + The ID of a BigQuery Dataset. + """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + + class ConsumptionMeteringConfig(proto.Message): + r"""Parameters for controlling consumption metering. + + Attributes: + enabled (bool): + Whether to enable consumption metering for + this cluster. If enabled, a second BigQuery + table will be created to hold resource + consumption records. 
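+
+        A minimal illustrative sketch of assembling the parent
+        ``ResourceUsageExportConfig`` with proto-plus keyword arguments
+        (the dataset ID is hypothetical)::
+
+            from google.cloud import container_v1
+
+            # Export usage to BigQuery with network egress and
+            # consumption metering both enabled.
+            export_config = container_v1.ResourceUsageExportConfig(
+                bigquery_destination=container_v1.ResourceUsageExportConfig.BigQueryDestination(
+                    dataset_id="gke_usage_metering",
+                ),
+                enable_network_egress_metering=True,
+                consumption_metering_config=container_v1.ResourceUsageExportConfig.ConsumptionMeteringConfig(
+                    enabled=True,
+                ),
+            )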
+ """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + bigquery_destination: BigQueryDestination = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryDestination, + ) + enable_network_egress_metering: bool = proto.Field( + proto.BOOL, + number=2, + ) + consumption_metering_config: ConsumptionMeteringConfig = proto.Field( + proto.MESSAGE, + number=3, + message=ConsumptionMeteringConfig, + ) + + +class VerticalPodAutoscaling(proto.Message): + r"""VerticalPodAutoscaling contains global, per-cluster + information required by Vertical Pod Autoscaler to automatically + adjust the resources of pods controlled by it. + + Attributes: + enabled (bool): + Enables vertical pod autoscaling. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class DefaultSnatStatus(proto.Message): + r"""DefaultSnatStatus contains the desired state of whether + default sNAT should be disabled on the cluster. + + Attributes: + disabled (bool): + Disables cluster default sNAT rules. + """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ShieldedNodes(proto.Message): + r"""Configuration of Shielded Nodes feature. + + Attributes: + enabled (bool): + Whether Shielded Nodes features are enabled + on all nodes in this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class VirtualNIC(proto.Message): + r"""Configuration of gVNIC feature. + + Attributes: + enabled (bool): + Whether gVNIC features are enabled in the + node pool. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class FastSocket(proto.Message): + r"""Configuration of Fast Socket feature. + + Attributes: + enabled (bool): + Whether Fast Socket features are enabled in + the node pool. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class NotificationConfig(proto.Message): + r"""NotificationConfig is the configuration of notifications. + + Attributes: + pubsub (google.cloud.container_v1.types.NotificationConfig.PubSub): + Notification config for Pub/Sub. + """ + + class EventType(proto.Enum): + r"""Types of notifications currently supported. Can be used to + filter what notifications are sent. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + Not set, will be ignored. + UPGRADE_AVAILABLE_EVENT (1): + Corresponds with UpgradeAvailableEvent. + UPGRADE_EVENT (2): + Corresponds with UpgradeEvent. + SECURITY_BULLETIN_EVENT (3): + Corresponds with SecurityBulletinEvent. + """ + EVENT_TYPE_UNSPECIFIED = 0 + UPGRADE_AVAILABLE_EVENT = 1 + UPGRADE_EVENT = 2 + SECURITY_BULLETIN_EVENT = 3 + + class PubSub(proto.Message): + r"""Pub/Sub specific notification config. + + Attributes: + enabled (bool): + Enable notifications for Pub/Sub. + topic (str): + The desired Pub/Sub topic to which notifications will be + sent by GKE. Format is + ``projects/{project}/topics/{topic}``. + filter (google.cloud.container_v1.types.NotificationConfig.Filter): + Allows filtering to one or more specific + event types. If no filter is specified, or if a + filter is specified with no event types, all + event types will be sent + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + topic: str = proto.Field( + proto.STRING, + number=2, + ) + filter: "NotificationConfig.Filter" = proto.Field( + proto.MESSAGE, + number=3, + message="NotificationConfig.Filter", + ) + + class Filter(proto.Message): + r"""Allows filtering to one or more specific event types. 
If + event types are present, those and only those event types will + be transmitted to the cluster. Other types will be skipped. If + no filter is specified, or no event types are present, all event + types will be sent + + Attributes: + event_type (MutableSequence[google.cloud.container_v1.types.NotificationConfig.EventType]): + Event types to allowlist. + """ + + event_type: MutableSequence[ + "NotificationConfig.EventType" + ] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="NotificationConfig.EventType", + ) + + pubsub: PubSub = proto.Field( + proto.MESSAGE, + number=1, + message=PubSub, + ) + + +class ConfidentialNodes(proto.Message): + r"""ConfidentialNodes is configuration for the confidential nodes + feature, which makes nodes run on confidential VMs. + + Attributes: + enabled (bool): + Whether Confidential Nodes feature is + enabled. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class UpgradeEvent(proto.Message): + r"""UpgradeEvent is a notification sent to customers by the + cluster server when a resource is upgrading. + + Attributes: + resource_type (google.cloud.container_v1.types.UpgradeResourceType): + The resource type that is upgrading. + operation (str): + The operation associated with this upgrade. + operation_start_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the operation was started. + current_version (str): + The current version before the upgrade. + target_version (str): + The target version for the upgrade. + resource (str): + Optional relative path to the resource. For + example in node pool upgrades, the relative path + of the node pool. + """ + + resource_type: "UpgradeResourceType" = proto.Field( + proto.ENUM, + number=1, + enum="UpgradeResourceType", + ) + operation: str = proto.Field( + proto.STRING, + number=2, + ) + operation_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + current_version: str = proto.Field( + proto.STRING, + number=4, + ) + target_version: str = proto.Field( + proto.STRING, + number=5, + ) + resource: str = proto.Field( + proto.STRING, + number=6, + ) + + +class UpgradeAvailableEvent(proto.Message): + r"""UpgradeAvailableEvent is a notification sent to customers + when a new available version is released. + + Attributes: + version (str): + The release version available for upgrade. + resource_type (google.cloud.container_v1.types.UpgradeResourceType): + The resource type of the release version. + release_channel (google.cloud.container_v1.types.ReleaseChannel): + The release channel of the version. If empty, + it means a non-channel release. + resource (str): + Optional relative path to the resource. For + example, the relative path of the node pool. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: "UpgradeResourceType" = proto.Field( + proto.ENUM, + number=2, + enum="UpgradeResourceType", + ) + release_channel: "ReleaseChannel" = proto.Field( + proto.MESSAGE, + number=3, + message="ReleaseChannel", + ) + resource: str = proto.Field( + proto.STRING, + number=4, + ) + + +class SecurityBulletinEvent(proto.Message): + r"""SecurityBulletinEvent is a notification sent to customers + when a security bulletin has been posted that they are + vulnerable to. + + Attributes: + resource_type_affected (str): + The resource type (node/control plane) that + has the vulnerability. 
Multiple notifications (1 + notification per resource type) will be sent for + a vulnerability that affects > 1 resource type. + bulletin_id (str): + The ID of the bulletin corresponding to the + vulnerability. + cve_ids (MutableSequence[str]): + The CVEs associated with this bulletin. + severity (str): + The severity of this bulletin as it relates + to GKE. + bulletin_uri (str): + The URI link to the bulletin on the website + for more information. + brief_description (str): + A brief description of the bulletin. See the bulletin + pointed to by the bulletin_uri field for an expanded + description. + affected_supported_minors (MutableSequence[str]): + The GKE minor versions affected by this + vulnerability. + patched_versions (MutableSequence[str]): + The GKE versions where this vulnerability is + patched. + suggested_upgrade_target (str): + This represents a version selected from the patched_versions + field that the cluster receiving this notification should + most likely want to upgrade to based on its current version. + Note that if this notification is being received by a given + cluster, it means that this version is currently available + as an upgrade target in that cluster's location. + manual_steps_required (bool): + If this field is specified, it means there + are manual steps that the user must take to make + their clusters safe. + """ + + resource_type_affected: str = proto.Field( + proto.STRING, + number=1, + ) + bulletin_id: str = proto.Field( + proto.STRING, + number=2, + ) + cve_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + severity: str = proto.Field( + proto.STRING, + number=4, + ) + bulletin_uri: str = proto.Field( + proto.STRING, + number=5, + ) + brief_description: str = proto.Field( + proto.STRING, + number=6, + ) + affected_supported_minors: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + patched_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + suggested_upgrade_target: str = proto.Field( + proto.STRING, + number=9, + ) + manual_steps_required: bool = proto.Field( + proto.BOOL, + number=10, + ) + + +class Autopilot(proto.Message): + r"""Autopilot is the configuration for Autopilot settings on the + cluster. + + Attributes: + enabled (bool): + Enable Autopilot + workload_policy_config (google.cloud.container_v1.types.WorkloadPolicyConfig): + Workload policy configuration for Autopilot. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + workload_policy_config: "WorkloadPolicyConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="WorkloadPolicyConfig", + ) + + +class WorkloadPolicyConfig(proto.Message): + r"""WorkloadPolicyConfig is the configuration of workload policy + for autopilot clusters. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + allow_net_admin (bool): + If true, workloads can use NET_ADMIN capability. + + This field is a member of `oneof`_ ``_allow_net_admin``. + """ + + allow_net_admin: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class LoggingConfig(proto.Message): + r"""LoggingConfig is cluster logging configuration. 
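+
+    A minimal illustrative sketch of building this message with proto-plus
+    keyword arguments::
+
+        from google.cloud import container_v1
+
+        Component = container_v1.LoggingComponentConfig.Component
+        logging_config = container_v1.LoggingConfig(
+            component_config=container_v1.LoggingComponentConfig(
+                enable_components=[
+                    Component.SYSTEM_COMPONENTS,
+                    Component.WORKLOADS,
+                ],
+            ),
+        )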
+ + Attributes: + component_config (google.cloud.container_v1.types.LoggingComponentConfig): + Logging components configuration + """ + + component_config: "LoggingComponentConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="LoggingComponentConfig", + ) + + +class LoggingComponentConfig(proto.Message): + r"""LoggingComponentConfig is cluster logging component + configuration. + + Attributes: + enable_components (MutableSequence[google.cloud.container_v1.types.LoggingComponentConfig.Component]): + Select components to collect logs. An empty + set would disable all logging. + """ + + class Component(proto.Enum): + r"""GKE components exposing logs + + Values: + COMPONENT_UNSPECIFIED (0): + Default value. This shouldn't be used. + SYSTEM_COMPONENTS (1): + system components + WORKLOADS (2): + workloads + APISERVER (3): + kube-apiserver + SCHEDULER (4): + kube-scheduler + CONTROLLER_MANAGER (5): + kube-controller-manager + """ + COMPONENT_UNSPECIFIED = 0 + SYSTEM_COMPONENTS = 1 + WORKLOADS = 2 + APISERVER = 3 + SCHEDULER = 4 + CONTROLLER_MANAGER = 5 + + enable_components: MutableSequence[Component] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=Component, + ) + + +class MonitoringConfig(proto.Message): + r"""MonitoringConfig is cluster monitoring configuration. + + Attributes: + component_config (google.cloud.container_v1.types.MonitoringComponentConfig): + Monitoring components configuration + managed_prometheus_config (google.cloud.container_v1.types.ManagedPrometheusConfig): + Enable Google Cloud Managed Service for + Prometheus in the cluster. + advanced_datapath_observability_config (google.cloud.container_v1.types.AdvancedDatapathObservabilityConfig): + Configuration of Advanced Datapath + Observability features. + """ + + component_config: "MonitoringComponentConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="MonitoringComponentConfig", + ) + managed_prometheus_config: "ManagedPrometheusConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ManagedPrometheusConfig", + ) + advanced_datapath_observability_config: "AdvancedDatapathObservabilityConfig" = ( + proto.Field( + proto.MESSAGE, + number=3, + message="AdvancedDatapathObservabilityConfig", + ) + ) + + +class AdvancedDatapathObservabilityConfig(proto.Message): + r"""AdvancedDatapathObservabilityConfig specifies configuration + of observability features of advanced datapath. + + Attributes: + enable_metrics (bool): + Expose flow metrics on nodes + relay_mode (google.cloud.container_v1.types.AdvancedDatapathObservabilityConfig.RelayMode): + Method used to make Relay available + """ + + class RelayMode(proto.Enum): + r"""Supported Relay modes + + Values: + RELAY_MODE_UNSPECIFIED (0): + Default value. This shouldn't be used. + DISABLED (1): + disabled + INTERNAL_VPC_LB (3): + exposed via internal load balancer + EXTERNAL_LB (4): + exposed via external load balancer + """ + RELAY_MODE_UNSPECIFIED = 0 + DISABLED = 1 + INTERNAL_VPC_LB = 3 + EXTERNAL_LB = 4 + + enable_metrics: bool = proto.Field( + proto.BOOL, + number=1, + ) + relay_mode: RelayMode = proto.Field( + proto.ENUM, + number=2, + enum=RelayMode, + ) + + +class NodePoolLoggingConfig(proto.Message): + r"""NodePoolLoggingConfig specifies logging configuration for + nodepools. + + Attributes: + variant_config (google.cloud.container_v1.types.LoggingVariantConfig): + Logging variant configuration. 
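+
+    A minimal illustrative sketch of selecting the high-throughput logging
+    agent variant for a node pool::
+
+        from google.cloud import container_v1
+
+        pool_logging = container_v1.NodePoolLoggingConfig(
+            variant_config=container_v1.LoggingVariantConfig(
+                variant=container_v1.LoggingVariantConfig.Variant.MAX_THROUGHPUT,
+            ),
+        )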
+ """ + + variant_config: "LoggingVariantConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="LoggingVariantConfig", + ) + + +class LoggingVariantConfig(proto.Message): + r"""LoggingVariantConfig specifies the behaviour of the logging + component. + + Attributes: + variant (google.cloud.container_v1.types.LoggingVariantConfig.Variant): + Logging variant deployed on nodes. + """ + + class Variant(proto.Enum): + r"""Logging component variants. + + Values: + VARIANT_UNSPECIFIED (0): + Default value. This shouldn't be used. + DEFAULT (1): + default logging variant. + MAX_THROUGHPUT (2): + maximum logging throughput variant. + """ + VARIANT_UNSPECIFIED = 0 + DEFAULT = 1 + MAX_THROUGHPUT = 2 + + variant: Variant = proto.Field( + proto.ENUM, + number=1, + enum=Variant, + ) + + +class MonitoringComponentConfig(proto.Message): + r"""MonitoringComponentConfig is cluster monitoring component + configuration. + + Attributes: + enable_components (MutableSequence[google.cloud.container_v1.types.MonitoringComponentConfig.Component]): + Select components to collect metrics. An + empty set would disable all monitoring. + """ + + class Component(proto.Enum): + r"""GKE components exposing metrics + + Values: + COMPONENT_UNSPECIFIED (0): + Default value. This shouldn't be used. + SYSTEM_COMPONENTS (1): + system components + APISERVER (3): + kube-apiserver + SCHEDULER (4): + kube-scheduler + CONTROLLER_MANAGER (5): + kube-controller-manager + STORAGE (7): + Storage + HPA (8): + Horizontal Pod Autoscaling + POD (9): + Pod + DAEMONSET (10): + DaemonSet + DEPLOYMENT (11): + Deployment + STATEFULSET (12): + Statefulset + """ + COMPONENT_UNSPECIFIED = 0 + SYSTEM_COMPONENTS = 1 + APISERVER = 3 + SCHEDULER = 4 + CONTROLLER_MANAGER = 5 + STORAGE = 7 + HPA = 8 + POD = 9 + DAEMONSET = 10 + DEPLOYMENT = 11 + STATEFULSET = 12 + + enable_components: MutableSequence[Component] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=Component, + ) + + +class ManagedPrometheusConfig(proto.Message): + r"""ManagedPrometheusConfig defines the configuration for + Google Cloud Managed Service for Prometheus. + + Attributes: + enabled (bool): + Enable Managed Collection. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class Fleet(proto.Message): + r"""Fleet is the fleet configuration for the cluster. + + Attributes: + project (str): + The Fleet host project(project ID or project + number) where this cluster will be registered + to. This field cannot be changed after the + cluster has been registered. + membership (str): + [Output only] The full resource name of the registered fleet + membership of the cluster, in the format + ``//gkehub.googleapis.com/projects/*/locations/*/memberships/*``. + pre_registered (bool): + [Output only] Whether the cluster has been registered + through the fleet API. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + membership: str = proto.Field( + proto.STRING, + number=2, + ) + pre_registered: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class LocalNvmeSsdBlockConfig(proto.Message): + r"""LocalNvmeSsdBlockConfig contains configuration for using + raw-block local NVMe SSD. + + Attributes: + local_ssd_count (int): + The number of raw-block local NVMe SSD disks + to be attached to the node. Each local SSD is + 375 GB in size. If zero, it means no raw-block + local NVMe SSD disks to be attached to the node. + The limit for this value is dependent upon the + maximum number of disks available on a machine + per zone. 
See: + + https://cloud.google.com/compute/docs/disks/local-ssd + for more information. + """ + + local_ssd_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +class EphemeralStorageLocalSsdConfig(proto.Message): + r"""EphemeralStorageLocalSsdConfig contains configuration for the + node ephemeral storage using Local SSD. + + Attributes: + local_ssd_count (int): + Number of local SSDs to use to back ephemeral + storage. Uses NVMe interfaces. Each local SSD is + 375 GB in size. If zero, it means to disable + using local SSDs as ephemeral storage. The limit + for this value is dependent upon the maximum + number of disks available on a machine per zone. + See: + + https://cloud.google.com/compute/docs/disks/local-ssd + for more information. + """ + + local_ssd_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/__init__.py b/packages/google-cloud-container/google/cloud/container_v1beta1/__init__.py new file mode 100644 index 000000000000..1b66aa68aaaa --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/__init__.py @@ -0,0 +1,382 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.container_v1beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cluster_manager import ClusterManagerAsyncClient, ClusterManagerClient +from .types.cluster_service import ( + AcceleratorConfig, + AdditionalNodeNetworkConfig, + AdditionalPodNetworkConfig, + AdditionalPodRangesConfig, + AddonsConfig, + AdvancedDatapathObservabilityConfig, + AdvancedMachineFeatures, + AuthenticatorGroupsConfig, + Autopilot, + AutopilotCompatibilityIssue, + AutoprovisioningNodePoolDefaults, + AutoUpgradeOptions, + BestEffortProvisioning, + BinaryAuthorization, + BlueGreenSettings, + CancelOperationRequest, + CheckAutopilotCompatibilityRequest, + CheckAutopilotCompatibilityResponse, + ClientCertificateConfig, + CloudRunConfig, + Cluster, + ClusterAutoscaling, + ClusterTelemetry, + ClusterUpdate, + CompleteIPRotationRequest, + CompleteNodePoolUpgradeRequest, + ConfidentialNodes, + ConfigConnectorConfig, + CostManagementConfig, + CreateClusterRequest, + CreateNodePoolRequest, + DailyMaintenanceWindow, + DatabaseEncryption, + DatapathProvider, + DefaultSnatStatus, + DeleteClusterRequest, + DeleteNodePoolRequest, + DnsCacheConfig, + DNSConfig, + EphemeralStorageConfig, + EphemeralStorageLocalSsdConfig, + FastSocket, + Fleet, + GatewayAPIConfig, + GcePersistentDiskCsiDriverConfig, + GcfsConfig, + GcpFilestoreCsiDriverConfig, + GcsFuseCsiDriverConfig, + GetClusterRequest, + GetJSONWebKeysRequest, + GetJSONWebKeysResponse, + GetNodePoolRequest, + GetOpenIDConfigRequest, + GetOpenIDConfigResponse, + GetOperationRequest, + GetServerConfigRequest, + GkeBackupAgentConfig, + GPUDriverInstallationConfig, + GPUSharingConfig, + HorizontalPodAutoscaling, + HostMaintenancePolicy, + HttpLoadBalancing, + IdentityServiceConfig, + ILBSubsettingConfig, + IntraNodeVisibilityConfig, + IPAllocationPolicy, + IstioConfig, + Jwk, + K8sBetaAPIConfig, + KalmConfig, + KubernetesDashboard, + LegacyAbac, + LinuxNodeConfig, + ListClustersRequest, + ListClustersResponse, + ListLocationsRequest, + ListLocationsResponse, + ListNodePoolsRequest, + ListNodePoolsResponse, + ListOperationsRequest, + ListOperationsResponse, + ListUsableSubnetworksRequest, + ListUsableSubnetworksResponse, + LocalNvmeSsdBlockConfig, + Location, + LoggingComponentConfig, + LoggingConfig, + LoggingVariantConfig, + MaintenanceExclusionOptions, + MaintenancePolicy, + MaintenanceWindow, + ManagedPrometheusConfig, + Master, + MasterAuth, + MasterAuthorizedNetworksConfig, + MaxPodsConstraint, + MeshCertificates, + MonitoringComponentConfig, + MonitoringConfig, + NetworkConfig, + NetworkPolicy, + NetworkPolicyConfig, + NetworkTags, + NodeConfig, + NodeConfigDefaults, + NodeKubeletConfig, + NodeLabels, + NodeManagement, + NodeNetworkConfig, + NodePool, + NodePoolAutoConfig, + NodePoolAutoscaling, + NodePoolDefaults, + NodePoolLoggingConfig, + NodePoolUpdateStrategy, + NodeTaint, + NodeTaints, + NotificationConfig, + Operation, + OperationProgress, + PodCIDROverprovisionConfig, + PodSecurityPolicyConfig, + PrivateClusterConfig, + PrivateClusterMasterGlobalAccessConfig, + PrivateIPv6GoogleAccess, + ProtectConfig, + RangeInfo, + RecurringTimeWindow, + ReleaseChannel, + ReservationAffinity, + ResourceLabels, + ResourceLimit, + ResourceUsageExportConfig, + RollbackNodePoolUpgradeRequest, + SandboxConfig, + SecurityBulletinEvent, + SecurityPostureConfig, + ServerConfig, + ServiceExternalIPsConfig, + SetAddonsConfigRequest, + SetLabelsRequest, + SetLegacyAbacRequest, + SetLocationsRequest, + 
SetLoggingServiceRequest, + SetMaintenancePolicyRequest, + SetMasterAuthRequest, + SetMonitoringServiceRequest, + SetNetworkPolicyRequest, + SetNodePoolAutoscalingRequest, + SetNodePoolManagementRequest, + SetNodePoolSizeRequest, + ShieldedInstanceConfig, + ShieldedNodes, + SoleTenantConfig, + StackType, + StartIPRotationRequest, + StatusCondition, + TimeWindow, + TpuConfig, + UpdateClusterRequest, + UpdateMasterRequest, + UpdateNodePoolRequest, + UpgradeAvailableEvent, + UpgradeEvent, + UpgradeResourceType, + UsableSubnetwork, + UsableSubnetworkSecondaryRange, + VerticalPodAutoscaling, + VirtualNIC, + WindowsNodeConfig, + WindowsVersions, + WorkloadALTSConfig, + WorkloadCertificates, + WorkloadConfig, + WorkloadIdentityConfig, + WorkloadMetadataConfig, + WorkloadPolicyConfig, +) + +__all__ = ( + "ClusterManagerAsyncClient", + "AcceleratorConfig", + "AdditionalNodeNetworkConfig", + "AdditionalPodNetworkConfig", + "AdditionalPodRangesConfig", + "AddonsConfig", + "AdvancedDatapathObservabilityConfig", + "AdvancedMachineFeatures", + "AuthenticatorGroupsConfig", + "AutoUpgradeOptions", + "Autopilot", + "AutopilotCompatibilityIssue", + "AutoprovisioningNodePoolDefaults", + "BestEffortProvisioning", + "BinaryAuthorization", + "BlueGreenSettings", + "CancelOperationRequest", + "CheckAutopilotCompatibilityRequest", + "CheckAutopilotCompatibilityResponse", + "ClientCertificateConfig", + "CloudRunConfig", + "Cluster", + "ClusterAutoscaling", + "ClusterManagerClient", + "ClusterTelemetry", + "ClusterUpdate", + "CompleteIPRotationRequest", + "CompleteNodePoolUpgradeRequest", + "ConfidentialNodes", + "ConfigConnectorConfig", + "CostManagementConfig", + "CreateClusterRequest", + "CreateNodePoolRequest", + "DNSConfig", + "DailyMaintenanceWindow", + "DatabaseEncryption", + "DatapathProvider", + "DefaultSnatStatus", + "DeleteClusterRequest", + "DeleteNodePoolRequest", + "DnsCacheConfig", + "EphemeralStorageConfig", + "EphemeralStorageLocalSsdConfig", + "FastSocket", + "Fleet", + "GPUDriverInstallationConfig", + "GPUSharingConfig", + "GatewayAPIConfig", + "GcePersistentDiskCsiDriverConfig", + "GcfsConfig", + "GcpFilestoreCsiDriverConfig", + "GcsFuseCsiDriverConfig", + "GetClusterRequest", + "GetJSONWebKeysRequest", + "GetJSONWebKeysResponse", + "GetNodePoolRequest", + "GetOpenIDConfigRequest", + "GetOpenIDConfigResponse", + "GetOperationRequest", + "GetServerConfigRequest", + "GkeBackupAgentConfig", + "HorizontalPodAutoscaling", + "HostMaintenancePolicy", + "HttpLoadBalancing", + "ILBSubsettingConfig", + "IPAllocationPolicy", + "IdentityServiceConfig", + "IntraNodeVisibilityConfig", + "IstioConfig", + "Jwk", + "K8sBetaAPIConfig", + "KalmConfig", + "KubernetesDashboard", + "LegacyAbac", + "LinuxNodeConfig", + "ListClustersRequest", + "ListClustersResponse", + "ListLocationsRequest", + "ListLocationsResponse", + "ListNodePoolsRequest", + "ListNodePoolsResponse", + "ListOperationsRequest", + "ListOperationsResponse", + "ListUsableSubnetworksRequest", + "ListUsableSubnetworksResponse", + "LocalNvmeSsdBlockConfig", + "Location", + "LoggingComponentConfig", + "LoggingConfig", + "LoggingVariantConfig", + "MaintenanceExclusionOptions", + "MaintenancePolicy", + "MaintenanceWindow", + "ManagedPrometheusConfig", + "Master", + "MasterAuth", + "MasterAuthorizedNetworksConfig", + "MaxPodsConstraint", + "MeshCertificates", + "MonitoringComponentConfig", + "MonitoringConfig", + "NetworkConfig", + "NetworkPolicy", + "NetworkPolicyConfig", + "NetworkTags", + "NodeConfig", + "NodeConfigDefaults", + "NodeKubeletConfig", + 
"NodeLabels", + "NodeManagement", + "NodeNetworkConfig", + "NodePool", + "NodePoolAutoConfig", + "NodePoolAutoscaling", + "NodePoolDefaults", + "NodePoolLoggingConfig", + "NodePoolUpdateStrategy", + "NodeTaint", + "NodeTaints", + "NotificationConfig", + "Operation", + "OperationProgress", + "PodCIDROverprovisionConfig", + "PodSecurityPolicyConfig", + "PrivateClusterConfig", + "PrivateClusterMasterGlobalAccessConfig", + "PrivateIPv6GoogleAccess", + "ProtectConfig", + "RangeInfo", + "RecurringTimeWindow", + "ReleaseChannel", + "ReservationAffinity", + "ResourceLabels", + "ResourceLimit", + "ResourceUsageExportConfig", + "RollbackNodePoolUpgradeRequest", + "SandboxConfig", + "SecurityBulletinEvent", + "SecurityPostureConfig", + "ServerConfig", + "ServiceExternalIPsConfig", + "SetAddonsConfigRequest", + "SetLabelsRequest", + "SetLegacyAbacRequest", + "SetLocationsRequest", + "SetLoggingServiceRequest", + "SetMaintenancePolicyRequest", + "SetMasterAuthRequest", + "SetMonitoringServiceRequest", + "SetNetworkPolicyRequest", + "SetNodePoolAutoscalingRequest", + "SetNodePoolManagementRequest", + "SetNodePoolSizeRequest", + "ShieldedInstanceConfig", + "ShieldedNodes", + "SoleTenantConfig", + "StackType", + "StartIPRotationRequest", + "StatusCondition", + "TimeWindow", + "TpuConfig", + "UpdateClusterRequest", + "UpdateMasterRequest", + "UpdateNodePoolRequest", + "UpgradeAvailableEvent", + "UpgradeEvent", + "UpgradeResourceType", + "UsableSubnetwork", + "UsableSubnetworkSecondaryRange", + "VerticalPodAutoscaling", + "VirtualNIC", + "WindowsNodeConfig", + "WindowsVersions", + "WorkloadALTSConfig", + "WorkloadCertificates", + "WorkloadConfig", + "WorkloadIdentityConfig", + "WorkloadMetadataConfig", + "WorkloadPolicyConfig", +) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_metadata.json b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_metadata.json new file mode 100644 index 000000000000..b925b82155e0 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_metadata.json @@ -0,0 +1,373 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.container_v1beta1", + "protoPackage": "google.container.v1beta1", + "schema": "1.0", + "services": { + "ClusterManager": { + "clients": { + "grpc": { + "libraryClient": "ClusterManagerClient", + "rpcs": { + "CancelOperation": { + "methods": [ + "cancel_operation" + ] + }, + "CheckAutopilotCompatibility": { + "methods": [ + "check_autopilot_compatibility" + ] + }, + "CompleteIPRotation": { + "methods": [ + "complete_ip_rotation" + ] + }, + "CompleteNodePoolUpgrade": { + "methods": [ + "complete_node_pool_upgrade" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateNodePool": { + "methods": [ + "create_node_pool" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteNodePool": { + "methods": [ + "delete_node_pool" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetJSONWebKeys": { + "methods": [ + "get_json_web_keys" + ] + }, + "GetNodePool": { + "methods": [ + "get_node_pool" + ] + }, + "GetOperation": { + "methods": [ + "get_operation" + ] + }, + "GetServerConfig": { + "methods": [ + "get_server_config" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListLocations": { + "methods": [ + "list_locations" + ] + }, + "ListNodePools": { + "methods": [ + "list_node_pools" + ] + }, 
+ "ListOperations": { + "methods": [ + "list_operations" + ] + }, + "ListUsableSubnetworks": { + "methods": [ + "list_usable_subnetworks" + ] + }, + "RollbackNodePoolUpgrade": { + "methods": [ + "rollback_node_pool_upgrade" + ] + }, + "SetAddonsConfig": { + "methods": [ + "set_addons_config" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "SetLegacyAbac": { + "methods": [ + "set_legacy_abac" + ] + }, + "SetLocations": { + "methods": [ + "set_locations" + ] + }, + "SetLoggingService": { + "methods": [ + "set_logging_service" + ] + }, + "SetMaintenancePolicy": { + "methods": [ + "set_maintenance_policy" + ] + }, + "SetMasterAuth": { + "methods": [ + "set_master_auth" + ] + }, + "SetMonitoringService": { + "methods": [ + "set_monitoring_service" + ] + }, + "SetNetworkPolicy": { + "methods": [ + "set_network_policy" + ] + }, + "SetNodePoolAutoscaling": { + "methods": [ + "set_node_pool_autoscaling" + ] + }, + "SetNodePoolManagement": { + "methods": [ + "set_node_pool_management" + ] + }, + "SetNodePoolSize": { + "methods": [ + "set_node_pool_size" + ] + }, + "StartIPRotation": { + "methods": [ + "start_ip_rotation" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateMaster": { + "methods": [ + "update_master" + ] + }, + "UpdateNodePool": { + "methods": [ + "update_node_pool" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ClusterManagerAsyncClient", + "rpcs": { + "CancelOperation": { + "methods": [ + "cancel_operation" + ] + }, + "CheckAutopilotCompatibility": { + "methods": [ + "check_autopilot_compatibility" + ] + }, + "CompleteIPRotation": { + "methods": [ + "complete_ip_rotation" + ] + }, + "CompleteNodePoolUpgrade": { + "methods": [ + "complete_node_pool_upgrade" + ] + }, + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "CreateNodePool": { + "methods": [ + "create_node_pool" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DeleteNodePool": { + "methods": [ + "delete_node_pool" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "GetJSONWebKeys": { + "methods": [ + "get_json_web_keys" + ] + }, + "GetNodePool": { + "methods": [ + "get_node_pool" + ] + }, + "GetOperation": { + "methods": [ + "get_operation" + ] + }, + "GetServerConfig": { + "methods": [ + "get_server_config" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "ListLocations": { + "methods": [ + "list_locations" + ] + }, + "ListNodePools": { + "methods": [ + "list_node_pools" + ] + }, + "ListOperations": { + "methods": [ + "list_operations" + ] + }, + "ListUsableSubnetworks": { + "methods": [ + "list_usable_subnetworks" + ] + }, + "RollbackNodePoolUpgrade": { + "methods": [ + "rollback_node_pool_upgrade" + ] + }, + "SetAddonsConfig": { + "methods": [ + "set_addons_config" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "SetLegacyAbac": { + "methods": [ + "set_legacy_abac" + ] + }, + "SetLocations": { + "methods": [ + "set_locations" + ] + }, + "SetLoggingService": { + "methods": [ + "set_logging_service" + ] + }, + "SetMaintenancePolicy": { + "methods": [ + "set_maintenance_policy" + ] + }, + "SetMasterAuth": { + "methods": [ + "set_master_auth" + ] + }, + "SetMonitoringService": { + "methods": [ + "set_monitoring_service" + ] + }, + "SetNetworkPolicy": { + "methods": [ + "set_network_policy" + ] + }, + "SetNodePoolAutoscaling": { + "methods": [ + "set_node_pool_autoscaling" + ] + }, + "SetNodePoolManagement": { + "methods": [ + "set_node_pool_management" + ] + }, + 
"SetNodePoolSize": { + "methods": [ + "set_node_pool_size" + ] + }, + "StartIPRotation": { + "methods": [ + "start_ip_rotation" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + }, + "UpdateMaster": { + "methods": [ + "update_master" + ] + }, + "UpdateNodePool": { + "methods": [ + "update_node_pool" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py new file mode 100644 index 000000000000..8ab09c42e9c1 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.31.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/py.typed b/packages/google-cloud-container/google/cloud/container_v1beta1/py.typed new file mode 100644 index 000000000000..d5b0e29f91b4 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-container package uses inline types. diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/__init__.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/__init__.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/__init__.py new file mode 100644 index 000000000000..54d42dba56b7 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ClusterManagerAsyncClient +from .client import ClusterManagerClient + +__all__ = ( + "ClusterManagerClient", + "ClusterManagerAsyncClient", +) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py new file mode 100644 index 000000000000..5b94d4dfbe25 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/async_client.py @@ -0,0 +1,4904 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.container_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.rpc import status_pb2 # type: ignore + +from google.cloud.container_v1beta1.services.cluster_manager import pagers +from google.cloud.container_v1beta1.types import cluster_service + +from .client import ClusterManagerClient +from .transports.base import DEFAULT_CLIENT_INFO, ClusterManagerTransport +from .transports.grpc_asyncio import ClusterManagerGrpcAsyncIOTransport + + +class ClusterManagerAsyncClient: + """Google Kubernetes Engine Cluster Manager v1beta1""" + + _client: ClusterManagerClient + + DEFAULT_ENDPOINT = ClusterManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ClusterManagerClient.DEFAULT_MTLS_ENDPOINT + + topic_path = staticmethod(ClusterManagerClient.topic_path) + parse_topic_path = staticmethod(ClusterManagerClient.parse_topic_path) + common_billing_account_path = staticmethod( + ClusterManagerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ClusterManagerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ClusterManagerClient.common_folder_path) + parse_common_folder_path = staticmethod( + 
ClusterManagerClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ClusterManagerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ClusterManagerClient.parse_common_organization_path + ) + common_project_path = staticmethod(ClusterManagerClient.common_project_path) + parse_common_project_path = staticmethod( + ClusterManagerClient.parse_common_project_path + ) + common_location_path = staticmethod(ClusterManagerClient.common_location_path) + parse_common_location_path = staticmethod( + ClusterManagerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterManagerAsyncClient: The constructed client. + """ + return ClusterManagerClient.from_service_account_info.__func__(ClusterManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterManagerAsyncClient: The constructed client. + """ + return ClusterManagerClient.from_service_account_file.__func__(ClusterManagerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ClusterManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ClusterManagerTransport: + """Returns the transport used by the client instance. + + Returns: + ClusterManagerTransport: The transport used by the client instance.
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(ClusterManagerClient).get_transport_class, type(ClusterManagerClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ClusterManagerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cluster manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ClusterManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ClusterManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_clusters( + self, + request: Optional[Union[cluster_service.ListClustersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListClustersResponse: + r"""Lists all clusters owned by a project in either the + specified zone or all zones. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_list_clusters(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.ListClustersRequest( + project_id="project_id_value", + zone="zone_value", + ) + + # Make the request + response = await client.list_clusters(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.ListClustersRequest, dict]]): + The request object. ListClustersRequest lists clusters. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides, or "-" for all zones. This + field has been deprecated and replaced by the parent + field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ListClustersResponse: + ListClustersResponse is the result of + ListClustersRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.ListClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_clusters, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
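A minimal sketch of driving the generated ``list_clusters`` sample end to end, assuming Application Default Credentials are configured; the project value is a hypothetical placeholder, and ``"-"`` asks for clusters in all zones as documented above:

.. code-block:: python

    import asyncio

    from google.cloud import container_v1beta1


    async def main():
        # The async client picks up Application Default Credentials.
        client = container_v1beta1.ClusterManagerAsyncClient()

        # "my-project" is a placeholder; zone "-" means all zones.
        response = await client.list_clusters(
            project_id="my-project",
            zone="-",
        )
        for cluster in response.clusters:
            print(cluster.name, cluster.status)


    asyncio.run(main())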
+ return response + + async def get_cluster( + self, + request: Optional[Union[cluster_service.GetClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Cluster: + r"""Gets the details for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_get_cluster(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.GetClusterRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.GetClusterRequest, dict]]): + The request object. GetClusterRequest gets the settings + of a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to retrieve. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Cluster: + A Google Kubernetes Engine cluster. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.GetClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
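Because the flattened ``project_id``/``zone``/``cluster_id`` arguments are all deprecated, an equivalent call can populate the consolidated ``name`` field on the request object instead; a hedged sketch with a hypothetical resource path:

.. code-block:: python

    from google.cloud import container_v1beta1

    # Identify the cluster by its full resource name rather than the
    # deprecated (project_id, zone, cluster_id) triple.
    request = container_v1beta1.GetClusterRequest(
        name="projects/my-project/locations/us-central1-a/clusters/my-cluster",
    )
    # cluster = await client.get_cluster(request=request)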
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_cluster( + self, + request: Optional[Union[cluster_service.CreateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster: Optional[cluster_service.Cluster] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Creates a cluster, consisting of the specified number and type + of Google Compute Engine instances. + + By default, the cluster is created in the project's `default + network `__. + + One firewall is added for the cluster. After cluster creation, + the Kubelet creates routes for each node to allow the containers + on that node to communicate with all other instances in the + cluster. + + Finally, an entry is added to the project's global metadata + indicating which CIDR range the cluster is using. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_create_cluster(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.CreateClusterRequest( + project_id="project_id_value", + zone="zone_value", + ) + + # Make the request + response = await client.create_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.CreateClusterRequest, dict]]): + The request object. CreateClusterRequest creates a + cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.container_v1beta1.types.Cluster`): + Required. A `cluster + resource `__ + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.CreateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster is not None: + request.cluster = cluster + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_cluster, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_cluster( + self, + request: Optional[Union[cluster_service.UpdateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + update: Optional[cluster_service.ClusterUpdate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the settings for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_update_cluster(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.UpdateClusterRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.update_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.UpdateClusterRequest, dict]]): + The request object. UpdateClusterRequest updates the + settings of a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update (:class:`google.cloud.container_v1beta1.types.ClusterUpdate`): + Required. A description of the + update. + + This corresponds to the ``update`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, update]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.UpdateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if update is not None: + request.update = update + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cluster, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_node_pool( + self, + request: Optional[Union[cluster_service.UpdateNodePoolRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the version and/or image type of a specific + node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_update_node_pool(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.UpdateNodePoolRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + node_version="node_version_value", + image_type="image_type_value", + ) + + # Make the request + response = await client.update_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.UpdateNodePoolRequest, dict]]): + The request object. UpdateNodePoolRequest updates the + version of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.UpdateNodePoolRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_node_pool, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
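The mutation methods accept the same ``name``-based request form; a sketch of a single-field update with a placeholder cluster name (``ClusterUpdate`` is documented to carry exactly one desired change per request):

.. code-block:: python

    from google.cloud import container_v1beta1

    # Describe one desired change; at most one ClusterUpdate field
    # should be populated per request.
    update = container_v1beta1.ClusterUpdate(
        desired_node_version="1.27",  # hypothetical version alias
    )
    request = container_v1beta1.UpdateClusterRequest(
        name="projects/my-project/locations/us-central1-a/clusters/my-cluster",
        update=update,
    )
    # operation = await client.update_cluster(request=request)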
+ return response + + async def set_node_pool_autoscaling( + self, + request: Optional[ + Union[cluster_service.SetNodePoolAutoscalingRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the autoscaling settings of a specific node + pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_node_pool_autoscaling(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetNodePoolAutoscalingRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Make the request + response = await client.set_node_pool_autoscaling(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetNodePoolAutoscalingRequest, dict]]): + The request object. SetNodePoolAutoscalingRequest sets + the autoscaler settings of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.SetNodePoolAutoscalingRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_node_pool_autoscaling, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_logging_service( + self, + request: Optional[Union[cluster_service.SetLoggingServiceRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + logging_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the logging service for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_logging_service(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetLoggingServiceRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + ) + + # Make the request + response = await client.set_logging_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetLoggingServiceRequest, dict]]): + The request object. SetLoggingServiceRequest sets the + logging service of a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + logging_service (:class:`str`): + Required. The logging service the cluster should use to + write logs. Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud + Logging service with a Kubernetes-native resource + model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``logging.googleapis.com`` for + earlier versions. + + This corresponds to the ``logging_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, logging_service]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cluster_service.SetLoggingServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if logging_service is not None: + request.logging_service = logging_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_logging_service, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_monitoring_service( + self, + request: Optional[ + Union[cluster_service.SetMonitoringServiceRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + monitoring_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the monitoring service for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_monitoring_service(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetMonitoringServiceRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + ) + + # Make the request + response = await client.set_monitoring_service(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetMonitoringServiceRequest, dict]]): + The request object. SetMonitoringServiceRequest sets the + monitoring service of a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. 
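Routing logs to the Kubernetes-native Cloud Logging service, for example, might look like the following sketch (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import container_v1beta1

    request = container_v1beta1.SetLoggingServiceRequest(
        name="projects/my-project/locations/us-central1-a/clusters/my-cluster",
        # One of the documented logging_service options.
        logging_service="logging.googleapis.com/kubernetes",
    )
    # operation = await client.set_logging_service(request=request)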
+ + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + monitoring_service (:class:`str`): + Required. The monitoring service the cluster should use + to write metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE + 1.15). + - ``none`` - No metrics will be exported from the + cluster. + + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will + be used for GKE 1.14+ or ``monitoring.googleapis.com`` + for earlier versions. + + This corresponds to the ``monitoring_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, monitoring_service]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetMonitoringServiceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if monitoring_service is not None: + request.monitoring_service = monitoring_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_monitoring_service, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_addons_config( + self, + request: Optional[Union[cluster_service.SetAddonsConfigRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + addons_config: Optional[cluster_service.AddonsConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the addons for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_addons_config(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetAddonsConfigRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.set_addons_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetAddonsConfigRequest, dict]]): + The request object. SetAddonsRequest sets the addons + associated with the cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + addons_config (:class:`google.cloud.container_v1beta1.types.AddonsConfig`): + Required. The desired configurations + for the various addons available to run + in the cluster. + + This corresponds to the ``addons_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, addons_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetAddonsConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
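A sketch of toggling a single addon through ``set_addons_config``; the resource name is a placeholder, and only the addon populated here is intended to change:

.. code-block:: python

    from google.cloud import container_v1beta1

    request = container_v1beta1.SetAddonsConfigRequest(
        name="projects/my-project/locations/us-central1-a/clusters/my-cluster",
        addons_config=container_v1beta1.AddonsConfig(
            http_load_balancing=container_v1beta1.HttpLoadBalancing(
                disabled=True,
            ),
        ),
    )
    # operation = await client.set_addons_config(request=request)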
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if addons_config is not None: + request.addons_config = addons_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_addons_config, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_locations( + self, + request: Optional[Union[cluster_service.SetLocationsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + locations: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the locations for a specific cluster. Deprecated. Use + `projects.locations.clusters.update `__ + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_locations(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetLocationsRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=['locations_value1', 'locations_value2'], + ) + + # Make the request + response = await client.set_locations(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetLocationsRequest, dict]]): + The request object. SetLocationsRequest sets the + locations of the cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
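Since ``set_locations`` is deprecated in favor of ``projects.locations.clusters.update``, the same change can be expressed through ``update_cluster`` with ``desired_locations``; a hedged sketch with placeholder values:

.. code-block:: python

    from google.cloud import container_v1beta1

    # Preferred replacement for set_locations; the list must still
    # include the cluster's primary zone.
    request = container_v1beta1.UpdateClusterRequest(
        name="projects/my-project/locations/us-central1-a/clusters/my-cluster",
        update=container_v1beta1.ClusterUpdate(
            desired_locations=["us-central1-a", "us-central1-b"],
        ),
    )
    # operation = await client.update_cluster(request=request)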
+ locations (:class:`MutableSequence[str]`): + Required. The desired list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. Changing + the locations a cluster is in will result in nodes being + either created or removed from the cluster, depending on + whether locations are being added or removed. + + This list must always include the cluster's primary + zone. + + This corresponds to the ``locations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + warnings.warn( + "ClusterManagerAsyncClient.set_locations is deprecated", DeprecationWarning + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, locations]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetLocationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if locations: + request.locations.extend(locations) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_locations, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_master( + self, + request: Optional[Union[cluster_service.UpdateMasterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + master_version: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the master for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_update_master(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.UpdateMasterRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + ) + + # Make the request + response = await client.update_master(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.UpdateMasterRequest, dict]]): + The request object. UpdateMasterRequest updates the + master of the cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + master_version (:class:`str`): + Required. The Kubernetes version to + change the master to. + Users may specify either explicit + versions offered by Kubernetes Engine or + version aliases, which have the + following behavior: + + - "latest": picks the highest valid + Kubernetes version + - "1.X": picks the highest valid + patch+gke.N patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N + patch in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit + Kubernetes version + - "-": picks the default Kubernetes + version + + This corresponds to the ``master_version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, master_version]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.UpdateMasterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project_id is not None:
+ request.project_id = project_id
+ if zone is not None:
+ request.zone = zone
+ if cluster_id is not None:
+ request.cluster_id = cluster_id
+ if master_version is not None:
+ request.master_version = master_version
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_master,
+ default_timeout=45.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def set_master_auth(
+ self,
+ request: Optional[Union[cluster_service.SetMasterAuthRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.Operation:
+ r"""Sets master auth materials. Currently supports
+ changing the admin password of a specific cluster,
+ either via password generation or explicitly setting the
+ password.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ async def sample_set_master_auth():
+ # Create a client
+ client = container_v1beta1.ClusterManagerAsyncClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.SetMasterAuthRequest(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ action="SET_USERNAME",
+ )
+
+ # Make the request
+ response = await client.set_master_auth(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.container_v1beta1.types.SetMasterAuthRequest, dict]]):
+ The request object. SetMasterAuthRequest updates the
+ admin password of a cluster.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.container_v1beta1.types.Operation:
+ This operation resource represents
+ operations that may have happened or are
+ happening on the cluster. All fields are
+ output only.
+
+ """
+ # Create or coerce a protobuf request object.
+ request = cluster_service.SetMasterAuthRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.set_master_auth,
+ default_timeout=45.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
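+ # The per-call `retry` and `timeout` arguments override the wrap_method
+ # defaults above whenever the caller passes anything other than
+ # gapic_v1.method.DEFAULT.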
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_cluster( + self, + request: Optional[Union[cluster_service.DeleteClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Deletes the cluster, including the Kubernetes + endpoint and all worker nodes. + + Firewalls and routes that were configured during cluster + creation are also deleted. + + Other Google Compute Engine resources that might be in + use by the cluster, such as load balancer resources, are + not deleted if they weren't present when the cluster was + initially created. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_delete_cluster(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.DeleteClusterRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.delete_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.DeleteClusterRequest, dict]]): + The request object. DeleteClusterRequest deletes a + cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to delete. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. 
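+ # A plain dict is accepted here too; the proto-plus constructor below
+ # coerces it into a typed DeleteClusterRequest message.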
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.DeleteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[Union[cluster_service.ListOperationsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListOperationsResponse: + r"""Lists all operations in a project in the specified + zone or all zones. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_list_operations(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.ListOperationsRequest( + project_id="project_id_value", + zone="zone_value", + ) + + # Make the request + response = await client.list_operations(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.ListOperationsRequest, dict]]): + The request object. ListOperationsRequest lists + operations. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + to return operations for, or ``-`` for all zones. 
This + field has been deprecated and replaced by the parent + field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ListOperationsResponse: + ListOperationsResponse is the result + of ListOperationsRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.ListOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[Union[cluster_service.GetOperationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + operation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Gets the specified operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_get_operation(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.GetOperationRequest( + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + ) + + # Make the request + response = await client.get_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.GetOperationRequest, dict]]): + The request object. GetOperationRequest gets a single + operation. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (:class:`str`): + Required. Deprecated. The server-assigned ``name`` of + the operation. This field has been deprecated and + replaced by the name field. + + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, operation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.GetOperationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if operation_id is not None: + request.operation_id = operation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
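+ # to_grpc_metadata serializes the ("name", request.name) pair into the
+ # x-goog-request-params header, which the backend uses for routing.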
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[Union[cluster_service.CancelOperationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + operation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels the specified operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_cancel_operation(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.CancelOperationRequest( + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + ) + + # Make the request + await client.cancel_operation(request=request) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.CancelOperationRequest, dict]]): + The request object. CancelOperationRequest cancels a + single operation. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the operation resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (:class:`str`): + Required. Deprecated. The server-assigned ``name`` of + the operation. This field has been deprecated and + replaced by the name field. + + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, operation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.CancelOperationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
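+ # `is not None` (rather than truthiness) is deliberate: an explicitly
+ # passed empty string is still applied; only omitted arguments are skipped.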
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if operation_id is not None: + request.operation_id = operation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_server_config( + self, + request: Optional[Union[cluster_service.GetServerConfigRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ServerConfig: + r"""Returns configuration info about the Google + Kubernetes Engine service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_get_server_config(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.GetServerConfigRequest( + project_id="project_id_value", + zone="zone_value", + ) + + # Make the request + response = await client.get_server_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.GetServerConfigRequest, dict]]): + The request object. Gets the current Kubernetes Engine + service configuration. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + to return operations for. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ServerConfig: + Kubernetes Engine service + configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
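+ # Mixing the two styles is ambiguous, hence the ValueError below.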
+ has_flattened_params = any([project_id, zone])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cluster_service.GetServerConfigRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if zone is not None:
+ request.zone = zone
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_server_config,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=20.0,
+ ),
+ default_timeout=20.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_json_web_keys(
+ self,
+ request: Optional[Union[cluster_service.GetJSONWebKeysRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.GetJSONWebKeysResponse:
+ r"""Gets the public component of the cluster signing keys
+ in JSON Web Key format.
+ This API is not yet intended for general use, and is not
+ available for all clusters.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ async def sample_get_json_web_keys():
+ # Create a client
+ client = container_v1beta1.ClusterManagerAsyncClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.GetJSONWebKeysRequest(
+ )
+
+ # Make the request
+ response = await client.get_json_web_keys(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.container_v1beta1.types.GetJSONWebKeysRequest, dict]]):
+ The request object. GetJSONWebKeysRequest gets the public component of the
+ keys used by the cluster to sign token requests. This
+ will be the jwks_uri for the discovery document returned
+ by getOpenIDConfig. See the OpenID Connect Discovery 1.0
+ specification for details.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.container_v1beta1.types.GetJSONWebKeysResponse:
+ GetJSONWebKeysResponse is a valid
+ JSON Web Key Set as specified in
+ RFC 7517.
+
+ """
+ # Create or coerce a protobuf request object.
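+ # GetJSONWebKeysRequest exposes no flattened fields, so only the request
+ # object itself (or an equivalent dict) is accepted for this method.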
+ request = cluster_service.GetJSONWebKeysRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_json_web_keys, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_node_pools( + self, + request: Optional[Union[cluster_service.ListNodePoolsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListNodePoolsResponse: + r"""Lists the node pools for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_list_node_pools(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.ListNodePoolsRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.list_node_pools(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.ListNodePoolsRequest, dict]]): + The request object. ListNodePoolsRequest lists the node + pool(s) for a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the parent field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ListNodePoolsResponse: + ListNodePoolsResponse is the result + of ListNodePoolsRequest. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.ListNodePoolsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_node_pools, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_node_pool( + self, + request: Optional[Union[cluster_service.GetNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.NodePool: + r"""Retrieves the requested node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_get_node_pool(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.GetNodePoolRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Make the request + response = await client.get_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.GetNodePoolRequest, dict]]): + The request object. GetNodePoolRequest retrieves a node + pool for a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. 
The name of the Google Compute
+ Engine
+ `zone `__
+ in which the cluster resides. This field has been
+ deprecated and replaced by the name field.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ cluster_id (:class:`str`):
+ Required. Deprecated. The name of the
+ cluster. This field has been deprecated
+ and replaced by the name field.
+
+ This corresponds to the ``cluster_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ node_pool_id (:class:`str`):
+ Required. Deprecated. The name of the
+ node pool. This field has been
+ deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``node_pool_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.container_v1beta1.types.NodePool:
+ NodePool contains the name and
+ configuration for a cluster's node pool.
+ Node pools are a set of nodes (i.e.
+ VMs), with a common configuration and
+ specification, under the control of the
+ cluster master. They may have a set of
+ Kubernetes labels applied to them, which
+ may be used to reference them during pod
+ scheduling. They may also be resized up
+ or down, to accommodate the workload.
+ These upgrade settings control the level
+ of parallelism and the level of
+ disruption caused by an upgrade.
+
+ maxUnavailable controls the number of
+ nodes that can be simultaneously
+ unavailable.
+
+ maxSurge controls the number of
+ additional nodes that can be added to
+ the node pool temporarily for the time
+ of the upgrade to increase the number of
+ available nodes.
+
+ (maxUnavailable + maxSurge) determines
+ the level of parallelism (how many nodes
+ are being upgraded at the same time).
+
+ Note: upgrades inevitably introduce some
+ disruption since workloads need to be
+ moved from old nodes to new, upgraded
+ ones. Even if maxUnavailable=0, this
+ holds true. (Disruption stays within the
+ limits of PodDisruptionBudget, if it is
+ configured.)
+
+ Consider a hypothetical node pool with 5
+ nodes having maxSurge=2,
+ maxUnavailable=1. This means the upgrade
+ process upgrades 3 nodes simultaneously.
+ It creates 2 additional (upgraded)
+ nodes, then it brings down 3 old (not
+ yet upgraded) nodes at the same time.
+ This ensures that there are always at
+ least 4 nodes available.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project_id, zone, cluster_id, node_pool_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cluster_service.GetNodePoolRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_node_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_node_pool( + self, + request: Optional[Union[cluster_service.CreateNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool: Optional[cluster_service.NodePool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Creates a node pool for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_create_node_pool(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.CreateNodePoolRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.create_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.CreateNodePoolRequest, dict]]): + The request object. CreateNodePoolRequest creates a node + pool for a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the parent field. 
+ + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool (:class:`google.cloud.container_v1beta1.types.NodePool`): + Required. The node pool to create. + This corresponds to the ``node_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.CreateNodePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool is not None: + request.node_pool = node_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_node_pool, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_node_pool( + self, + request: Optional[Union[cluster_service.DeleteNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Deletes a node pool from a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_delete_node_pool(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.DeleteNodePoolRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Make the request + response = await client.delete_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.DeleteNodePoolRequest, dict]]): + The request object. DeleteNodePoolRequest deletes a node + pool for a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (:class:`str`): + Required. Deprecated. The name of the + node pool to delete. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.DeleteNodePoolRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
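+ # The default retry below backs off exponentially: waits of 0.1s,
+ # 0.1 * 1.3 = 0.13s, 0.169s, ... capped at 60s each, abandoned once the
+ # 20-second deadline elapses.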
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_node_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def complete_node_pool_upgrade( + self, + request: Optional[ + Union[cluster_service.CompleteNodePoolUpgradeRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""CompleteNodePoolUpgrade will signal an on-going node + pool upgrade to complete. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_complete_node_pool_upgrade(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.CompleteNodePoolUpgradeRequest( + ) + + # Make the request + await client.complete_node_pool_upgrade(request=request) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.CompleteNodePoolUpgradeRequest, dict]]): + The request object. CompleteNodePoolUpgradeRequest sets + the name of target node pool to complete + upgrade. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = cluster_service.CompleteNodePoolUpgradeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.complete_node_pool_upgrade, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
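+ # This method is annotated `-> None`: the awaited result (an empty
+ # response message) is intentionally discarded, so the await only
+ # surfaces completion or errors.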
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ async def rollback_node_pool_upgrade(
+ self,
+ request: Optional[
+ Union[cluster_service.RollbackNodePoolUpgradeRequest, dict]
+ ] = None,
+ *,
+ project_id: Optional[str] = None,
+ zone: Optional[str] = None,
+ cluster_id: Optional[str] = None,
+ node_pool_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.Operation:
+ r"""Rolls back a previously Aborted or Failed NodePool
+ upgrade. This makes no changes if the last upgrade
+ successfully completed.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ async def sample_rollback_node_pool_upgrade():
+ # Create a client
+ client = container_v1beta1.ClusterManagerAsyncClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.RollbackNodePoolUpgradeRequest(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ node_pool_id="node_pool_id_value",
+ )
+
+ # Make the request
+ response = await client.rollback_node_pool_upgrade(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.container_v1beta1.types.RollbackNodePoolUpgradeRequest, dict]]):
+ The request object. RollbackNodePoolUpgradeRequest
+ rolls back the previously Aborted or
+ Failed NodePool upgrade. This will be a
+ no-op if the last upgrade successfully
+ completed.
+ project_id (:class:`str`):
+ Required. Deprecated. The Google Developers Console
+ `project ID or project
+ number `__.
+ This field has been deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``project_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (:class:`str`):
+ Required. Deprecated. The name of the Google Compute
+ Engine
+ `zone `__
+ in which the cluster resides. This field has been
+ deprecated and replaced by the name field.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ cluster_id (:class:`str`):
+ Required. Deprecated. The name of the
+ cluster to roll back. This field has been
+ deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``cluster_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ node_pool_id (:class:`str`):
+ Required. Deprecated. The name of the
+ node pool to roll back. This field has
+ been deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``node_pool_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.RollbackNodePoolUpgradeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback_node_pool_upgrade, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_node_pool_management( + self, + request: Optional[ + Union[cluster_service.SetNodePoolManagementRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + management: Optional[cluster_service.NodeManagement] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the NodeManagement options for a node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_node_pool_management(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetNodePoolManagementRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Make the request + response = await client.set_node_pool_management(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetNodePoolManagementRequest, dict]]): + The request object. SetNodePoolManagementRequest sets the + node management properties of a node + pool. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. 
+ This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to update. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (:class:`str`): + Required. Deprecated. The name of the + node pool to update. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + management (:class:`google.cloud.container_v1beta1.types.NodeManagement`): + Required. NodeManagement + configuration for the node pool. + + This corresponds to the ``management`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, zone, cluster_id, node_pool_id, management] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetNodePoolManagementRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + if management is not None: + request.management = management + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_node_pool_management, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def set_labels( + self, + request: Optional[Union[cluster_service.SetLabelsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + resource_labels: Optional[MutableMapping[str, str]] = None, + label_fingerprint: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets labels on a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_labels(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetLabelsRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + label_fingerprint="label_fingerprint_value", + ) + + # Make the request + response = await client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetLabelsRequest, dict]]): + The request object. SetLabelsRequest sets the Google + Cloud Platform labels on a Google + Container Engine cluster, which will in + turn set them for Google Compute Engine + resources used by that cluster + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_labels (:class:`MutableMapping[str, str]`): + Required. The labels to set for that + cluster. + + This corresponds to the ``resource_labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + label_fingerprint (:class:`str`): + Required. The fingerprint of the previous set of labels + for this resource, used to detect conflicts. The + fingerprint is initially generated by Kubernetes Engine + and changes after every request to modify or update + labels. You must always provide an up-to-date + fingerprint hash when updating or changing labels. Make + a ``get()`` request to the resource to get the latest + fingerprint. + + This corresponds to the ``label_fingerprint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, zone, cluster_id, resource_labels, label_fingerprint] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetLabelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if label_fingerprint is not None: + request.label_fingerprint = label_fingerprint + + if resource_labels: + request.resource_labels.update(resource_labels) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_labels, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_legacy_abac( + self, + request: Optional[Union[cluster_service.SetLegacyAbacRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + enabled: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Enables or disables the ABAC authorization mechanism + on a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_legacy_abac(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetLegacyAbacRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + enabled=True, + ) + + # Make the request + response = await client.set_legacy_abac(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetLegacyAbacRequest, dict]]): + The request object. SetLegacyAbacRequest enables or + disables the ABAC authorization + mechanism for a cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster to update. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + enabled (:class:`bool`): + Required. Whether ABAC authorization + will be enabled in the cluster. + + This corresponds to the ``enabled`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, enabled]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetLegacyAbacRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if enabled is not None: + request.enabled = enabled + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
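+        # Note: only a method-level default timeout (45 seconds) is configured
+        # below, and no default retry is attached to this mutating call; a
+        # caller-supplied `retry` or `timeout` argument overrides these
+        # defaults on a per-call basis.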
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_legacy_abac, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def start_ip_rotation( + self, + request: Optional[Union[cluster_service.StartIPRotationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Starts master IP rotation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_start_ip_rotation(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.StartIPRotationRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.start_ip_rotation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.StartIPRotationRequest, dict]]): + The request object. StartIPRotationRequest creates a new + IP for the cluster and then performs a + node upgrade on each node pool to point + to the new IP. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
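+
+                Note that this is a long-running server-side operation;
+                a caller would typically poll it (for example with
+                ``get_operation``) until its ``status`` field reports
+                ``DONE``.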
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.StartIPRotationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_ip_rotation, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def complete_ip_rotation( + self, + request: Optional[ + Union[cluster_service.CompleteIPRotationRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Completes master IP rotation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_complete_ip_rotation(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.CompleteIPRotationRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.complete_ip_rotation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.CompleteIPRotationRequest, dict]]): + The request object. CompleteIPRotationRequest moves the + cluster master back into single-IP mode. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.CompleteIPRotationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.complete_ip_rotation, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_node_pool_size( + self, + request: Optional[Union[cluster_service.SetNodePoolSizeRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""SetNodePoolSizeRequest sets the size of a node pool. The new + size will be used for all replicas, including future replicas + created by modifying + [NodePool.locations][google.container.v1beta1.NodePool.locations]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_node_pool_size(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetNodePoolSizeRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + node_count=1070, + ) + + # Make the request + response = await client.set_node_pool_size(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetNodePoolSizeRequest, dict]]): + The request object. SetNodePoolSizeRequest sets the size + of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.SetNodePoolSizeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_node_pool_size, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_network_policy( + self, + request: Optional[Union[cluster_service.SetNetworkPolicyRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + network_policy: Optional[cluster_service.NetworkPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Enables or disables Network Policy for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_network_policy(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetNetworkPolicyRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.set_network_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetNetworkPolicyRequest, dict]]): + The request object. SetNetworkPolicyRequest + enables/disables network policy for a + cluster. + project_id (:class:`str`): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + network_policy (:class:`google.cloud.container_v1beta1.types.NetworkPolicy`): + Required. Configuration options for + the NetworkPolicy feature. + + This corresponds to the ``network_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, network_policy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.SetNetworkPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if network_policy is not None: + request.network_policy = network_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
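+        # For reference, the flattened `network_policy` argument applied above
+        # takes a cluster_service.NetworkPolicy message; an illustrative value
+        # (field values are examples only) would be:
+        #   cluster_service.NetworkPolicy(provider="CALICO", enabled=True)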
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_network_policy, + default_timeout=45.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_maintenance_policy( + self, + request: Optional[ + Union[cluster_service.SetMaintenancePolicyRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + maintenance_policy: Optional[cluster_service.MaintenancePolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the maintenance policy for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_set_maintenance_policy(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.SetMaintenancePolicyRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = await client.set_maintenance_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.SetMaintenancePolicyRequest, dict]]): + The request object. SetMaintenancePolicyRequest sets the + maintenance policy for a cluster. + project_id (:class:`str`): + Required. The Google Developers Console `project ID or + project + number `__. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (:class:`str`): + Required. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (:class:`str`): + Required. The name of the cluster to + update. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + maintenance_policy (:class:`google.cloud.container_v1beta1.types.MaintenancePolicy`): + Required. The maintenance policy to + be set for the cluster. An empty field + clears the existing maintenance policy. + + This corresponds to the ``maintenance_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+        Returns:
+            google.cloud.container_v1beta1.types.Operation:
+                This operation resource represents
+                operations that may have happened or are
+                happening on the cluster. All fields are
+                output only.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project_id, zone, cluster_id, maintenance_policy])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = cluster_service.SetMaintenancePolicyRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project_id is not None:
+            request.project_id = project_id
+        if zone is not None:
+            request.zone = zone
+        if cluster_id is not None:
+            request.cluster_id = cluster_id
+        if maintenance_policy is not None:
+            request.maintenance_policy = maintenance_policy
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.set_maintenance_policy,
+            default_timeout=45.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_usable_subnetworks(
+        self,
+        request: Optional[
+            Union[cluster_service.ListUsableSubnetworksRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListUsableSubnetworksAsyncPager:
+        r"""Lists subnetworks that can be used for creating
+        clusters in a project.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1beta1
+
+            async def sample_list_usable_subnetworks():
+                # Create a client
+                client = container_v1beta1.ClusterManagerAsyncClient()
+
+                # Initialize request argument(s)
+                request = container_v1beta1.ListUsableSubnetworksRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_usable_subnetworks(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.container_v1beta1.types.ListUsableSubnetworksRequest, dict]]):
+                The request object. ListUsableSubnetworksRequest requests
+                the list of usable subnetworks available
+                to a user for creating clusters.
+            parent (:class:`str`):
+                Required. The parent project where subnetworks are
+                usable. Specified in the format ``projects/*``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.services.cluster_manager.pagers.ListUsableSubnetworksAsyncPager: + ListUsableSubnetworksResponse is the + response of + ListUsableSubnetworksRequest. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.ListUsableSubnetworksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_usable_subnetworks, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListUsableSubnetworksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def check_autopilot_compatibility( + self, + request: Optional[ + Union[cluster_service.CheckAutopilotCompatibilityRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.CheckAutopilotCompatibilityResponse: + r"""Checks the cluster compatibility with Autopilot mode, + and returns a list of compatibility issues. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_check_autopilot_compatibility(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.CheckAutopilotCompatibilityRequest( + ) + + # Make the request + response = await client.check_autopilot_compatibility(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.CheckAutopilotCompatibilityRequest, dict]]): + The request object. CheckAutopilotCompatibilityRequest + requests getting the blockers for the + given operation in the cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.CheckAutopilotCompatibilityResponse: + CheckAutopilotCompatibilityResponse + has a list of compatibility issues. + + """ + # Create or coerce a protobuf request object. + request = cluster_service.CheckAutopilotCompatibilityRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.check_autopilot_compatibility, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[Union[cluster_service.ListLocationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListLocationsResponse: + r"""Fetches locations that offer Google Kubernetes + Engine. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + async def sample_list_locations(): + # Create a client + client = container_v1beta1.ClusterManagerAsyncClient() + + # Initialize request argument(s) + request = container_v1beta1.ListLocationsRequest( + parent="parent_value", + ) + + # Make the request + response = await client.list_locations(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.container_v1beta1.types.ListLocationsRequest, dict]]): + The request object. ListLocationsRequest is used to + request the locations that offer GKE. + parent (:class:`str`): + Required. Contains the name of the resource requested. 
+ Specified in the format ``projects/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ListLocationsResponse: + ListLocationsResponse returns the + list of all GKE locations and their + recommendation state. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cluster_service.ListLocationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_locations, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ClusterManagerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ClusterManagerAsyncClient",) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py new file mode 100644 index 000000000000..6657f0ab75bf --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py @@ -0,0 +1,5037 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.container_v1beta1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.rpc import status_pb2  # type: ignore
+
+from google.cloud.container_v1beta1.services.cluster_manager import pagers
+from google.cloud.container_v1beta1.types import cluster_service
+
+from .transports.base import DEFAULT_CLIENT_INFO, ClusterManagerTransport
+from .transports.grpc import ClusterManagerGrpcTransport
+from .transports.grpc_asyncio import ClusterManagerGrpcAsyncIOTransport
+
+
+class ClusterManagerClientMeta(type):
+    """Metaclass for the ClusterManager client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[ClusterManagerTransport]]
+    _transport_registry["grpc"] = ClusterManagerGrpcTransport
+    _transport_registry["grpc_asyncio"] = ClusterManagerGrpcAsyncIOTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[ClusterManagerTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ClusterManagerClient(metaclass=ClusterManagerClientMeta):
+    """Google Kubernetes Engine Cluster Manager v1beta1"""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "container.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ClusterManagerTransport: + """Returns the transport used by the client instance. + + Returns: + ClusterManagerTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def topic_path(
+        project: str,
+        topic: str,
+    ) -> str:
+        """Returns a fully-qualified topic string."""
+        return "projects/{project}/topics/{topic}".format(
+            project=project,
+            topic=topic,
+        )
+
+    @staticmethod
+    def parse_topic_path(path: str) -> Dict[str, str]:
+        """Parses a topic path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/topics/(?P<topic>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, ClusterManagerTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cluster manager client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ClusterManagerTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ClusterManagerTransport): + # transport is a ClusterManagerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_clusters( + self, + request: Optional[Union[cluster_service.ListClustersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListClustersResponse: + r"""Lists all clusters owned by a project in either the + specified zone or all zones. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_list_clusters(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.ListClustersRequest( + project_id="project_id_value", + zone="zone_value", + ) + + # Make the request + response = client.list_clusters(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.ListClustersRequest, dict]): + The request object. ListClustersRequest lists clusters. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides, or "-" for all zones. This + field has been deprecated and replaced by the parent + field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ListClustersResponse: + ListClustersResponse is the result of + ListClustersRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.ListClustersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.ListClustersRequest): + request = cluster_service.ListClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
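# A minimal usage sketch of the flattened-parameter path validated above;
# "my-project" is a placeholder project ID, and zone "-" selects all zones
# per the docstring:

from google.cloud import container_v1beta1

client = container_v1beta1.ClusterManagerClient()

# Flattened fields populate the request for you; mixing them with a
# `request` object raises ValueError, per the check above.
response = client.list_clusters(project_id="my-project", zone="-")
for cluster in response.clusters:
    print(cluster.name)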
+ return response + + def get_cluster( + self, + request: Optional[Union[cluster_service.GetClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Cluster: + r"""Gets the details for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_get_cluster(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.GetClusterRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.GetClusterRequest, dict]): + The request object. GetClusterRequest gets the settings + of a cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to retrieve. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Cluster: + A Google Kubernetes Engine cluster. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.GetClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cluster_service.GetClusterRequest): + request = cluster_service.GetClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cluster( + self, + request: Optional[Union[cluster_service.CreateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster: Optional[cluster_service.Cluster] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Creates a cluster, consisting of the specified number and type + of Google Compute Engine instances. + + By default, the cluster is created in the project's `default + network `__. + + One firewall is added for the cluster. After cluster creation, + the Kubelet creates routes for each node to allow the containers + on that node to communicate with all other instances in the + cluster. + + Finally, an entry is added to the project's global metadata + indicating which CIDR range the cluster is using. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_create_cluster(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.CreateClusterRequest( + project_id="project_id_value", + zone="zone_value", + ) + + # Make the request + response = client.create_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.CreateClusterRequest, dict]): + The request object. CreateClusterRequest creates a + cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.container_v1beta1.types.Cluster): + Required. 
A `cluster + resource `__ + + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CreateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CreateClusterRequest): + request = cluster_service.CreateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster is not None: + request.cluster = cluster + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_cluster( + self, + request: Optional[Union[cluster_service.UpdateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + update: Optional[cluster_service.ClusterUpdate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the settings for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_update_cluster(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.UpdateClusterRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = client.update_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.UpdateClusterRequest, dict]): + The request object. UpdateClusterRequest updates the + settings of a cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update (google.cloud.container_v1beta1.types.ClusterUpdate): + Required. A description of the + update. + + This corresponds to the ``update`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, update]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.UpdateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.UpdateClusterRequest): + request = cluster_service.UpdateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if update is not None: + request.update = update + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_node_pool( + self, + request: Optional[Union[cluster_service.UpdateNodePoolRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the version and/or image type of a specific + node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_update_node_pool(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.UpdateNodePoolRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + node_version="node_version_value", + image_type="image_type_value", + ) + + # Make the request + response = client.update_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.UpdateNodePoolRequest, dict]): + The request object. SetNodePoolVersionRequest updates the + version of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.UpdateNodePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.UpdateNodePoolRequest): + request = cluster_service.UpdateNodePoolRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_node_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_node_pool_autoscaling( + self, + request: Optional[ + Union[cluster_service.SetNodePoolAutoscalingRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the autoscaling settings of a specific node + pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_set_node_pool_autoscaling(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.SetNodePoolAutoscalingRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Make the request + response = client.set_node_pool_autoscaling(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.SetNodePoolAutoscalingRequest, dict]): + The request object. SetNodePoolAutoscalingRequest sets + the autoscaler settings of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetNodePoolAutoscalingRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetNodePoolAutoscalingRequest): + request = cluster_service.SetNodePoolAutoscalingRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.set_node_pool_autoscaling + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
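# A sketch of what the routing-header merge above produces; the resource
# name and the custom header are placeholder values:

from google.api_core import gapic_v1

routing = gapic_v1.routing_header.to_grpc_metadata(
    (("name", "projects/my-project/locations/us-central1/clusters/my-cluster"),)
)
# `routing` is a single ("x-goog-request-params", "name=...") key/value pair
# that the client appends to any caller-supplied metadata before the send.
metadata = (("x-custom-header", "value"),) + (routing,)
print(metadata)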
+ return response + + def set_logging_service( + self, + request: Optional[Union[cluster_service.SetLoggingServiceRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + logging_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the logging service for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_set_logging_service(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.SetLoggingServiceRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + ) + + # Make the request + response = client.set_logging_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.SetLoggingServiceRequest, dict]): + The request object. SetLoggingServiceRequest sets the + logging service of a cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + logging_service (str): + Required. The logging service the cluster should use to + write logs. Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud + Logging service with a Kubernetes-native resource + model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``logging.googleapis.com`` for + earlier versions. + + This corresponds to the ``logging_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, logging_service]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetLoggingServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetLoggingServiceRequest): + request = cluster_service.SetLoggingServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if logging_service is not None: + request.logging_service = logging_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_logging_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_monitoring_service( + self, + request: Optional[ + Union[cluster_service.SetMonitoringServiceRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + monitoring_service: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the monitoring service for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_set_monitoring_service(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.SetMonitoringServiceRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + ) + + # Make the request + response = client.set_monitoring_service(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.SetMonitoringServiceRequest, dict]): + The request object. SetMonitoringServiceRequest sets the + monitoring service of a cluster. 
+ project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + monitoring_service (str): + Required. The monitoring service the cluster should use + to write metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE + 1.15). + - ``none`` - No metrics will be exported from the + cluster. + + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will + be used for GKE 1.14+ or ``monitoring.googleapis.com`` + for earlier versions. + + This corresponds to the ``monitoring_service`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, monitoring_service]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetMonitoringServiceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetMonitoringServiceRequest): + request = cluster_service.SetMonitoringServiceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if monitoring_service is not None: + request.monitoring_service = monitoring_service + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_monitoring_service] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_addons_config( + self, + request: Optional[Union[cluster_service.SetAddonsConfigRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + addons_config: Optional[cluster_service.AddonsConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the addons for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_set_addons_config(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.SetAddonsConfigRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = client.set_addons_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.SetAddonsConfigRequest, dict]): + The request object. SetAddonsRequest sets the addons + associated with the cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + addons_config (google.cloud.container_v1beta1.types.AddonsConfig): + Required. The desired configurations + for the various addons available to run + in the cluster. + + This corresponds to the ``addons_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, addons_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetAddonsConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetAddonsConfigRequest): + request = cluster_service.SetAddonsConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if addons_config is not None: + request.addons_config = addons_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_addons_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_locations( + self, + request: Optional[Union[cluster_service.SetLocationsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + locations: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the locations for a specific cluster. Deprecated. Use + `projects.locations.clusters.update `__ + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_set_locations(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.SetLocationsRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=['locations_value1', 'locations_value2'], + ) + + # Make the request + response = client.set_locations(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.SetLocationsRequest, dict]): + The request object. SetLocationsRequest sets the + locations of the cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. 
+ + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + locations (MutableSequence[str]): + Required. The desired list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. Changing + the locations a cluster is in will result in nodes being + either created or removed from the cluster, depending on + whether locations are being added or removed. + + This list must always include the cluster's primary + zone. + + This corresponds to the ``locations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + warnings.warn( + "ClusterManagerClient.set_locations is deprecated", DeprecationWarning + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, locations]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetLocationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetLocationsRequest): + request = cluster_service.SetLocationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if locations is not None: + request.locations = locations + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
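# Because set_locations is deprecated, every call also emits a
# DeprecationWarning (see the warnings.warn above). A sketch of surfacing
# that in a test; field values are placeholders taken from the sample:

import warnings

from google.cloud import container_v1beta1

client = container_v1beta1.ClusterManagerClient()
request = container_v1beta1.SetLocationsRequest(
    project_id="project_id_value",
    zone="zone_value",
    cluster_id="cluster_id_value",
    locations=["locations_value1"],
)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    client.set_locations(request=request)

assert any(issubclass(w.category, DeprecationWarning) for w in caught)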
+ return response + + def update_master( + self, + request: Optional[Union[cluster_service.UpdateMasterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + master_version: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Updates the master for a specific cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_update_master(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.UpdateMasterRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + ) + + # Make the request + response = client.update_master(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.UpdateMasterRequest, dict]): + The request object. UpdateMasterRequest updates the + master of the cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to upgrade. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + master_version (str): + Required. The Kubernetes version to + change the master to. + Users may specify either explicit + versions offered by Kubernetes Engine or + version aliases, which have the + following behavior: + + - "latest": picks the highest valid + Kubernetes version + - "1.X": picks the highest valid + patch+gke.N patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N + patch in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit + Kubernetes version + - "-": picks the default Kubernetes + version + + This corresponds to the ``master_version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+
+        Returns:
+            google.cloud.container_v1beta1.types.Operation:
+                This operation resource represents
+                operations that may have happened or are
+                happening on the cluster. All fields are
+                output only.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([project_id, zone, cluster_id, master_version])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cluster_service.UpdateMasterRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cluster_service.UpdateMasterRequest):
+            request = cluster_service.UpdateMasterRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project_id is not None:
+            request.project_id = project_id
+        if zone is not None:
+            request.zone = zone
+        if cluster_id is not None:
+            request.cluster_id = cluster_id
+        if master_version is not None:
+            request.master_version = master_version
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_master]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def set_master_auth(
+        self,
+        request: Optional[Union[cluster_service.SetMasterAuthRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.Operation:
+        r"""Sets master auth materials. Currently supports
+        changing the admin password of a specific cluster,
+        either via password generation or explicitly setting the
+        password.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1beta1
+
+            def sample_set_master_auth():
+                # Create a client
+                client = container_v1beta1.ClusterManagerClient()
+
+                # Initialize request argument(s)
+                request = container_v1beta1.SetMasterAuthRequest(
+                    project_id="project_id_value",
+                    zone="zone_value",
+                    cluster_id="cluster_id_value",
+                    action="SET_USERNAME",
+                )
+
+                # Make the request
+                response = client.set_master_auth(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.container_v1beta1.types.SetMasterAuthRequest, dict]):
+                The request object. SetMasterAuthRequest updates the
+                admin password of a cluster.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetMasterAuthRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetMasterAuthRequest): + request = cluster_service.SetMasterAuthRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_master_auth] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_cluster( + self, + request: Optional[Union[cluster_service.DeleteClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Deletes the cluster, including the Kubernetes + endpoint and all worker nodes. + + Firewalls and routes that were configured during cluster + creation are also deleted. + + Other Google Compute Engine resources that might be in + use by the cluster, such as load balancer resources, are + not deleted if they weren't present when the cluster was + initially created. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_delete_cluster(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.DeleteClusterRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = client.delete_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.DeleteClusterRequest, dict]): + The request object. DeleteClusterRequest deletes a + cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to delete. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.DeleteClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.DeleteClusterRequest): + request = cluster_service.DeleteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_operations( + self, + request: Optional[Union[cluster_service.ListOperationsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListOperationsResponse: + r"""Lists all operations in a project in the specified + zone or all zones. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_list_operations(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.ListOperationsRequest( + project_id="project_id_value", + zone="zone_value", + ) + + # Make the request + response = client.list_operations(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.ListOperationsRequest, dict]): + The request object. ListOperationsRequest lists + operations. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + to return operations for, or ``-`` for all zones. This + field has been deprecated and replaced by the parent + field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ListOperationsResponse: + ListOperationsResponse is the result + of ListOperationsRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.ListOperationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.ListOperationsRequest): + request = cluster_service.ListOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
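+        # The response's `operations` field lists in-progress and completed
+        # operations, and `missing_zones` names any zones that could not be
+        # reached when listing across all zones.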
+ return response + + def get_operation( + self, + request: Optional[Union[cluster_service.GetOperationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + operation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Gets the specified operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_get_operation(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.GetOperationRequest( + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + ) + + # Make the request + response = client.get_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.GetOperationRequest, dict]): + The request object. GetOperationRequest gets a single + operation. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (str): + Required. Deprecated. The server-assigned ``name`` of + the operation. This field has been deprecated and + replaced by the name field. + + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, operation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.GetOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
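+        # A plain dict is also accepted and coerced below; e.g. a hypothetical
+        #   {"name": "projects/my-project/locations/us-central1-a/operations/operation-123"}
+        # targets the operation by its full resource name.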
+ if not isinstance(request, cluster_service.GetOperationRequest): + request = cluster_service.GetOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if operation_id is not None: + request.operation_id = operation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[Union[cluster_service.CancelOperationRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + operation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Cancels the specified operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_cancel_operation(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.CancelOperationRequest( + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + ) + + # Make the request + client.cancel_operation(request=request) + + Args: + request (Union[google.cloud.container_v1beta1.types.CancelOperationRequest, dict]): + The request object. CancelOperationRequest cancels a + single operation. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the operation resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (str): + Required. Deprecated. The server-assigned ``name`` of + the operation. This field has been deprecated and + replaced by the name field. + + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
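+
+        As a sketch with hypothetical resource names (not part of the
+        generated sample above), the operation can equally be addressed via
+        the newer ``name`` field instead of the deprecated
+        ``project_id``/``zone``/``operation_id`` triple:
+
+        .. code-block:: python
+
+            request = container_v1beta1.CancelOperationRequest(
+                name="projects/my-project/locations/us-central1-a/operations/operation-123",
+            )
+            client.cancel_operation(request=request)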
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, operation_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CancelOperationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CancelOperationRequest): + request = cluster_service.CancelOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if operation_id is not None: + request.operation_id = operation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_server_config( + self, + request: Optional[Union[cluster_service.GetServerConfigRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ServerConfig: + r"""Returns configuration info about the Google + Kubernetes Engine service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_get_server_config(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.GetServerConfigRequest( + project_id="project_id_value", + zone="zone_value", + ) + + # Make the request + response = client.get_server_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.GetServerConfigRequest, dict]): + The request object. Gets the current Kubernetes Engine + service configuration. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + to return operations for. This field has been deprecated + and replaced by the name field. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ServerConfig: + Kubernetes Engine service + configuration. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.GetServerConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.GetServerConfigRequest): + request = cluster_service.GetServerConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_server_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_json_web_keys( + self, + request: Optional[Union[cluster_service.GetJSONWebKeysRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.GetJSONWebKeysResponse: + r"""Gets the public component of the cluster signing keys + in JSON Web Key format. + This API is not yet intended for general use, and is not + available for all clusters. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_get_json_web_keys(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.GetJSONWebKeysRequest( + ) + + # Make the request + response = client.get_json_web_keys(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.GetJSONWebKeysRequest, dict]): + The request object. GetJSONWebKeysRequest gets the public component of the + keys used by the cluster to sign token requests. 
This
+                will be the jwks_uri for the discovery document returned
+                by getOpenIDConfig. See the OpenID Connect Discovery 1.0
+                specification for details.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.container_v1beta1.types.GetJSONWebKeysResponse:
+                GetJSONWebKeysResponse is a valid
+                JSON Web Key Set as specified in RFC
+                7517.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cluster_service.GetJSONWebKeysRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cluster_service.GetJSONWebKeysRequest):
+            request = cluster_service.GetJSONWebKeysRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_json_web_keys]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_node_pools(
+        self,
+        request: Optional[Union[cluster_service.ListNodePoolsRequest, dict]] = None,
+        *,
+        project_id: Optional[str] = None,
+        zone: Optional[str] = None,
+        cluster_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.ListNodePoolsResponse:
+        r"""Lists the node pools for a cluster.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1beta1
+
+            def sample_list_node_pools():
+                # Create a client
+                client = container_v1beta1.ClusterManagerClient()
+
+                # Initialize request argument(s)
+                request = container_v1beta1.ListNodePoolsRequest(
+                    project_id="project_id_value",
+                    zone="zone_value",
+                    cluster_id="cluster_id_value",
+                )
+
+                # Make the request
+                response = client.list_node_pools(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.container_v1beta1.types.ListNodePoolsRequest, dict]):
+                The request object. ListNodePoolsRequest lists the node
+                pool(s) for a cluster.
+            project_id (str):
+                Required. Deprecated. The Google Developers Console
+                `project ID or project
+                number `__.
+                This field has been deprecated and replaced by the
+                parent field.
+
+                This corresponds to the ``project_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                Required. Deprecated. The name of the Google Compute
+                Engine
+                `zone `__
+                in which the cluster resides. This field has been
+                deprecated and replaced by the parent field.
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the parent field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ListNodePoolsResponse: + ListNodePoolsResponse is the result + of ListNodePoolsRequest. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.ListNodePoolsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.ListNodePoolsRequest): + request = cluster_service.ListNodePoolsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_node_pools] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_node_pool( + self, + request: Optional[Union[cluster_service.GetNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.NodePool: + r"""Retrieves the requested node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_get_node_pool(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.GetNodePoolRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Make the request + response = client.get_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.GetNodePoolRequest, dict]): + The request object. GetNodePoolRequest retrieves a node + pool for a cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (str): + Required. Deprecated. The name of the + node pool. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.NodePool: + NodePool contains the name and + configuration for a cluster's node pool. + Node pools are a set of nodes (i.e. + VM's), with a common configuration and + specification, under the control of the + cluster master. They may have a set of + Kubernetes labels applied to them, which + may be used to reference them during pod + scheduling. They may also be resized up + or down, to accommodate the workload. + These upgrade settings control the level + of parallelism and the level of + disruption caused by an upgrade. + + maxUnavailable controls the number of + nodes that can be simultaneously + unavailable. + + maxSurge controls the number of + additional nodes that can be added to + the node pool temporarily for the time + of the upgrade to increase the number of + available nodes. + + (maxUnavailable + maxSurge) determines + the level of parallelism (how many nodes + are being upgraded at the same time). + + Note: upgrades inevitably introduce some + disruption since workloads need to be + moved from old nodes to new, upgraded + ones. Even if maxUnavailable=0, this + holds true. (Disruption stays within the + limits of PodDisruptionBudget, if it is + configured.) 
+ + Consider a hypothetical node pool with 5 + nodes having maxSurge=2, + maxUnavailable=1. This means the upgrade + process upgrades 3 nodes simultaneously. + It creates 2 additional (upgraded) + nodes, then it brings down 3 old (not + yet upgraded) nodes at the same time. + This ensures that there are always at + least 4 nodes available. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.GetNodePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.GetNodePoolRequest): + request = cluster_service.GetNodePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_node_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_node_pool( + self, + request: Optional[Union[cluster_service.CreateNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool: Optional[cluster_service.NodePool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Creates a node pool for a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_create_node_pool(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.CreateNodePoolRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Make the request + response = client.create_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.CreateNodePoolRequest, dict]): + The request object. CreateNodePoolRequest creates a node + pool for a cluster. 
+ project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the + parent field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the parent field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the parent field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool (google.cloud.container_v1beta1.types.NodePool): + Required. The node pool to create. + This corresponds to the ``node_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CreateNodePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CreateNodePoolRequest): + request = cluster_service.CreateNodePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool is not None: + request.node_pool = node_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_node_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
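+        # The returned Operation completes asynchronously; callers can poll it
+        # with get_operation until its status reaches DONE.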
+ return response + + def delete_node_pool( + self, + request: Optional[Union[cluster_service.DeleteNodePoolRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Deletes a node pool from a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_delete_node_pool(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.DeleteNodePoolRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Make the request + response = client.delete_node_pool(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.DeleteNodePoolRequest, dict]): + The request object. DeleteNodePoolRequest deletes a node + pool for a cluster. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (str): + Required. Deprecated. The name of the + node pool to delete. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
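+        # For example, flattened usage with hypothetical values looks like
+        #   client.delete_node_pool(project_id="my-project", zone="us-central1-a",
+        #                           cluster_id="my-cluster", node_pool_id="pool-1")
+        # and is mutually exclusive with passing `request`.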
+ has_flattened_params = any([project_id, zone, cluster_id, node_pool_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.DeleteNodePoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.DeleteNodePoolRequest): + request = cluster_service.DeleteNodePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_node_pool] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def complete_node_pool_upgrade( + self, + request: Optional[ + Union[cluster_service.CompleteNodePoolUpgradeRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""CompleteNodePoolUpgrade will signal an on-going node + pool upgrade to complete. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_complete_node_pool_upgrade(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.CompleteNodePoolUpgradeRequest( + ) + + # Make the request + client.complete_node_pool_upgrade(request=request) + + Args: + request (Union[google.cloud.container_v1beta1.types.CompleteNodePoolUpgradeRequest, dict]): + The request object. CompleteNodePoolUpgradeRequest sets + the name of target node pool to complete + upgrade. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CompleteNodePoolUpgradeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
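+        # A plain dict is also accepted and coerced below, e.g. a hypothetical
+        #   {"name": "projects/my-project/locations/us-central1-a/clusters/my-cluster/nodePools/pool-1"}.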
+        if not isinstance(request, cluster_service.CompleteNodePoolUpgradeRequest):
+            request = cluster_service.CompleteNodePoolUpgradeRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.complete_node_pool_upgrade
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def rollback_node_pool_upgrade(
+        self,
+        request: Optional[
+            Union[cluster_service.RollbackNodePoolUpgradeRequest, dict]
+        ] = None,
+        *,
+        project_id: Optional[str] = None,
+        zone: Optional[str] = None,
+        cluster_id: Optional[str] = None,
+        node_pool_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cluster_service.Operation:
+        r"""Rolls back a previously Aborted or Failed NodePool
+        upgrade. This makes no changes if the last upgrade
+        successfully completed.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import container_v1beta1
+
+            def sample_rollback_node_pool_upgrade():
+                # Create a client
+                client = container_v1beta1.ClusterManagerClient()
+
+                # Initialize request argument(s)
+                request = container_v1beta1.RollbackNodePoolUpgradeRequest(
+                    project_id="project_id_value",
+                    zone="zone_value",
+                    cluster_id="cluster_id_value",
+                    node_pool_id="node_pool_id_value",
+                )
+
+                # Make the request
+                response = client.rollback_node_pool_upgrade(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.container_v1beta1.types.RollbackNodePoolUpgradeRequest, dict]):
+                The request object. RollbackNodePoolUpgradeRequest
+                rolls back the previously Aborted or
+                Failed NodePool upgrade. This will be a
+                no-op if the last upgrade successfully
+                completed.
+            project_id (str):
+                Required. Deprecated. The Google Developers Console
+                `project ID or project
+                number `__.
+                This field has been deprecated and replaced by the name
+                field.
+
+                This corresponds to the ``project_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            zone (str):
+                Required. Deprecated. The name of the Google Compute
+                Engine
+                `zone `__
+                in which the cluster resides. This field has been
+                deprecated and replaced by the name field.
+
+                This corresponds to the ``zone`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            cluster_id (str):
+                Required. Deprecated. The name of the
+                cluster to roll back. This field has been
+                deprecated and replaced by the name
+                field.
+
+                This corresponds to the ``cluster_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            node_pool_id (str):
+                Required. Deprecated. The name of the
+                node pool to roll back. This field has
+                been deprecated and replaced by the name
+                field.
+ + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, node_pool_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.RollbackNodePoolUpgradeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.RollbackNodePoolUpgradeRequest): + request = cluster_service.RollbackNodePoolUpgradeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.rollback_node_pool_upgrade + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_node_pool_management( + self, + request: Optional[ + Union[cluster_service.SetNodePoolManagementRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + node_pool_id: Optional[str] = None, + management: Optional[cluster_service.NodeManagement] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets the NodeManagement options for a node pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_set_node_pool_management(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.SetNodePoolManagementRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Make the request + response = client.set_node_pool_management(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.SetNodePoolManagementRequest, dict]): + The request object. SetNodePoolManagementRequest sets the + node management properties of a node + pool. + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster to update. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_pool_id (str): + Required. Deprecated. The name of the + node pool to update. This field has been + deprecated and replaced by the name + field. + + This corresponds to the ``node_pool_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + management (google.cloud.container_v1beta1.types.NodeManagement): + Required. NodeManagement + configuration for the node pool. + + This corresponds to the ``management`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, zone, cluster_id, node_pool_id, management] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetNodePoolManagementRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
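+        # For example, the flattened `management` argument could be built as
+        #   container_v1beta1.NodeManagement(auto_upgrade=True, auto_repair=True)
+        # (hypothetical values) to enable auto-upgrade and auto-repair together.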
+ if not isinstance(request, cluster_service.SetNodePoolManagementRequest): + request = cluster_service.SetNodePoolManagementRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if node_pool_id is not None: + request.node_pool_id = node_pool_id + if management is not None: + request.management = management + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_node_pool_management] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels( + self, + request: Optional[Union[cluster_service.SetLabelsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + resource_labels: Optional[MutableMapping[str, str]] = None, + label_fingerprint: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Sets labels on a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_set_labels(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.SetLabelsRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + label_fingerprint="label_fingerprint_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.SetLabelsRequest, dict]): + The request object. SetLabelsRequest sets the Google + Cloud Platform labels on a Google + Container Engine cluster, which will in + turn set them for Google Compute Engine + resources used by that cluster + project_id (str): + Required. Deprecated. The Google Developers Console + `project ID or project + number `__. + This field has been deprecated and replaced by the name + field. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Required. Deprecated. The name of the Google Compute + Engine + `zone `__ + in which the cluster resides. This field has been + deprecated and replaced by the name field. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster. 
This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_labels (MutableMapping[str, str]): + Required. The labels to set for that + cluster. + + This corresponds to the ``resource_labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + label_fingerprint (str): + Required. The fingerprint of the previous set of labels + for this resource, used to detect conflicts. The + fingerprint is initially generated by Kubernetes Engine + and changes after every request to modify or update + labels. You must always provide an up-to-date + fingerprint hash when updating or changing labels. Make + a ``get()`` request to the resource to get the latest + fingerprint. + + This corresponds to the ``label_fingerprint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, zone, cluster_id, resource_labels, label_fingerprint] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetLabelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetLabelsRequest): + request = cluster_service.SetLabelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if resource_labels is not None: + request.resource_labels = resource_labels + if label_fingerprint is not None: + request.label_fingerprint = label_fingerprint + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
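The ``label_fingerprint`` handshake documented above is an optimistic-concurrency pattern that is easiest to see end to end. A minimal sketch of the read-modify-write flow, assuming a hypothetical project and cluster and using the non-deprecated ``name`` form:

.. code-block:: python

    from google.cloud import container_v1beta1

    client = container_v1beta1.ClusterManagerClient()
    # Hypothetical resource name.
    name = "projects/my-project/locations/us-central1-a/clusters/my-cluster"

    # Read the current labels together with their fingerprint.
    cluster = client.get_cluster(name=name)
    labels = dict(cluster.resource_labels)
    labels["env"] = "staging"

    # Write the labels back; the fingerprint read above lets the server
    # detect a conflicting update made in between.
    operation = client.set_labels(
        request=container_v1beta1.SetLabelsRequest(
            name=name,
            resource_labels=labels,
            label_fingerprint=cluster.label_fingerprint,
        )
    )
    print(operation.status)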
+ return response
+
+ def set_legacy_abac(
+ self,
+ request: Optional[Union[cluster_service.SetLegacyAbacRequest, dict]] = None,
+ *,
+ project_id: Optional[str] = None,
+ zone: Optional[str] = None,
+ cluster_id: Optional[str] = None,
+ enabled: Optional[bool] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.Operation:
+ r"""Enables or disables the ABAC authorization mechanism
+ on a cluster.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ def sample_set_legacy_abac():
+ # Create a client
+ client = container_v1beta1.ClusterManagerClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.SetLegacyAbacRequest(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ enabled=True,
+ )
+
+ # Make the request
+ response = client.set_legacy_abac(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.container_v1beta1.types.SetLegacyAbacRequest, dict]):
+ The request object. SetLegacyAbacRequest enables or
+ disables the ABAC authorization
+ mechanism for a cluster.
+ project_id (str):
+ Required. Deprecated. The Google Developers Console
+ `project ID or project
+ number <https://cloud.google.com/resource-manager/docs/creating-managing-projects>`__.
+ This field has been deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``project_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ Required. Deprecated. The name of the Google Compute
+ Engine
+ `zone <https://cloud.google.com/compute/docs/zones#available>`__
+ in which the cluster resides. This field has been
+ deprecated and replaced by the name field.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ cluster_id (str):
+ Required. Deprecated. The name of the
+ cluster to update. This field has been
+ deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``cluster_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ enabled (bool):
+ Required. Whether ABAC authorization
+ will be enabled in the cluster.
+
+ This corresponds to the ``enabled`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.container_v1beta1.types.Operation:
+ This operation resource represents
+ operations that may have happened or are
+ happening on the cluster. All fields are
+ output only.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project_id, zone, cluster_id, enabled])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cluster_service.SetLegacyAbacRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cluster_service.SetLegacyAbacRequest):
+ request = cluster_service.SetLegacyAbacRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if zone is not None:
+ request.zone = zone
+ if cluster_id is not None:
+ request.cluster_id = cluster_id
+ if enabled is not None:
+ request.enabled = enabled
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_legacy_abac]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def start_ip_rotation(
+ self,
+ request: Optional[Union[cluster_service.StartIPRotationRequest, dict]] = None,
+ *,
+ project_id: Optional[str] = None,
+ zone: Optional[str] = None,
+ cluster_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.Operation:
+ r"""Starts master IP rotation.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ def sample_start_ip_rotation():
+ # Create a client
+ client = container_v1beta1.ClusterManagerClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.StartIPRotationRequest(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Make the request
+ response = client.start_ip_rotation(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.container_v1beta1.types.StartIPRotationRequest, dict]):
+ The request object. StartIPRotationRequest creates a new
+ IP for the cluster and then performs a
+ node upgrade on each node pool to point
+ to the new IP.
+ project_id (str):
+ Required. Deprecated. The Google Developers Console
+ `project ID or project
+ number <https://cloud.google.com/resource-manager/docs/creating-managing-projects>`__.
+ This field has been deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``project_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ Required. Deprecated. The name of the Google Compute
+ Engine
+ `zone <https://cloud.google.com/compute/docs/zones#available>`__
+ in which the cluster resides. This field has been
+ deprecated and replaced by the name field.
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated + and replaced by the name field. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.StartIPRotationRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.StartIPRotationRequest): + request = cluster_service.StartIPRotationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_ip_rotation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def complete_ip_rotation( + self, + request: Optional[ + Union[cluster_service.CompleteIPRotationRequest, dict] + ] = None, + *, + project_id: Optional[str] = None, + zone: Optional[str] = None, + cluster_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""Completes master IP rotation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ def sample_complete_ip_rotation():
+ # Create a client
+ client = container_v1beta1.ClusterManagerClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.CompleteIPRotationRequest(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Make the request
+ response = client.complete_ip_rotation(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.container_v1beta1.types.CompleteIPRotationRequest, dict]):
+ The request object. CompleteIPRotationRequest moves the
+ cluster master back into single-IP mode.
+ project_id (str):
+ Required. Deprecated. The Google Developers Console
+ `project ID or project
+ number <https://cloud.google.com/resource-manager/docs/creating-managing-projects>`__.
+ This field has been deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``project_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ Required. Deprecated. The name of the Google Compute
+ Engine
+ `zone <https://cloud.google.com/compute/docs/zones#available>`__
+ in which the cluster resides. This field has been
+ deprecated and replaced by the name field.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ cluster_id (str):
+ Required. Deprecated. The name of the
+ cluster. This field has been deprecated
+ and replaced by the name field.
+
+ This corresponds to the ``cluster_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.container_v1beta1.types.Operation:
+ This operation resource represents
+ operations that may have happened or are
+ happening on the cluster. All fields are
+ output only.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project_id, zone, cluster_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cluster_service.CompleteIPRotationRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cluster_service.CompleteIPRotationRequest):
+ request = cluster_service.CompleteIPRotationRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if zone is not None:
+ request.zone = zone
+ if cluster_id is not None:
+ request.cluster_id = cluster_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.complete_ip_rotation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
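+ # (to_grpc_metadata renders this as the ``x-goog-request-params``
+ # header, which the backend uses to route the request.)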
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_node_pool_size( + self, + request: Optional[Union[cluster_service.SetNodePoolSizeRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.Operation: + r"""SetNodePoolSizeRequest sets the size of a node pool. The new + size will be used for all replicas, including future replicas + created by modifying + [NodePool.locations][google.container.v1beta1.NodePool.locations]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_set_node_pool_size(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.SetNodePoolSizeRequest( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + node_count=1070, + ) + + # Make the request + response = client.set_node_pool_size(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.SetNodePoolSizeRequest, dict]): + The request object. SetNodePoolSizeRequest sets the size + of a node pool. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetNodePoolSizeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetNodePoolSizeRequest): + request = cluster_service.SetNodePoolSizeRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_node_pool_size] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
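Unlike its neighbors, ``set_node_pool_size`` exposes no flattened fields, so the request message is always built explicitly. A minimal sketch using the non-deprecated ``name`` form (resource name hypothetical):

.. code-block:: python

    from google.cloud import container_v1beta1

    client = container_v1beta1.ClusterManagerClient()

    # Resize the (hypothetical) default pool to three nodes.
    request = container_v1beta1.SetNodePoolSizeRequest(
        name=(
            "projects/my-project/locations/us-central1-a/"
            "clusters/my-cluster/nodePools/default-pool"
        ),
        node_count=3,
    )
    operation = client.set_node_pool_size(request=request)
    print(operation.status)

Like the other mutating RPCs in this client, the call returns a ``cluster_service.Operation`` describing server-side progress; all of its fields are output only.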
+ return response
+
+ def set_network_policy(
+ self,
+ request: Optional[Union[cluster_service.SetNetworkPolicyRequest, dict]] = None,
+ *,
+ project_id: Optional[str] = None,
+ zone: Optional[str] = None,
+ cluster_id: Optional[str] = None,
+ network_policy: Optional[cluster_service.NetworkPolicy] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.Operation:
+ r"""Enables or disables Network Policy for a cluster.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ def sample_set_network_policy():
+ # Create a client
+ client = container_v1beta1.ClusterManagerClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.SetNetworkPolicyRequest(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Make the request
+ response = client.set_network_policy(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.container_v1beta1.types.SetNetworkPolicyRequest, dict]):
+ The request object. SetNetworkPolicyRequest
+ enables/disables network policy for a
+ cluster.
+ project_id (str):
+ Required. Deprecated. The Google Developers Console
+ `project ID or project
+ number <https://cloud.google.com/resource-manager/docs/creating-managing-projects>`__.
+ This field has been deprecated and replaced by the name
+ field.
+
+ This corresponds to the ``project_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ Required. Deprecated. The name of the Google Compute
+ Engine
+ `zone <https://cloud.google.com/compute/docs/zones#available>`__
+ in which the cluster resides. This field has been
+ deprecated and replaced by the name field.
+
+ This corresponds to the ``zone`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ cluster_id (str):
+ Required. Deprecated. The name of the
+ cluster. This field has been deprecated
+ and replaced by the name field.
+
+ This corresponds to the ``cluster_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ network_policy (google.cloud.container_v1beta1.types.NetworkPolicy):
+ Required. Configuration options for
+ the NetworkPolicy feature.
+
+ This corresponds to the ``network_policy`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.container_v1beta1.types.Operation:
+ This operation resource represents
+ operations that may have happened or are
+ happening on the cluster. All fields are
+ output only.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
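+ # Supplying both ``request`` and flattened fields would be ambiguous,
+ # so the check below rejects the call with a ValueError instead of
+ # silently preferring one of the two.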
+ has_flattened_params = any([project_id, zone, cluster_id, network_policy])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cluster_service.SetNetworkPolicyRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cluster_service.SetNetworkPolicyRequest):
+ request = cluster_service.SetNetworkPolicyRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if project_id is not None:
+ request.project_id = project_id
+ if zone is not None:
+ request.zone = zone
+ if cluster_id is not None:
+ request.cluster_id = cluster_id
+ if network_policy is not None:
+ request.network_policy = network_policy
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_network_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def set_maintenance_policy(
+ self,
+ request: Optional[
+ Union[cluster_service.SetMaintenancePolicyRequest, dict]
+ ] = None,
+ *,
+ project_id: Optional[str] = None,
+ zone: Optional[str] = None,
+ cluster_id: Optional[str] = None,
+ maintenance_policy: Optional[cluster_service.MaintenancePolicy] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cluster_service.Operation:
+ r"""Sets the maintenance policy for a cluster.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ def sample_set_maintenance_policy():
+ # Create a client
+ client = container_v1beta1.ClusterManagerClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.SetMaintenancePolicyRequest(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Make the request
+ response = client.set_maintenance_policy(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.container_v1beta1.types.SetMaintenancePolicyRequest, dict]):
+ The request object. SetMaintenancePolicyRequest sets the
+ maintenance policy for a cluster.
+ project_id (str):
+ Required. The Google Developers Console `project ID or
+ project
+ number <https://cloud.google.com/resource-manager/docs/creating-managing-projects>`__.
+
+ This corresponds to the ``project_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ zone (str):
+ Required. The name of the Google Compute Engine
+ `zone <https://cloud.google.com/compute/docs/zones#available>`__
+ in which the cluster resides.
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_id (str): + Required. The name of the cluster to + update. + + This corresponds to the ``cluster_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + maintenance_policy (google.cloud.container_v1beta1.types.MaintenancePolicy): + Required. The maintenance policy to + be set for the cluster. An empty field + clears the existing maintenance policy. + + This corresponds to the ``maintenance_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.Operation: + This operation resource represents + operations that may have happened or are + happening on the cluster. All fields are + output only. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, zone, cluster_id, maintenance_policy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.SetMaintenancePolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.SetMaintenancePolicyRequest): + request = cluster_service.SetMaintenancePolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if zone is not None: + request.zone = zone + if cluster_id is not None: + request.cluster_id = cluster_id + if maintenance_policy is not None: + request.maintenance_policy = maintenance_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_maintenance_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_usable_subnetworks( + self, + request: Optional[ + Union[cluster_service.ListUsableSubnetworksRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListUsableSubnetworksPager: + r"""Lists subnetworks that can be used for creating + clusters in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import container_v1beta1
+
+ def sample_list_usable_subnetworks():
+ # Create a client
+ client = container_v1beta1.ClusterManagerClient()
+
+ # Initialize request argument(s)
+ request = container_v1beta1.ListUsableSubnetworksRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_usable_subnetworks(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.container_v1beta1.types.ListUsableSubnetworksRequest, dict]):
+ The request object. ListUsableSubnetworksRequest requests
+ the list of usable subnetworks
+ available to a user for creating
+ clusters.
+ parent (str):
+ Required. The parent project where subnetworks are
+ usable. Specified in the format ``projects/*``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.container_v1beta1.services.cluster_manager.pagers.ListUsableSubnetworksPager:
+ ListUsableSubnetworksResponse is the
+ response of
+ ListUsableSubnetworksRequest.
+
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a cluster_service.ListUsableSubnetworksRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, cluster_service.ListUsableSubnetworksRequest):
+ request = cluster_service.ListUsableSubnetworksRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_usable_subnetworks]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListUsableSubnetworksPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
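The pager returned here can be consumed item by item, with follow-up pages fetched transparently; its ``pages`` property instead yields whole responses when per-page fields such as ``next_page_token`` are needed. A minimal sketch (parent value hypothetical):

.. code-block:: python

    from google.cloud import container_v1beta1

    client = container_v1beta1.ClusterManagerClient()
    pager = client.list_usable_subnetworks(
        request=container_v1beta1.ListUsableSubnetworksRequest(
            parent="projects/my-project",
        )
    )

    # Each element is a cluster_service.UsableSubnetwork message.
    for usable in pager:
        print(usable.subnetwork, usable.ip_cidr_range)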
+ return response + + def check_autopilot_compatibility( + self, + request: Optional[ + Union[cluster_service.CheckAutopilotCompatibilityRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.CheckAutopilotCompatibilityResponse: + r"""Checks the cluster compatibility with Autopilot mode, + and returns a list of compatibility issues. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_check_autopilot_compatibility(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.CheckAutopilotCompatibilityRequest( + ) + + # Make the request + response = client.check_autopilot_compatibility(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.CheckAutopilotCompatibilityRequest, dict]): + The request object. CheckAutopilotCompatibilityRequest + requests getting the blockers for the + given operation in the cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.CheckAutopilotCompatibilityResponse: + CheckAutopilotCompatibilityResponse + has a list of compatibility issues. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.CheckAutopilotCompatibilityRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.CheckAutopilotCompatibilityRequest): + request = cluster_service.CheckAutopilotCompatibilityRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.check_autopilot_compatibility + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[Union[cluster_service.ListLocationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cluster_service.ListLocationsResponse: + r"""Fetches locations that offer Google Kubernetes + Engine. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import container_v1beta1 + + def sample_list_locations(): + # Create a client + client = container_v1beta1.ClusterManagerClient() + + # Initialize request argument(s) + request = container_v1beta1.ListLocationsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_locations(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.container_v1beta1.types.ListLocationsRequest, dict]): + The request object. ListLocationsRequest is used to + request the locations that offer GKE. + parent (str): + Required. Contains the name of the resource requested. + Specified in the format ``projects/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.container_v1beta1.types.ListLocationsResponse: + ListLocationsResponse returns the + list of all GKE locations and their + recommendation state. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cluster_service.ListLocationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cluster_service.ListLocationsRequest): + request = cluster_service.ListLocationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ClusterManagerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ClusterManagerClient",) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/pagers.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/pagers.py new file mode 100644 index 000000000000..a1474858f201 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/pagers.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.container_v1beta1.types import cluster_service + + +class ListUsableSubnetworksPager: + """A pager for iterating through ``list_usable_subnetworks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.container_v1beta1.types.ListUsableSubnetworksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subnetworks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsableSubnetworks`` requests and continue to iterate + through the ``subnetworks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.container_v1beta1.types.ListUsableSubnetworksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cluster_service.ListUsableSubnetworksResponse], + request: cluster_service.ListUsableSubnetworksRequest, + response: cluster_service.ListUsableSubnetworksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.container_v1beta1.types.ListUsableSubnetworksRequest): + The initial request object. + response (google.cloud.container_v1beta1.types.ListUsableSubnetworksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cluster_service.ListUsableSubnetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cluster_service.ListUsableSubnetworksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cluster_service.UsableSubnetwork]: + for page in self.pages: + yield from page.subnetworks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsableSubnetworksAsyncPager: + """A pager for iterating through ``list_usable_subnetworks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.container_v1beta1.types.ListUsableSubnetworksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subnetworks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListUsableSubnetworks`` requests and continue to iterate + through the ``subnetworks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.container_v1beta1.types.ListUsableSubnetworksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cluster_service.ListUsableSubnetworksResponse]], + request: cluster_service.ListUsableSubnetworksRequest, + response: cluster_service.ListUsableSubnetworksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.container_v1beta1.types.ListUsableSubnetworksRequest): + The initial request object. + response (google.cloud.container_v1beta1.types.ListUsableSubnetworksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cluster_service.ListUsableSubnetworksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[cluster_service.ListUsableSubnetworksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[cluster_service.UsableSubnetwork]: + async def async_generator(): + async for page in self.pages: + for response in page.subnetworks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/__init__.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/__init__.py new file mode 100644 index 000000000000..dd25fdf97133 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/__init__.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ClusterManagerTransport +from .grpc import ClusterManagerGrpcTransport +from .grpc_asyncio import ClusterManagerGrpcAsyncIOTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ClusterManagerTransport]] +_transport_registry["grpc"] = ClusterManagerGrpcTransport +_transport_registry["grpc_asyncio"] = ClusterManagerGrpcAsyncIOTransport + +__all__ = ( + "ClusterManagerTransport", + "ClusterManagerGrpcTransport", + "ClusterManagerGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/base.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/base.py new file mode 100644 index 000000000000..e48897fcc460 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/base.py @@ -0,0 +1,763 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.container_v1beta1 import gapic_version as package_version +from google.cloud.container_v1beta1.types import cluster_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ClusterManagerTransport(abc.ABC): + """Abstract transport class for ClusterManager.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "container.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
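+ # (Self-signed JWTs let the client authenticate without a round
+ # trip to the OAuth token endpoint when the service accepts them.)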
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_clusters: gapic_v1.method.wrap_method( + self.list_clusters, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_cluster: gapic_v1.method.wrap_method( + self.get_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_cluster: gapic_v1.method.wrap_method( + self.create_cluster, + default_timeout=45.0, + client_info=client_info, + ), + self.update_cluster: gapic_v1.method.wrap_method( + self.update_cluster, + default_timeout=45.0, + client_info=client_info, + ), + self.update_node_pool: gapic_v1.method.wrap_method( + self.update_node_pool, + default_timeout=45.0, + client_info=client_info, + ), + self.set_node_pool_autoscaling: gapic_v1.method.wrap_method( + self.set_node_pool_autoscaling, + default_timeout=45.0, + client_info=client_info, + ), + self.set_logging_service: gapic_v1.method.wrap_method( + self.set_logging_service, + default_timeout=45.0, + client_info=client_info, + ), + self.set_monitoring_service: gapic_v1.method.wrap_method( + self.set_monitoring_service, + default_timeout=45.0, + client_info=client_info, + ), + self.set_addons_config: gapic_v1.method.wrap_method( + self.set_addons_config, + default_timeout=45.0, + client_info=client_info, + ), + self.set_locations: gapic_v1.method.wrap_method( + self.set_locations, + default_timeout=45.0, + client_info=client_info, + ), + self.update_master: gapic_v1.method.wrap_method( + self.update_master, + default_timeout=45.0, + client_info=client_info, + ), + self.set_master_auth: gapic_v1.method.wrap_method( + self.set_master_auth, + default_timeout=45.0, + client_info=client_info, + ), + self.delete_cluster: gapic_v1.method.wrap_method( + self.delete_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=45.0, + client_info=client_info, + ), + self.get_server_config: gapic_v1.method.wrap_method( + self.get_server_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_json_web_keys: gapic_v1.method.wrap_method( + self.get_json_web_keys, + default_timeout=None, + client_info=client_info, + ), + self.list_node_pools: gapic_v1.method.wrap_method( + self.list_node_pools, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_node_pool: gapic_v1.method.wrap_method( + self.get_node_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_node_pool: gapic_v1.method.wrap_method( + self.create_node_pool, + default_timeout=45.0, + client_info=client_info, + ), + self.delete_node_pool: gapic_v1.method.wrap_method( + self.delete_node_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.complete_node_pool_upgrade: gapic_v1.method.wrap_method( + self.complete_node_pool_upgrade, + default_timeout=None, + client_info=client_info, + ), + self.rollback_node_pool_upgrade: gapic_v1.method.wrap_method( + self.rollback_node_pool_upgrade, + default_timeout=45.0, + client_info=client_info, + ), + self.set_node_pool_management: gapic_v1.method.wrap_method( + self.set_node_pool_management, + default_timeout=45.0, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=45.0, + client_info=client_info, + ), + self.set_legacy_abac: gapic_v1.method.wrap_method( + self.set_legacy_abac, + default_timeout=45.0, + client_info=client_info, + ), + self.start_ip_rotation: gapic_v1.method.wrap_method( + self.start_ip_rotation, + default_timeout=45.0, + client_info=client_info, + ), + self.complete_ip_rotation: gapic_v1.method.wrap_method( + self.complete_ip_rotation, + default_timeout=45.0, + client_info=client_info, + ), + self.set_node_pool_size: gapic_v1.method.wrap_method( + self.set_node_pool_size, + default_timeout=45.0, + client_info=client_info, + ), + self.set_network_policy: gapic_v1.method.wrap_method( + self.set_network_policy, + default_timeout=45.0, + client_info=client_info, + ), + self.set_maintenance_policy: gapic_v1.method.wrap_method( + self.set_maintenance_policy, + default_timeout=45.0, + client_info=client_info, + ), + self.list_usable_subnetworks: gapic_v1.method.wrap_method( + self.list_usable_subnetworks, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.check_autopilot_compatibility: gapic_v1.method.wrap_method( + self.check_autopilot_compatibility, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_clusters( + self, + ) -> Callable[ + [cluster_service.ListClustersRequest], + Union[ + cluster_service.ListClustersResponse, + Awaitable[cluster_service.ListClustersResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_cluster( + self, + ) -> Callable[ + [cluster_service.GetClusterRequest], + Union[cluster_service.Cluster, Awaitable[cluster_service.Cluster]], + ]: + raise NotImplementedError() + + @property + def create_cluster( + self, + ) -> Callable[ + [cluster_service.CreateClusterRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def update_cluster( + self, + ) -> Callable[ + [cluster_service.UpdateClusterRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def update_node_pool( + self, + ) -> Callable[ + [cluster_service.UpdateNodePoolRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_node_pool_autoscaling( + self, + ) -> Callable[ + [cluster_service.SetNodePoolAutoscalingRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_logging_service( + self, + ) -> Callable[ + [cluster_service.SetLoggingServiceRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_monitoring_service( + self, + ) -> Callable[ + [cluster_service.SetMonitoringServiceRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_addons_config( + self, + ) -> Callable[ + [cluster_service.SetAddonsConfigRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_locations( + self, + ) -> Callable[ + [cluster_service.SetLocationsRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def update_master( + self, + ) -> Callable[ + [cluster_service.UpdateMasterRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_master_auth( + self, + ) -> Callable[ + [cluster_service.SetMasterAuthRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise 
NotImplementedError() + + @property + def delete_cluster( + self, + ) -> Callable[ + [cluster_service.DeleteClusterRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [cluster_service.ListOperationsRequest], + Union[ + cluster_service.ListOperationsResponse, + Awaitable[cluster_service.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [cluster_service.GetOperationRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [cluster_service.CancelOperationRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_server_config( + self, + ) -> Callable[ + [cluster_service.GetServerConfigRequest], + Union[cluster_service.ServerConfig, Awaitable[cluster_service.ServerConfig]], + ]: + raise NotImplementedError() + + @property + def get_json_web_keys( + self, + ) -> Callable[ + [cluster_service.GetJSONWebKeysRequest], + Union[ + cluster_service.GetJSONWebKeysResponse, + Awaitable[cluster_service.GetJSONWebKeysResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_node_pools( + self, + ) -> Callable[ + [cluster_service.ListNodePoolsRequest], + Union[ + cluster_service.ListNodePoolsResponse, + Awaitable[cluster_service.ListNodePoolsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_node_pool( + self, + ) -> Callable[ + [cluster_service.GetNodePoolRequest], + Union[cluster_service.NodePool, Awaitable[cluster_service.NodePool]], + ]: + raise NotImplementedError() + + @property + def create_node_pool( + self, + ) -> Callable[ + [cluster_service.CreateNodePoolRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_node_pool( + self, + ) -> Callable[ + [cluster_service.DeleteNodePoolRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def complete_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.CompleteNodePoolUpgradeRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def rollback_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.RollbackNodePoolUpgradeRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_node_pool_management( + self, + ) -> Callable[ + [cluster_service.SetNodePoolManagementRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_labels( + self, + ) -> Callable[ + [cluster_service.SetLabelsRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_legacy_abac( + self, + ) -> Callable[ + [cluster_service.SetLegacyAbacRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def start_ip_rotation( + self, + ) -> Callable[ + [cluster_service.StartIPRotationRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise 
NotImplementedError() + + @property + def complete_ip_rotation( + self, + ) -> Callable[ + [cluster_service.CompleteIPRotationRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_node_pool_size( + self, + ) -> Callable[ + [cluster_service.SetNodePoolSizeRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_network_policy( + self, + ) -> Callable[ + [cluster_service.SetNetworkPolicyRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def set_maintenance_policy( + self, + ) -> Callable[ + [cluster_service.SetMaintenancePolicyRequest], + Union[cluster_service.Operation, Awaitable[cluster_service.Operation]], + ]: + raise NotImplementedError() + + @property + def list_usable_subnetworks( + self, + ) -> Callable[ + [cluster_service.ListUsableSubnetworksRequest], + Union[ + cluster_service.ListUsableSubnetworksResponse, + Awaitable[cluster_service.ListUsableSubnetworksResponse], + ], + ]: + raise NotImplementedError() + + @property + def check_autopilot_compatibility( + self, + ) -> Callable[ + [cluster_service.CheckAutopilotCompatibilityRequest], + Union[ + cluster_service.CheckAutopilotCompatibilityResponse, + Awaitable[cluster_service.CheckAutopilotCompatibilityResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [cluster_service.ListLocationsRequest], + Union[ + cluster_service.ListLocationsResponse, + Awaitable[cluster_service.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ClusterManagerTransport",) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc.py new file mode 100644 index 000000000000..0fae52c969e2 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc.py @@ -0,0 +1,1228 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.container_v1beta1.types import cluster_service + +from .base import DEFAULT_CLIENT_INFO, ClusterManagerTransport + + +class ClusterManagerGrpcTransport(ClusterManagerTransport): + """gRPC backend transport for ClusterManager. 
+
+    Google Kubernetes Engine Cluster Manager v1beta1
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "container.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWTs should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "container.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def list_clusters(
+        self,
+    ) -> Callable[
+        [cluster_service.ListClustersRequest], cluster_service.ListClustersResponse
+    ]:
+        r"""Return a callable for the list clusters method over gRPC.
+
+        Lists all clusters owned by a project in either the
+        specified zone or all zones.
+
+        Returns:
+            Callable[[~.ListClustersRequest],
+                    ~.ListClustersResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_clusters" not in self._stubs:
+            self._stubs["list_clusters"] = self.grpc_channel.unary_unary(
+                "/google.container.v1beta1.ClusterManager/ListClusters",
+                request_serializer=cluster_service.ListClustersRequest.serialize,
+                response_deserializer=cluster_service.ListClustersResponse.deserialize,
+            )
+        return self._stubs["list_clusters"]
+
+    @property
+    def get_cluster(
+        self,
+    ) -> Callable[[cluster_service.GetClusterRequest], cluster_service.Cluster]:
+        r"""Return a callable for the get cluster method over gRPC.
+
+        Gets the details for a specific cluster.
+
+        Returns:
+            Callable[[~.GetClusterRequest],
+                    ~.Cluster]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_cluster" not in self._stubs:
+            self._stubs["get_cluster"] = self.grpc_channel.unary_unary(
+                "/google.container.v1beta1.ClusterManager/GetCluster",
+                request_serializer=cluster_service.GetClusterRequest.serialize,
+                response_deserializer=cluster_service.Cluster.deserialize,
+            )
+        return self._stubs["get_cluster"]
+
+    @property
+    def create_cluster(
+        self,
+    ) -> Callable[[cluster_service.CreateClusterRequest], cluster_service.Operation]:
+        r"""Return a callable for the create cluster method over gRPC.
+
+        Creates a cluster, consisting of the specified number and type
+        of Google Compute Engine instances.
+
+        By default, the cluster is created in the project's `default
+        network <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__.
+
+        One firewall is added for the cluster. After cluster creation,
+        the Kubelet creates routes for each node to allow the containers
+        on that node to communicate with all other instances in the
+        cluster.
+
+        Finally, an entry is added to the project's global metadata
+        indicating which CIDR range the cluster is using.
+
+        Returns:
+            Callable[[~.CreateClusterRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
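+        # The stub is built lazily: the first property access registers the
+        # RPC path on the channel and caches the resulting callable in
+        # self._stubs, so later accesses reuse one stub per channel.
+        # Illustrative use, with a hypothetical request object:
+        #   operation = transport.create_cluster(
+        #       cluster_service.CreateClusterRequest(parent="projects/p/locations/l")
+        #   )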
+ if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CreateCluster", + request_serializer=cluster_service.CreateClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[[cluster_service.UpdateClusterRequest], cluster_service.Operation]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the settings for a specific cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/UpdateCluster", + request_serializer=cluster_service.UpdateClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_cluster"] + + @property + def update_node_pool( + self, + ) -> Callable[[cluster_service.UpdateNodePoolRequest], cluster_service.Operation]: + r"""Return a callable for the update node pool method over gRPC. + + Updates the version and/or image type of a specific + node pool. + + Returns: + Callable[[~.UpdateNodePoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_node_pool" not in self._stubs: + self._stubs["update_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/UpdateNodePool", + request_serializer=cluster_service.UpdateNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_node_pool"] + + @property + def set_node_pool_autoscaling( + self, + ) -> Callable[ + [cluster_service.SetNodePoolAutoscalingRequest], cluster_service.Operation + ]: + r"""Return a callable for the set node pool autoscaling method over gRPC. + + Sets the autoscaling settings of a specific node + pool. + + Returns: + Callable[[~.SetNodePoolAutoscalingRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_autoscaling" not in self._stubs: + self._stubs["set_node_pool_autoscaling"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetNodePoolAutoscaling", + request_serializer=cluster_service.SetNodePoolAutoscalingRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_autoscaling"] + + @property + def set_logging_service( + self, + ) -> Callable[ + [cluster_service.SetLoggingServiceRequest], cluster_service.Operation + ]: + r"""Return a callable for the set logging service method over gRPC. + + Sets the logging service for a specific cluster. 
+
+        Returns:
+            Callable[[~.SetLoggingServiceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_logging_service" not in self._stubs:
+            self._stubs["set_logging_service"] = self.grpc_channel.unary_unary(
+                "/google.container.v1beta1.ClusterManager/SetLoggingService",
+                request_serializer=cluster_service.SetLoggingServiceRequest.serialize,
+                response_deserializer=cluster_service.Operation.deserialize,
+            )
+        return self._stubs["set_logging_service"]
+
+    @property
+    def set_monitoring_service(
+        self,
+    ) -> Callable[
+        [cluster_service.SetMonitoringServiceRequest], cluster_service.Operation
+    ]:
+        r"""Return a callable for the set monitoring service method over gRPC.
+
+        Sets the monitoring service for a specific cluster.
+
+        Returns:
+            Callable[[~.SetMonitoringServiceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_monitoring_service" not in self._stubs:
+            self._stubs["set_monitoring_service"] = self.grpc_channel.unary_unary(
+                "/google.container.v1beta1.ClusterManager/SetMonitoringService",
+                request_serializer=cluster_service.SetMonitoringServiceRequest.serialize,
+                response_deserializer=cluster_service.Operation.deserialize,
+            )
+        return self._stubs["set_monitoring_service"]
+
+    @property
+    def set_addons_config(
+        self,
+    ) -> Callable[[cluster_service.SetAddonsConfigRequest], cluster_service.Operation]:
+        r"""Return a callable for the set addons config method over gRPC.
+
+        Sets the addons for a specific cluster.
+
+        Returns:
+            Callable[[~.SetAddonsConfigRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_addons_config" not in self._stubs:
+            self._stubs["set_addons_config"] = self.grpc_channel.unary_unary(
+                "/google.container.v1beta1.ClusterManager/SetAddonsConfig",
+                request_serializer=cluster_service.SetAddonsConfigRequest.serialize,
+                response_deserializer=cluster_service.Operation.deserialize,
+            )
+        return self._stubs["set_addons_config"]
+
+    @property
+    def set_locations(
+        self,
+    ) -> Callable[[cluster_service.SetLocationsRequest], cluster_service.Operation]:
+        r"""Return a callable for the set locations method over gRPC.
+
+        Sets the locations for a specific cluster. Deprecated. Use
+        `projects.locations.clusters.update <https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1beta1/projects.locations.clusters/update>`__
+        instead.
+
+        Returns:
+            Callable[[~.SetLocationsRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
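+        # The string passed to unary_unary() below is the full gRPC method
+        # path: the proto package and service name
+        # ("google.container.v1beta1.ClusterManager") plus the RPC name,
+        # which is what gRPC uses to route the request.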
+ if "set_locations" not in self._stubs: + self._stubs["set_locations"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetLocations", + request_serializer=cluster_service.SetLocationsRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_locations"] + + @property + def update_master( + self, + ) -> Callable[[cluster_service.UpdateMasterRequest], cluster_service.Operation]: + r"""Return a callable for the update master method over gRPC. + + Updates the master for a specific cluster. + + Returns: + Callable[[~.UpdateMasterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_master" not in self._stubs: + self._stubs["update_master"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/UpdateMaster", + request_serializer=cluster_service.UpdateMasterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_master"] + + @property + def set_master_auth( + self, + ) -> Callable[[cluster_service.SetMasterAuthRequest], cluster_service.Operation]: + r"""Return a callable for the set master auth method over gRPC. + + Sets master auth materials. Currently supports + changing the admin password or a specific cluster, + either via password generation or explicitly setting the + password. + + Returns: + Callable[[~.SetMasterAuthRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_master_auth" not in self._stubs: + self._stubs["set_master_auth"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetMasterAuth", + request_serializer=cluster_service.SetMasterAuthRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_master_auth"] + + @property + def delete_cluster( + self, + ) -> Callable[[cluster_service.DeleteClusterRequest], cluster_service.Operation]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes the cluster, including the Kubernetes + endpoint and all worker nodes. + + Firewalls and routes that were configured during cluster + creation are also deleted. + + Other Google Compute Engine resources that might be in + use by the cluster, such as load balancer resources, are + not deleted if they weren't present when the cluster was + initially created. + + Returns: + Callable[[~.DeleteClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/DeleteCluster", + request_serializer=cluster_service.DeleteClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["delete_cluster"] + + @property + def list_operations( + self, + ) -> Callable[ + [cluster_service.ListOperationsRequest], cluster_service.ListOperationsResponse + ]: + r"""Return a callable for the list operations method over gRPC. + + Lists all operations in a project in the specified + zone or all zones. + + Returns: + Callable[[~.ListOperationsRequest], + ~.ListOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListOperations", + request_serializer=cluster_service.ListOperationsRequest.serialize, + response_deserializer=cluster_service.ListOperationsResponse.deserialize, + ) + return self._stubs["list_operations"] + + @property + def get_operation( + self, + ) -> Callable[[cluster_service.GetOperationRequest], cluster_service.Operation]: + r"""Return a callable for the get operation method over gRPC. + + Gets the specified operation. + + Returns: + Callable[[~.GetOperationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetOperation", + request_serializer=cluster_service.GetOperationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["get_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[cluster_service.CancelOperationRequest], empty_pb2.Empty]: + r"""Return a callable for the cancel operation method over gRPC. + + Cancels the specified operation. + + Returns: + Callable[[~.CancelOperationRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CancelOperation", + request_serializer=cluster_service.CancelOperationRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_operation"] + + @property + def get_server_config( + self, + ) -> Callable[ + [cluster_service.GetServerConfigRequest], cluster_service.ServerConfig + ]: + r"""Return a callable for the get server config method over gRPC. + + Returns configuration info about the Google + Kubernetes Engine service. 
+ + Returns: + Callable[[~.GetServerConfigRequest], + ~.ServerConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_server_config" not in self._stubs: + self._stubs["get_server_config"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetServerConfig", + request_serializer=cluster_service.GetServerConfigRequest.serialize, + response_deserializer=cluster_service.ServerConfig.deserialize, + ) + return self._stubs["get_server_config"] + + @property + def get_json_web_keys( + self, + ) -> Callable[ + [cluster_service.GetJSONWebKeysRequest], cluster_service.GetJSONWebKeysResponse + ]: + r"""Return a callable for the get json web keys method over gRPC. + + Gets the public component of the cluster signing keys + in JSON Web Key format. + This API is not yet intended for general use, and is not + available for all clusters. + + Returns: + Callable[[~.GetJSONWebKeysRequest], + ~.GetJSONWebKeysResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_json_web_keys" not in self._stubs: + self._stubs["get_json_web_keys"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetJSONWebKeys", + request_serializer=cluster_service.GetJSONWebKeysRequest.serialize, + response_deserializer=cluster_service.GetJSONWebKeysResponse.deserialize, + ) + return self._stubs["get_json_web_keys"] + + @property + def list_node_pools( + self, + ) -> Callable[ + [cluster_service.ListNodePoolsRequest], cluster_service.ListNodePoolsResponse + ]: + r"""Return a callable for the list node pools method over gRPC. + + Lists the node pools for a cluster. + + Returns: + Callable[[~.ListNodePoolsRequest], + ~.ListNodePoolsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_node_pools" not in self._stubs: + self._stubs["list_node_pools"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListNodePools", + request_serializer=cluster_service.ListNodePoolsRequest.serialize, + response_deserializer=cluster_service.ListNodePoolsResponse.deserialize, + ) + return self._stubs["list_node_pools"] + + @property + def get_node_pool( + self, + ) -> Callable[[cluster_service.GetNodePoolRequest], cluster_service.NodePool]: + r"""Return a callable for the get node pool method over gRPC. + + Retrieves the requested node pool. + + Returns: + Callable[[~.GetNodePoolRequest], + ~.NodePool]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
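+        # ``serialize``/``deserialize`` are provided by the proto-plus
+        # message classes; they convert between the Python message objects
+        # and the raw protobuf bytes sent over the channel.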
+ if "get_node_pool" not in self._stubs: + self._stubs["get_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetNodePool", + request_serializer=cluster_service.GetNodePoolRequest.serialize, + response_deserializer=cluster_service.NodePool.deserialize, + ) + return self._stubs["get_node_pool"] + + @property + def create_node_pool( + self, + ) -> Callable[[cluster_service.CreateNodePoolRequest], cluster_service.Operation]: + r"""Return a callable for the create node pool method over gRPC. + + Creates a node pool for a cluster. + + Returns: + Callable[[~.CreateNodePoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_node_pool" not in self._stubs: + self._stubs["create_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CreateNodePool", + request_serializer=cluster_service.CreateNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["create_node_pool"] + + @property + def delete_node_pool( + self, + ) -> Callable[[cluster_service.DeleteNodePoolRequest], cluster_service.Operation]: + r"""Return a callable for the delete node pool method over gRPC. + + Deletes a node pool from a cluster. + + Returns: + Callable[[~.DeleteNodePoolRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_node_pool" not in self._stubs: + self._stubs["delete_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/DeleteNodePool", + request_serializer=cluster_service.DeleteNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["delete_node_pool"] + + @property + def complete_node_pool_upgrade( + self, + ) -> Callable[[cluster_service.CompleteNodePoolUpgradeRequest], empty_pb2.Empty]: + r"""Return a callable for the complete node pool upgrade method over gRPC. + + CompleteNodePoolUpgrade will signal an on-going node + pool upgrade to complete. + + Returns: + Callable[[~.CompleteNodePoolUpgradeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_node_pool_upgrade" not in self._stubs: + self._stubs["complete_node_pool_upgrade"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CompleteNodePoolUpgrade", + request_serializer=cluster_service.CompleteNodePoolUpgradeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["complete_node_pool_upgrade"] + + @property + def rollback_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.RollbackNodePoolUpgradeRequest], cluster_service.Operation + ]: + r"""Return a callable for the rollback node pool upgrade method over gRPC. + + Rolls back a previously Aborted or Failed NodePool + upgrade. 
This makes no changes if the last upgrade + successfully completed. + + Returns: + Callable[[~.RollbackNodePoolUpgradeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_node_pool_upgrade" not in self._stubs: + self._stubs["rollback_node_pool_upgrade"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/RollbackNodePoolUpgrade", + request_serializer=cluster_service.RollbackNodePoolUpgradeRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["rollback_node_pool_upgrade"] + + @property + def set_node_pool_management( + self, + ) -> Callable[ + [cluster_service.SetNodePoolManagementRequest], cluster_service.Operation + ]: + r"""Return a callable for the set node pool management method over gRPC. + + Sets the NodeManagement options for a node pool. + + Returns: + Callable[[~.SetNodePoolManagementRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_management" not in self._stubs: + self._stubs["set_node_pool_management"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetNodePoolManagement", + request_serializer=cluster_service.SetNodePoolManagementRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_management"] + + @property + def set_labels( + self, + ) -> Callable[[cluster_service.SetLabelsRequest], cluster_service.Operation]: + r"""Return a callable for the set labels method over gRPC. + + Sets labels on a cluster. + + Returns: + Callable[[~.SetLabelsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_labels" not in self._stubs: + self._stubs["set_labels"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetLabels", + request_serializer=cluster_service.SetLabelsRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_labels"] + + @property + def set_legacy_abac( + self, + ) -> Callable[[cluster_service.SetLegacyAbacRequest], cluster_service.Operation]: + r"""Return a callable for the set legacy abac method over gRPC. + + Enables or disables the ABAC authorization mechanism + on a cluster. + + Returns: + Callable[[~.SetLegacyAbacRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_legacy_abac" not in self._stubs: + self._stubs["set_legacy_abac"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetLegacyAbac", + request_serializer=cluster_service.SetLegacyAbacRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_legacy_abac"] + + @property + def start_ip_rotation( + self, + ) -> Callable[[cluster_service.StartIPRotationRequest], cluster_service.Operation]: + r"""Return a callable for the start ip rotation method over gRPC. + + Starts master IP rotation. + + Returns: + Callable[[~.StartIPRotationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_ip_rotation" not in self._stubs: + self._stubs["start_ip_rotation"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/StartIPRotation", + request_serializer=cluster_service.StartIPRotationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["start_ip_rotation"] + + @property + def complete_ip_rotation( + self, + ) -> Callable[ + [cluster_service.CompleteIPRotationRequest], cluster_service.Operation + ]: + r"""Return a callable for the complete ip rotation method over gRPC. + + Completes master IP rotation. + + Returns: + Callable[[~.CompleteIPRotationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_ip_rotation" not in self._stubs: + self._stubs["complete_ip_rotation"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CompleteIPRotation", + request_serializer=cluster_service.CompleteIPRotationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["complete_ip_rotation"] + + @property + def set_node_pool_size( + self, + ) -> Callable[[cluster_service.SetNodePoolSizeRequest], cluster_service.Operation]: + r"""Return a callable for the set node pool size method over gRPC. + + SetNodePoolSizeRequest sets the size of a node pool. The new + size will be used for all replicas, including future replicas + created by modifying + [NodePool.locations][google.container.v1beta1.NodePool.locations]. + + Returns: + Callable[[~.SetNodePoolSizeRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_node_pool_size" not in self._stubs: + self._stubs["set_node_pool_size"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetNodePoolSize", + request_serializer=cluster_service.SetNodePoolSizeRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_size"] + + @property + def set_network_policy( + self, + ) -> Callable[[cluster_service.SetNetworkPolicyRequest], cluster_service.Operation]: + r"""Return a callable for the set network policy method over gRPC. + + Enables or disables Network Policy for a cluster. + + Returns: + Callable[[~.SetNetworkPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_network_policy" not in self._stubs: + self._stubs["set_network_policy"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetNetworkPolicy", + request_serializer=cluster_service.SetNetworkPolicyRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_network_policy"] + + @property + def set_maintenance_policy( + self, + ) -> Callable[ + [cluster_service.SetMaintenancePolicyRequest], cluster_service.Operation + ]: + r"""Return a callable for the set maintenance policy method over gRPC. + + Sets the maintenance policy for a cluster. + + Returns: + Callable[[~.SetMaintenancePolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_maintenance_policy" not in self._stubs: + self._stubs["set_maintenance_policy"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetMaintenancePolicy", + request_serializer=cluster_service.SetMaintenancePolicyRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_maintenance_policy"] + + @property + def list_usable_subnetworks( + self, + ) -> Callable[ + [cluster_service.ListUsableSubnetworksRequest], + cluster_service.ListUsableSubnetworksResponse, + ]: + r"""Return a callable for the list usable subnetworks method over gRPC. + + Lists subnetworks that can be used for creating + clusters in a project. + + Returns: + Callable[[~.ListUsableSubnetworksRequest], + ~.ListUsableSubnetworksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_usable_subnetworks" not in self._stubs: + self._stubs["list_usable_subnetworks"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListUsableSubnetworks", + request_serializer=cluster_service.ListUsableSubnetworksRequest.serialize, + response_deserializer=cluster_service.ListUsableSubnetworksResponse.deserialize, + ) + return self._stubs["list_usable_subnetworks"] + + @property + def check_autopilot_compatibility( + self, + ) -> Callable[ + [cluster_service.CheckAutopilotCompatibilityRequest], + cluster_service.CheckAutopilotCompatibilityResponse, + ]: + r"""Return a callable for the check autopilot compatibility method over gRPC. + + Checks the cluster compatibility with Autopilot mode, + and returns a list of compatibility issues. + + Returns: + Callable[[~.CheckAutopilotCompatibilityRequest], + ~.CheckAutopilotCompatibilityResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_autopilot_compatibility" not in self._stubs: + self._stubs[ + "check_autopilot_compatibility" + ] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CheckAutopilotCompatibility", + request_serializer=cluster_service.CheckAutopilotCompatibilityRequest.serialize, + response_deserializer=cluster_service.CheckAutopilotCompatibilityResponse.deserialize, + ) + return self._stubs["check_autopilot_compatibility"] + + @property + def list_locations( + self, + ) -> Callable[ + [cluster_service.ListLocationsRequest], cluster_service.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC. + + Fetches locations that offer Google Kubernetes + Engine. + + Returns: + Callable[[~.ListLocationsRequest], + ~.ListLocationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListLocations", + request_serializer=cluster_service.ListLocationsRequest.serialize, + response_deserializer=cluster_service.ListLocationsResponse.deserialize, + ) + return self._stubs["list_locations"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ClusterManagerGrpcTransport",) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc_asyncio.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2276c5c22ab2 --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/transports/grpc_asyncio.py @@ -0,0 +1,1277 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.container_v1beta1.types import cluster_service + +from .base import DEFAULT_CLIENT_INFO, ClusterManagerTransport +from .grpc import ClusterManagerGrpcTransport + + +class ClusterManagerGrpcAsyncIOTransport(ClusterManagerTransport): + """gRPC AsyncIO backend transport for ClusterManager. + + Google Kubernetes Engine Cluster Manager v1beta1 + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "container.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "container.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_clusters( + self, + ) -> Callable[ + [cluster_service.ListClustersRequest], + Awaitable[cluster_service.ListClustersResponse], + ]: + r"""Return a callable for the list clusters method over gRPC. + + Lists all clusters owned by a project in either the + specified zone or all zones. + + Returns: + Callable[[~.ListClustersRequest], + Awaitable[~.ListClustersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListClusters", + request_serializer=cluster_service.ListClustersRequest.serialize, + response_deserializer=cluster_service.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def get_cluster( + self, + ) -> Callable[ + [cluster_service.GetClusterRequest], Awaitable[cluster_service.Cluster] + ]: + r"""Return a callable for the get cluster method over gRPC. + + Gets the details for a specific cluster. + + Returns: + Callable[[~.GetClusterRequest], + Awaitable[~.Cluster]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cluster" not in self._stubs: + self._stubs["get_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetCluster", + request_serializer=cluster_service.GetClusterRequest.serialize, + response_deserializer=cluster_service.Cluster.deserialize, + ) + return self._stubs["get_cluster"] + + @property + def create_cluster( + self, + ) -> Callable[ + [cluster_service.CreateClusterRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the create cluster method over gRPC. + + Creates a cluster, consisting of the specified number and type + of Google Compute Engine instances. + + By default, the cluster is created in the project's `default + network `__. + + One firewall is added for the cluster. After cluster creation, + the Kubelet creates routes for each node to allow the containers + on that node to communicate with all other instances in the + cluster. + + Finally, an entry is added to the project's global metadata + indicating which CIDR range the cluster is using. + + Returns: + Callable[[~.CreateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cluster" not in self._stubs: + self._stubs["create_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CreateCluster", + request_serializer=cluster_service.CreateClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["create_cluster"] + + @property + def update_cluster( + self, + ) -> Callable[ + [cluster_service.UpdateClusterRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the update cluster method over gRPC. + + Updates the settings for a specific cluster. + + Returns: + Callable[[~.UpdateClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_cluster" not in self._stubs: + self._stubs["update_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/UpdateCluster", + request_serializer=cluster_service.UpdateClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_cluster"] + + @property + def update_node_pool( + self, + ) -> Callable[ + [cluster_service.UpdateNodePoolRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the update node pool method over gRPC. + + Updates the version and/or image type of a specific + node pool. + + Returns: + Callable[[~.UpdateNodePoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_node_pool" not in self._stubs: + self._stubs["update_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/UpdateNodePool", + request_serializer=cluster_service.UpdateNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_node_pool"] + + @property + def set_node_pool_autoscaling( + self, + ) -> Callable[ + [cluster_service.SetNodePoolAutoscalingRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the set node pool autoscaling method over gRPC. + + Sets the autoscaling settings of a specific node + pool. + + Returns: + Callable[[~.SetNodePoolAutoscalingRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_autoscaling" not in self._stubs: + self._stubs["set_node_pool_autoscaling"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetNodePoolAutoscaling", + request_serializer=cluster_service.SetNodePoolAutoscalingRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_autoscaling"] + + @property + def set_logging_service( + self, + ) -> Callable[ + [cluster_service.SetLoggingServiceRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set logging service method over gRPC. + + Sets the logging service for a specific cluster. + + Returns: + Callable[[~.SetLoggingServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_logging_service" not in self._stubs: + self._stubs["set_logging_service"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetLoggingService", + request_serializer=cluster_service.SetLoggingServiceRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_logging_service"] + + @property + def set_monitoring_service( + self, + ) -> Callable[ + [cluster_service.SetMonitoringServiceRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the set monitoring service method over gRPC. + + Sets the monitoring service for a specific cluster. + + Returns: + Callable[[~.SetMonitoringServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_monitoring_service" not in self._stubs: + self._stubs["set_monitoring_service"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetMonitoringService", + request_serializer=cluster_service.SetMonitoringServiceRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_monitoring_service"] + + @property + def set_addons_config( + self, + ) -> Callable[ + [cluster_service.SetAddonsConfigRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set addons config method over gRPC. + + Sets the addons for a specific cluster. + + Returns: + Callable[[~.SetAddonsConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_addons_config" not in self._stubs: + self._stubs["set_addons_config"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetAddonsConfig", + request_serializer=cluster_service.SetAddonsConfigRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_addons_config"] + + @property + def set_locations( + self, + ) -> Callable[ + [cluster_service.SetLocationsRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set locations method over gRPC. + + Sets the locations for a specific cluster. Deprecated. Use + `projects.locations.clusters.update `__ + instead. + + Returns: + Callable[[~.SetLocationsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_locations" not in self._stubs: + self._stubs["set_locations"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetLocations", + request_serializer=cluster_service.SetLocationsRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_locations"] + + @property + def update_master( + self, + ) -> Callable[ + [cluster_service.UpdateMasterRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the update master method over gRPC. + + Updates the master for a specific cluster. + + Returns: + Callable[[~.UpdateMasterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_master" not in self._stubs: + self._stubs["update_master"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/UpdateMaster", + request_serializer=cluster_service.UpdateMasterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["update_master"] + + @property + def set_master_auth( + self, + ) -> Callable[ + [cluster_service.SetMasterAuthRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set master auth method over gRPC. + + Sets master auth materials. Currently supports + changing the admin password or a specific cluster, + either via password generation or explicitly setting the + password. + + Returns: + Callable[[~.SetMasterAuthRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_master_auth" not in self._stubs: + self._stubs["set_master_auth"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetMasterAuth", + request_serializer=cluster_service.SetMasterAuthRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_master_auth"] + + @property + def delete_cluster( + self, + ) -> Callable[ + [cluster_service.DeleteClusterRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the delete cluster method over gRPC. + + Deletes the cluster, including the Kubernetes + endpoint and all worker nodes. + + Firewalls and routes that were configured during cluster + creation are also deleted. + + Other Google Compute Engine resources that might be in + use by the cluster, such as load balancer resources, are + not deleted if they weren't present when the cluster was + initially created. + + Returns: + Callable[[~.DeleteClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_cluster" not in self._stubs: + self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/DeleteCluster", + request_serializer=cluster_service.DeleteClusterRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["delete_cluster"] + + @property + def list_operations( + self, + ) -> Callable[ + [cluster_service.ListOperationsRequest], + Awaitable[cluster_service.ListOperationsResponse], + ]: + r"""Return a callable for the list operations method over gRPC. + + Lists all operations in a project in the specified + zone or all zones. + + Returns: + Callable[[~.ListOperationsRequest], + Awaitable[~.ListOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListOperations", + request_serializer=cluster_service.ListOperationsRequest.serialize, + response_deserializer=cluster_service.ListOperationsResponse.deserialize, + ) + return self._stubs["list_operations"] + + @property + def get_operation( + self, + ) -> Callable[ + [cluster_service.GetOperationRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the get operation method over gRPC. + + Gets the specified operation. + + Returns: + Callable[[~.GetOperationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetOperation", + request_serializer=cluster_service.GetOperationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["get_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[cluster_service.CancelOperationRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel operation method over gRPC. + + Cancels the specified operation. + + Returns: + Callable[[~.CancelOperationRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CancelOperation", + request_serializer=cluster_service.CancelOperationRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_operation"] + + @property + def get_server_config( + self, + ) -> Callable[ + [cluster_service.GetServerConfigRequest], + Awaitable[cluster_service.ServerConfig], + ]: + r"""Return a callable for the get server config method over gRPC. + + Returns configuration info about the Google + Kubernetes Engine service. 
+ + Returns: + Callable[[~.GetServerConfigRequest], + Awaitable[~.ServerConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_server_config" not in self._stubs: + self._stubs["get_server_config"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetServerConfig", + request_serializer=cluster_service.GetServerConfigRequest.serialize, + response_deserializer=cluster_service.ServerConfig.deserialize, + ) + return self._stubs["get_server_config"] + + @property + def get_json_web_keys( + self, + ) -> Callable[ + [cluster_service.GetJSONWebKeysRequest], + Awaitable[cluster_service.GetJSONWebKeysResponse], + ]: + r"""Return a callable for the get json web keys method over gRPC. + + Gets the public component of the cluster signing keys + in JSON Web Key format. + This API is not yet intended for general use, and is not + available for all clusters. + + Returns: + Callable[[~.GetJSONWebKeysRequest], + Awaitable[~.GetJSONWebKeysResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_json_web_keys" not in self._stubs: + self._stubs["get_json_web_keys"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetJSONWebKeys", + request_serializer=cluster_service.GetJSONWebKeysRequest.serialize, + response_deserializer=cluster_service.GetJSONWebKeysResponse.deserialize, + ) + return self._stubs["get_json_web_keys"] + + @property + def list_node_pools( + self, + ) -> Callable[ + [cluster_service.ListNodePoolsRequest], + Awaitable[cluster_service.ListNodePoolsResponse], + ]: + r"""Return a callable for the list node pools method over gRPC. + + Lists the node pools for a cluster. + + Returns: + Callable[[~.ListNodePoolsRequest], + Awaitable[~.ListNodePoolsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_node_pools" not in self._stubs: + self._stubs["list_node_pools"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListNodePools", + request_serializer=cluster_service.ListNodePoolsRequest.serialize, + response_deserializer=cluster_service.ListNodePoolsResponse.deserialize, + ) + return self._stubs["list_node_pools"] + + @property + def get_node_pool( + self, + ) -> Callable[ + [cluster_service.GetNodePoolRequest], Awaitable[cluster_service.NodePool] + ]: + r"""Return a callable for the get node pool method over gRPC. + + Retrieves the requested node pool. + + Returns: + Callable[[~.GetNodePoolRequest], + Awaitable[~.NodePool]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
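+ # A minimal usage sketch (the resource name is an assumed
+ # placeholder). Node pools are addressed with a four-level resource
+ # name:
+ #
+ #   request = cluster_service.GetNodePoolRequest(
+ #       name="projects/my-project/locations/us-central1"
+ #       "/clusters/my-cluster/nodePools/default-pool",
+ #   )
+ #   node_pool = await transport.get_node_pool(request)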
+ if "get_node_pool" not in self._stubs: + self._stubs["get_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/GetNodePool", + request_serializer=cluster_service.GetNodePoolRequest.serialize, + response_deserializer=cluster_service.NodePool.deserialize, + ) + return self._stubs["get_node_pool"] + + @property + def create_node_pool( + self, + ) -> Callable[ + [cluster_service.CreateNodePoolRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the create node pool method over gRPC. + + Creates a node pool for a cluster. + + Returns: + Callable[[~.CreateNodePoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_node_pool" not in self._stubs: + self._stubs["create_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CreateNodePool", + request_serializer=cluster_service.CreateNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["create_node_pool"] + + @property + def delete_node_pool( + self, + ) -> Callable[ + [cluster_service.DeleteNodePoolRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the delete node pool method over gRPC. + + Deletes a node pool from a cluster. + + Returns: + Callable[[~.DeleteNodePoolRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_node_pool" not in self._stubs: + self._stubs["delete_node_pool"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/DeleteNodePool", + request_serializer=cluster_service.DeleteNodePoolRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["delete_node_pool"] + + @property + def complete_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.CompleteNodePoolUpgradeRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the complete node pool upgrade method over gRPC. + + CompleteNodePoolUpgrade will signal an on-going node + pool upgrade to complete. + + Returns: + Callable[[~.CompleteNodePoolUpgradeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "complete_node_pool_upgrade" not in self._stubs: + self._stubs["complete_node_pool_upgrade"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CompleteNodePoolUpgrade", + request_serializer=cluster_service.CompleteNodePoolUpgradeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["complete_node_pool_upgrade"] + + @property + def rollback_node_pool_upgrade( + self, + ) -> Callable[ + [cluster_service.RollbackNodePoolUpgradeRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the rollback node pool upgrade method over gRPC. + + Rolls back a previously Aborted or Failed NodePool + upgrade. This makes no changes if the last upgrade + successfully completed. + + Returns: + Callable[[~.RollbackNodePoolUpgradeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_node_pool_upgrade" not in self._stubs: + self._stubs["rollback_node_pool_upgrade"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/RollbackNodePoolUpgrade", + request_serializer=cluster_service.RollbackNodePoolUpgradeRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["rollback_node_pool_upgrade"] + + @property + def set_node_pool_management( + self, + ) -> Callable[ + [cluster_service.SetNodePoolManagementRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the set node pool management method over gRPC. + + Sets the NodeManagement options for a node pool. + + Returns: + Callable[[~.SetNodePoolManagementRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_management" not in self._stubs: + self._stubs["set_node_pool_management"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetNodePoolManagement", + request_serializer=cluster_service.SetNodePoolManagementRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_management"] + + @property + def set_labels( + self, + ) -> Callable[ + [cluster_service.SetLabelsRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set labels method over gRPC. + + Sets labels on a cluster. + + Returns: + Callable[[~.SetLabelsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_labels" not in self._stubs: + self._stubs["set_labels"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetLabels", + request_serializer=cluster_service.SetLabelsRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_labels"] + + @property + def set_legacy_abac( + self, + ) -> Callable[ + [cluster_service.SetLegacyAbacRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set legacy abac method over gRPC. + + Enables or disables the ABAC authorization mechanism + on a cluster. + + Returns: + Callable[[~.SetLegacyAbacRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_legacy_abac" not in self._stubs: + self._stubs["set_legacy_abac"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetLegacyAbac", + request_serializer=cluster_service.SetLegacyAbacRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_legacy_abac"] + + @property + def start_ip_rotation( + self, + ) -> Callable[ + [cluster_service.StartIPRotationRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the start ip rotation method over gRPC. + + Starts master IP rotation. + + Returns: + Callable[[~.StartIPRotationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_ip_rotation" not in self._stubs: + self._stubs["start_ip_rotation"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/StartIPRotation", + request_serializer=cluster_service.StartIPRotationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["start_ip_rotation"] + + @property + def complete_ip_rotation( + self, + ) -> Callable[ + [cluster_service.CompleteIPRotationRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the complete ip rotation method over gRPC. + + Completes master IP rotation. + + Returns: + Callable[[~.CompleteIPRotationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "complete_ip_rotation" not in self._stubs: + self._stubs["complete_ip_rotation"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CompleteIPRotation", + request_serializer=cluster_service.CompleteIPRotationRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["complete_ip_rotation"] + + @property + def set_node_pool_size( + self, + ) -> Callable[ + [cluster_service.SetNodePoolSizeRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set node pool size method over gRPC. + + SetNodePoolSizeRequest sets the size of a node pool. 
The new + size will be used for all replicas, including future replicas + created by modifying + [NodePool.locations][google.container.v1beta1.NodePool.locations]. + + Returns: + Callable[[~.SetNodePoolSizeRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_node_pool_size" not in self._stubs: + self._stubs["set_node_pool_size"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetNodePoolSize", + request_serializer=cluster_service.SetNodePoolSizeRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_node_pool_size"] + + @property + def set_network_policy( + self, + ) -> Callable[ + [cluster_service.SetNetworkPolicyRequest], Awaitable[cluster_service.Operation] + ]: + r"""Return a callable for the set network policy method over gRPC. + + Enables or disables Network Policy for a cluster. + + Returns: + Callable[[~.SetNetworkPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_network_policy" not in self._stubs: + self._stubs["set_network_policy"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetNetworkPolicy", + request_serializer=cluster_service.SetNetworkPolicyRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_network_policy"] + + @property + def set_maintenance_policy( + self, + ) -> Callable[ + [cluster_service.SetMaintenancePolicyRequest], + Awaitable[cluster_service.Operation], + ]: + r"""Return a callable for the set maintenance policy method over gRPC. + + Sets the maintenance policy for a cluster. + + Returns: + Callable[[~.SetMaintenancePolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_maintenance_policy" not in self._stubs: + self._stubs["set_maintenance_policy"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/SetMaintenancePolicy", + request_serializer=cluster_service.SetMaintenancePolicyRequest.serialize, + response_deserializer=cluster_service.Operation.deserialize, + ) + return self._stubs["set_maintenance_policy"] + + @property + def list_usable_subnetworks( + self, + ) -> Callable[ + [cluster_service.ListUsableSubnetworksRequest], + Awaitable[cluster_service.ListUsableSubnetworksResponse], + ]: + r"""Return a callable for the list usable subnetworks method over gRPC. + + Lists subnetworks that can be used for creating + clusters in a project. + + Returns: + Callable[[~.ListUsableSubnetworksRequest], + Awaitable[~.ListUsableSubnetworksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_usable_subnetworks" not in self._stubs: + self._stubs["list_usable_subnetworks"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListUsableSubnetworks", + request_serializer=cluster_service.ListUsableSubnetworksRequest.serialize, + response_deserializer=cluster_service.ListUsableSubnetworksResponse.deserialize, + ) + return self._stubs["list_usable_subnetworks"] + + @property + def check_autopilot_compatibility( + self, + ) -> Callable[ + [cluster_service.CheckAutopilotCompatibilityRequest], + Awaitable[cluster_service.CheckAutopilotCompatibilityResponse], + ]: + r"""Return a callable for the check autopilot compatibility method over gRPC. + + Checks the cluster compatibility with Autopilot mode, + and returns a list of compatibility issues. + + Returns: + Callable[[~.CheckAutopilotCompatibilityRequest], + Awaitable[~.CheckAutopilotCompatibilityResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_autopilot_compatibility" not in self._stubs: + self._stubs[ + "check_autopilot_compatibility" + ] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/CheckAutopilotCompatibility", + request_serializer=cluster_service.CheckAutopilotCompatibilityRequest.serialize, + response_deserializer=cluster_service.CheckAutopilotCompatibilityResponse.deserialize, + ) + return self._stubs["check_autopilot_compatibility"] + + @property + def list_locations( + self, + ) -> Callable[ + [cluster_service.ListLocationsRequest], + Awaitable[cluster_service.ListLocationsResponse], + ]: + r"""Return a callable for the list locations method over gRPC. + + Fetches locations that offer Google Kubernetes + Engine. + + Returns: + Callable[[~.ListLocationsRequest], + Awaitable[~.ListLocationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.container.v1beta1.ClusterManager/ListLocations", + request_serializer=cluster_service.ListLocationsRequest.serialize, + response_deserializer=cluster_service.ListLocationsResponse.deserialize, + ) + return self._stubs["list_locations"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ClusterManagerGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/__init__.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/__init__.py new file mode 100644 index 000000000000..6d4f4ff1028a --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/__init__.py @@ -0,0 +1,374 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cluster_service import ( + AcceleratorConfig, + AdditionalNodeNetworkConfig, + AdditionalPodNetworkConfig, + AdditionalPodRangesConfig, + AddonsConfig, + AdvancedDatapathObservabilityConfig, + AdvancedMachineFeatures, + AuthenticatorGroupsConfig, + Autopilot, + AutopilotCompatibilityIssue, + AutoprovisioningNodePoolDefaults, + AutoUpgradeOptions, + BestEffortProvisioning, + BinaryAuthorization, + BlueGreenSettings, + CancelOperationRequest, + CheckAutopilotCompatibilityRequest, + CheckAutopilotCompatibilityResponse, + ClientCertificateConfig, + CloudRunConfig, + Cluster, + ClusterAutoscaling, + ClusterTelemetry, + ClusterUpdate, + CompleteIPRotationRequest, + CompleteNodePoolUpgradeRequest, + ConfidentialNodes, + ConfigConnectorConfig, + CostManagementConfig, + CreateClusterRequest, + CreateNodePoolRequest, + DailyMaintenanceWindow, + DatabaseEncryption, + DatapathProvider, + DefaultSnatStatus, + DeleteClusterRequest, + DeleteNodePoolRequest, + DnsCacheConfig, + DNSConfig, + EphemeralStorageConfig, + EphemeralStorageLocalSsdConfig, + FastSocket, + Fleet, + GatewayAPIConfig, + GcePersistentDiskCsiDriverConfig, + GcfsConfig, + GcpFilestoreCsiDriverConfig, + GcsFuseCsiDriverConfig, + GetClusterRequest, + GetJSONWebKeysRequest, + GetJSONWebKeysResponse, + GetNodePoolRequest, + GetOpenIDConfigRequest, + GetOpenIDConfigResponse, + GetOperationRequest, + GetServerConfigRequest, + GkeBackupAgentConfig, + GPUDriverInstallationConfig, + GPUSharingConfig, + HorizontalPodAutoscaling, + HostMaintenancePolicy, + HttpLoadBalancing, + IdentityServiceConfig, + ILBSubsettingConfig, + IntraNodeVisibilityConfig, + IPAllocationPolicy, + IstioConfig, + Jwk, + K8sBetaAPIConfig, + KalmConfig, + KubernetesDashboard, + LegacyAbac, + LinuxNodeConfig, + ListClustersRequest, + ListClustersResponse, + ListLocationsRequest, + ListLocationsResponse, + ListNodePoolsRequest, + ListNodePoolsResponse, + ListOperationsRequest, + ListOperationsResponse, + ListUsableSubnetworksRequest, + ListUsableSubnetworksResponse, + LocalNvmeSsdBlockConfig, + Location, + LoggingComponentConfig, + LoggingConfig, + LoggingVariantConfig, + MaintenanceExclusionOptions, + MaintenancePolicy, + MaintenanceWindow, + ManagedPrometheusConfig, + Master, + MasterAuth, + MasterAuthorizedNetworksConfig, + MaxPodsConstraint, + MeshCertificates, + MonitoringComponentConfig, + MonitoringConfig, + NetworkConfig, + NetworkPolicy, + NetworkPolicyConfig, + NetworkTags, + NodeConfig, + NodeConfigDefaults, + NodeKubeletConfig, + NodeLabels, + NodeManagement, + NodeNetworkConfig, + NodePool, + NodePoolAutoConfig, + NodePoolAutoscaling, + NodePoolDefaults, + NodePoolLoggingConfig, + NodePoolUpdateStrategy, + NodeTaint, + NodeTaints, + NotificationConfig, + Operation, + OperationProgress, + PodCIDROverprovisionConfig, + PodSecurityPolicyConfig, + PrivateClusterConfig, + PrivateClusterMasterGlobalAccessConfig, + PrivateIPv6GoogleAccess, + ProtectConfig, + RangeInfo, + RecurringTimeWindow, + ReleaseChannel, + ReservationAffinity, + ResourceLabels, + ResourceLimit, + ResourceUsageExportConfig, + RollbackNodePoolUpgradeRequest, + SandboxConfig, + 
SecurityBulletinEvent, + SecurityPostureConfig, + ServerConfig, + ServiceExternalIPsConfig, + SetAddonsConfigRequest, + SetLabelsRequest, + SetLegacyAbacRequest, + SetLocationsRequest, + SetLoggingServiceRequest, + SetMaintenancePolicyRequest, + SetMasterAuthRequest, + SetMonitoringServiceRequest, + SetNetworkPolicyRequest, + SetNodePoolAutoscalingRequest, + SetNodePoolManagementRequest, + SetNodePoolSizeRequest, + ShieldedInstanceConfig, + ShieldedNodes, + SoleTenantConfig, + StackType, + StartIPRotationRequest, + StatusCondition, + TimeWindow, + TpuConfig, + UpdateClusterRequest, + UpdateMasterRequest, + UpdateNodePoolRequest, + UpgradeAvailableEvent, + UpgradeEvent, + UpgradeResourceType, + UsableSubnetwork, + UsableSubnetworkSecondaryRange, + VerticalPodAutoscaling, + VirtualNIC, + WindowsNodeConfig, + WindowsVersions, + WorkloadALTSConfig, + WorkloadCertificates, + WorkloadConfig, + WorkloadIdentityConfig, + WorkloadMetadataConfig, + WorkloadPolicyConfig, +) + +__all__ = ( + "AcceleratorConfig", + "AdditionalNodeNetworkConfig", + "AdditionalPodNetworkConfig", + "AdditionalPodRangesConfig", + "AddonsConfig", + "AdvancedDatapathObservabilityConfig", + "AdvancedMachineFeatures", + "AuthenticatorGroupsConfig", + "Autopilot", + "AutopilotCompatibilityIssue", + "AutoprovisioningNodePoolDefaults", + "AutoUpgradeOptions", + "BestEffortProvisioning", + "BinaryAuthorization", + "BlueGreenSettings", + "CancelOperationRequest", + "CheckAutopilotCompatibilityRequest", + "CheckAutopilotCompatibilityResponse", + "ClientCertificateConfig", + "CloudRunConfig", + "Cluster", + "ClusterAutoscaling", + "ClusterTelemetry", + "ClusterUpdate", + "CompleteIPRotationRequest", + "CompleteNodePoolUpgradeRequest", + "ConfidentialNodes", + "ConfigConnectorConfig", + "CostManagementConfig", + "CreateClusterRequest", + "CreateNodePoolRequest", + "DailyMaintenanceWindow", + "DatabaseEncryption", + "DefaultSnatStatus", + "DeleteClusterRequest", + "DeleteNodePoolRequest", + "DnsCacheConfig", + "DNSConfig", + "EphemeralStorageConfig", + "EphemeralStorageLocalSsdConfig", + "FastSocket", + "Fleet", + "GatewayAPIConfig", + "GcePersistentDiskCsiDriverConfig", + "GcfsConfig", + "GcpFilestoreCsiDriverConfig", + "GcsFuseCsiDriverConfig", + "GetClusterRequest", + "GetJSONWebKeysRequest", + "GetJSONWebKeysResponse", + "GetNodePoolRequest", + "GetOpenIDConfigRequest", + "GetOpenIDConfigResponse", + "GetOperationRequest", + "GetServerConfigRequest", + "GkeBackupAgentConfig", + "GPUDriverInstallationConfig", + "GPUSharingConfig", + "HorizontalPodAutoscaling", + "HostMaintenancePolicy", + "HttpLoadBalancing", + "IdentityServiceConfig", + "ILBSubsettingConfig", + "IntraNodeVisibilityConfig", + "IPAllocationPolicy", + "IstioConfig", + "Jwk", + "K8sBetaAPIConfig", + "KalmConfig", + "KubernetesDashboard", + "LegacyAbac", + "LinuxNodeConfig", + "ListClustersRequest", + "ListClustersResponse", + "ListLocationsRequest", + "ListLocationsResponse", + "ListNodePoolsRequest", + "ListNodePoolsResponse", + "ListOperationsRequest", + "ListOperationsResponse", + "ListUsableSubnetworksRequest", + "ListUsableSubnetworksResponse", + "LocalNvmeSsdBlockConfig", + "Location", + "LoggingComponentConfig", + "LoggingConfig", + "LoggingVariantConfig", + "MaintenanceExclusionOptions", + "MaintenancePolicy", + "MaintenanceWindow", + "ManagedPrometheusConfig", + "Master", + "MasterAuth", + "MasterAuthorizedNetworksConfig", + "MaxPodsConstraint", + "MeshCertificates", + "MonitoringComponentConfig", + "MonitoringConfig", + "NetworkConfig", + "NetworkPolicy", + 
"NetworkPolicyConfig", + "NetworkTags", + "NodeConfig", + "NodeConfigDefaults", + "NodeKubeletConfig", + "NodeLabels", + "NodeManagement", + "NodeNetworkConfig", + "NodePool", + "NodePoolAutoConfig", + "NodePoolAutoscaling", + "NodePoolDefaults", + "NodePoolLoggingConfig", + "NodeTaint", + "NodeTaints", + "NotificationConfig", + "Operation", + "OperationProgress", + "PodCIDROverprovisionConfig", + "PodSecurityPolicyConfig", + "PrivateClusterConfig", + "PrivateClusterMasterGlobalAccessConfig", + "ProtectConfig", + "RangeInfo", + "RecurringTimeWindow", + "ReleaseChannel", + "ReservationAffinity", + "ResourceLabels", + "ResourceLimit", + "ResourceUsageExportConfig", + "RollbackNodePoolUpgradeRequest", + "SandboxConfig", + "SecurityBulletinEvent", + "SecurityPostureConfig", + "ServerConfig", + "ServiceExternalIPsConfig", + "SetAddonsConfigRequest", + "SetLabelsRequest", + "SetLegacyAbacRequest", + "SetLocationsRequest", + "SetLoggingServiceRequest", + "SetMaintenancePolicyRequest", + "SetMasterAuthRequest", + "SetMonitoringServiceRequest", + "SetNetworkPolicyRequest", + "SetNodePoolAutoscalingRequest", + "SetNodePoolManagementRequest", + "SetNodePoolSizeRequest", + "ShieldedInstanceConfig", + "ShieldedNodes", + "SoleTenantConfig", + "StartIPRotationRequest", + "StatusCondition", + "TimeWindow", + "TpuConfig", + "UpdateClusterRequest", + "UpdateMasterRequest", + "UpdateNodePoolRequest", + "UpgradeAvailableEvent", + "UpgradeEvent", + "UsableSubnetwork", + "UsableSubnetworkSecondaryRange", + "VerticalPodAutoscaling", + "VirtualNIC", + "WindowsNodeConfig", + "WindowsVersions", + "WorkloadALTSConfig", + "WorkloadCertificates", + "WorkloadConfig", + "WorkloadIdentityConfig", + "WorkloadMetadataConfig", + "WorkloadPolicyConfig", + "DatapathProvider", + "NodePoolUpdateStrategy", + "PrivateIPv6GoogleAccess", + "StackType", + "UpgradeResourceType", +) diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py new file mode 100644 index 000000000000..84c433af3ffe --- /dev/null +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py @@ -0,0 +1,9999 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.container.v1beta1", + manifest={ + "PrivateIPv6GoogleAccess", + "UpgradeResourceType", + "NodePoolUpdateStrategy", + "DatapathProvider", + "StackType", + "LinuxNodeConfig", + "WindowsNodeConfig", + "NodeKubeletConfig", + "NodeConfig", + "AdvancedMachineFeatures", + "NodeNetworkConfig", + "AdditionalNodeNetworkConfig", + "AdditionalPodNetworkConfig", + "ShieldedInstanceConfig", + "SandboxConfig", + "EphemeralStorageConfig", + "LocalNvmeSsdBlockConfig", + "EphemeralStorageLocalSsdConfig", + "GcfsConfig", + "ReservationAffinity", + "SoleTenantConfig", + "HostMaintenancePolicy", + "NodeTaint", + "NodeTaints", + "NodeLabels", + "ResourceLabels", + "NetworkTags", + "MasterAuth", + "ClientCertificateConfig", + "AddonsConfig", + "HttpLoadBalancing", + "HorizontalPodAutoscaling", + "KubernetesDashboard", + "NetworkPolicyConfig", + "DnsCacheConfig", + "KalmConfig", + "GkeBackupAgentConfig", + "ConfigConnectorConfig", + "GcePersistentDiskCsiDriverConfig", + "GcpFilestoreCsiDriverConfig", + "GcsFuseCsiDriverConfig", + "PrivateClusterMasterGlobalAccessConfig", + "PrivateClusterConfig", + "IstioConfig", + "CloudRunConfig", + "MasterAuthorizedNetworksConfig", + "LegacyAbac", + "NetworkPolicy", + "PodCIDROverprovisionConfig", + "IPAllocationPolicy", + "BinaryAuthorization", + "PodSecurityPolicyConfig", + "AuthenticatorGroupsConfig", + "ClusterTelemetry", + "Cluster", + "K8sBetaAPIConfig", + "WorkloadConfig", + "ProtectConfig", + "SecurityPostureConfig", + "NodePoolDefaults", + "NodeConfigDefaults", + "NodePoolAutoConfig", + "ClusterUpdate", + "AdditionalPodRangesConfig", + "RangeInfo", + "Operation", + "OperationProgress", + "CreateClusterRequest", + "GetClusterRequest", + "UpdateClusterRequest", + "UpdateNodePoolRequest", + "SetNodePoolAutoscalingRequest", + "SetLoggingServiceRequest", + "SetMonitoringServiceRequest", + "SetAddonsConfigRequest", + "SetLocationsRequest", + "UpdateMasterRequest", + "SetMasterAuthRequest", + "DeleteClusterRequest", + "ListClustersRequest", + "ListClustersResponse", + "GetOperationRequest", + "ListOperationsRequest", + "CancelOperationRequest", + "ListOperationsResponse", + "GetServerConfigRequest", + "ServerConfig", + "BestEffortProvisioning", + "WindowsVersions", + "CreateNodePoolRequest", + "DeleteNodePoolRequest", + "ListNodePoolsRequest", + "GetNodePoolRequest", + "BlueGreenSettings", + "NodePool", + "NodeManagement", + "AutoUpgradeOptions", + "MaintenancePolicy", + "MaintenanceWindow", + "TimeWindow", + "MaintenanceExclusionOptions", + "RecurringTimeWindow", + "DailyMaintenanceWindow", + "SetNodePoolManagementRequest", + "SetNodePoolSizeRequest", + "CompleteNodePoolUpgradeRequest", + "RollbackNodePoolUpgradeRequest", + "ListNodePoolsResponse", + "ClusterAutoscaling", + "AutoprovisioningNodePoolDefaults", + "ResourceLimit", + "NodePoolAutoscaling", + "SetLabelsRequest", + "SetLegacyAbacRequest", + "StartIPRotationRequest", + "CompleteIPRotationRequest", + "AcceleratorConfig", + "GPUSharingConfig", + "GPUDriverInstallationConfig", + "ManagedPrometheusConfig", + "WorkloadMetadataConfig", 
+ "SetNetworkPolicyRequest", + "SetMaintenancePolicyRequest", + "ListLocationsRequest", + "ListLocationsResponse", + "Location", + "StatusCondition", + "NetworkConfig", + "GatewayAPIConfig", + "ServiceExternalIPsConfig", + "ListUsableSubnetworksRequest", + "ListUsableSubnetworksResponse", + "UsableSubnetworkSecondaryRange", + "UsableSubnetwork", + "VerticalPodAutoscaling", + "DefaultSnatStatus", + "IntraNodeVisibilityConfig", + "ILBSubsettingConfig", + "DNSConfig", + "MaxPodsConstraint", + "WorkloadIdentityConfig", + "WorkloadALTSConfig", + "WorkloadCertificates", + "MeshCertificates", + "DatabaseEncryption", + "ResourceUsageExportConfig", + "ShieldedNodes", + "VirtualNIC", + "FastSocket", + "GetOpenIDConfigRequest", + "GetOpenIDConfigResponse", + "GetJSONWebKeysRequest", + "Jwk", + "GetJSONWebKeysResponse", + "CheckAutopilotCompatibilityRequest", + "AutopilotCompatibilityIssue", + "CheckAutopilotCompatibilityResponse", + "ReleaseChannel", + "CostManagementConfig", + "TpuConfig", + "Master", + "Autopilot", + "WorkloadPolicyConfig", + "NotificationConfig", + "ConfidentialNodes", + "UpgradeEvent", + "UpgradeAvailableEvent", + "SecurityBulletinEvent", + "IdentityServiceConfig", + "LoggingConfig", + "LoggingComponentConfig", + "MonitoringConfig", + "AdvancedDatapathObservabilityConfig", + "NodePoolLoggingConfig", + "LoggingVariantConfig", + "MonitoringComponentConfig", + "Fleet", + }, +) + + +class PrivateIPv6GoogleAccess(proto.Enum): + r"""PrivateIPv6GoogleAccess controls whether and how the pods can + communicate with Google Services through gRPC over IPv6. + + Values: + PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): + Default value. Same as DISABLED + PRIVATE_IPV6_GOOGLE_ACCESS_DISABLED (1): + No private access to or from Google Services + PRIVATE_IPV6_GOOGLE_ACCESS_TO_GOOGLE (2): + Enables private IPv6 access to Google + Services from GKE + PRIVATE_IPV6_GOOGLE_ACCESS_BIDIRECTIONAL (3): + Enables private IPv6 access to and from + Google Services + """ + PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 + PRIVATE_IPV6_GOOGLE_ACCESS_DISABLED = 1 + PRIVATE_IPV6_GOOGLE_ACCESS_TO_GOOGLE = 2 + PRIVATE_IPV6_GOOGLE_ACCESS_BIDIRECTIONAL = 3 + + +class UpgradeResourceType(proto.Enum): + r"""UpgradeResourceType is the resource type that is upgrading. + It is used in upgrade notifications. + + Values: + UPGRADE_RESOURCE_TYPE_UNSPECIFIED (0): + Default value. This shouldn't be used. + MASTER (1): + Master / control plane + NODE_POOL (2): + Node pool + """ + UPGRADE_RESOURCE_TYPE_UNSPECIFIED = 0 + MASTER = 1 + NODE_POOL = 2 + + +class NodePoolUpdateStrategy(proto.Enum): + r"""Strategy used for node pool update. + + Values: + NODE_POOL_UPDATE_STRATEGY_UNSPECIFIED (0): + Default value if unset. GKE internally + defaults the update strategy to SURGE for + unspecified strategies. + BLUE_GREEN (2): + blue-green upgrade. + SURGE (3): + SURGE is the traditional way of upgrading a node pool. + max_surge and max_unavailable determines the level of + upgrade parallelism. + """ + NODE_POOL_UPDATE_STRATEGY_UNSPECIFIED = 0 + BLUE_GREEN = 2 + SURGE = 3 + + +class DatapathProvider(proto.Enum): + r"""The datapath provider selects the implementation of the + Kubernetes networking model for service resolution and network + policy enforcement. + + Values: + DATAPATH_PROVIDER_UNSPECIFIED (0): + Default value. + LEGACY_DATAPATH (1): + Use the IPTables implementation based on + kube-proxy. + ADVANCED_DATAPATH (2): + Use the eBPF based GKE Dataplane V2 with additional + features. See the `GKE Dataplane V2 + documentation `__ + for more. 
+ """ + DATAPATH_PROVIDER_UNSPECIFIED = 0 + LEGACY_DATAPATH = 1 + ADVANCED_DATAPATH = 2 + + +class StackType(proto.Enum): + r"""Possible values for IP stack type + + Values: + STACK_TYPE_UNSPECIFIED (0): + By default, the clusters will be IPV4 only + IPV4 (1): + The value used if the cluster is a IPV4 only + IPV4_IPV6 (2): + The value used if the cluster is a dual stack + cluster + """ + STACK_TYPE_UNSPECIFIED = 0 + IPV4 = 1 + IPV4_IPV6 = 2 + + +class LinuxNodeConfig(proto.Message): + r"""Parameters that can be configured on Linux nodes. + + Attributes: + sysctls (MutableMapping[str, str]): + The Linux kernel parameters to be applied to the nodes and + all pods running on the nodes. + + The following parameters are supported. + + net.core.busy_poll net.core.busy_read + net.core.netdev_max_backlog net.core.rmem_max + net.core.wmem_default net.core.wmem_max net.core.optmem_max + net.core.somaxconn net.ipv4.tcp_rmem net.ipv4.tcp_wmem + net.ipv4.tcp_tw_reuse + cgroup_mode (google.cloud.container_v1beta1.types.LinuxNodeConfig.CgroupMode): + cgroup_mode specifies the cgroup mode to be used on the + node. + """ + + class CgroupMode(proto.Enum): + r"""Possible cgroup modes that can be used. + + Values: + CGROUP_MODE_UNSPECIFIED (0): + CGROUP_MODE_UNSPECIFIED is when unspecified cgroup + configuration is used. The default for the GKE node OS image + will be used. + CGROUP_MODE_V1 (1): + CGROUP_MODE_V1 specifies to use cgroupv1 for the cgroup + configuration on the node image. + CGROUP_MODE_V2 (2): + CGROUP_MODE_V2 specifies to use cgroupv2 for the cgroup + configuration on the node image. + """ + CGROUP_MODE_UNSPECIFIED = 0 + CGROUP_MODE_V1 = 1 + CGROUP_MODE_V2 = 2 + + sysctls: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + cgroup_mode: CgroupMode = proto.Field( + proto.ENUM, + number=2, + enum=CgroupMode, + ) + + +class WindowsNodeConfig(proto.Message): + r"""Parameters that can be configured on Windows nodes. + Windows Node Config that define the parameters that will be used + to configure the Windows node pool settings + + Attributes: + os_version (google.cloud.container_v1beta1.types.WindowsNodeConfig.OSVersion): + OSVersion specifies the Windows node config + to be used on the node + """ + + class OSVersion(proto.Enum): + r"""Possible OS version that can be used. + + Values: + OS_VERSION_UNSPECIFIED (0): + When OSVersion is not specified + OS_VERSION_LTSC2019 (1): + LTSC2019 specifies to use LTSC2019 as the + Windows Servercore Base Image + OS_VERSION_LTSC2022 (2): + LTSC2022 specifies to use LTSC2022 as the + Windows Servercore Base Image + """ + OS_VERSION_UNSPECIFIED = 0 + OS_VERSION_LTSC2019 = 1 + OS_VERSION_LTSC2022 = 2 + + os_version: OSVersion = proto.Field( + proto.ENUM, + number=1, + enum=OSVersion, + ) + + +class NodeKubeletConfig(proto.Message): + r"""Node kubelet configs. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cpu_manager_policy (str): + Control the CPU management policy on the node. See + https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/ + + The following values are allowed. + + - "none": the default, which represents the existing + scheduling behavior. + - "static": allows pods with certain resource + characteristics to be granted increased CPU affinity and + exclusivity on the node. The default value is 'none' if + unspecified. 
+
+
+class NodeKubeletConfig(proto.Message):
+    r"""Node kubelet configs.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        cpu_manager_policy (str):
+            Control the CPU management policy on the node. See
+            https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/
+
+            The following values are allowed.
+
+            -  "none": the default, which represents the existing
+               scheduling behavior.
+            -  "static": allows pods with certain resource
+               characteristics to be granted increased CPU affinity and
+               exclusivity on the node.
+
+            The default value is 'none' if unspecified.
+        cpu_cfs_quota (google.protobuf.wrappers_pb2.BoolValue):
+            Enable CPU CFS quota enforcement for
+            containers that specify CPU limits.
+            This option is enabled by default, which makes
+            the kubelet use CFS quota
+            (https://www.kernel.org/doc/Documentation/scheduler/sched-bwc.txt)
+            to enforce container CPU limits. Otherwise, CPU
+            limits will not be enforced at all.
+
+            Disable this option to mitigate CPU throttling
+            problems while still keeping your pods in the
+            Guaranteed QoS class by specifying the CPU
+            limits.
+
+            The default value is 'true' if unspecified.
+        cpu_cfs_quota_period (str):
+            Set the CPU CFS quota period value 'cpu.cfs_period_us'.
+
+            The string must be a sequence of decimal numbers, each with
+            optional fraction and a unit suffix, such as "300ms". Valid
+            time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".
+            The value must be a positive duration.
+        pod_pids_limit (int):
+            Set the Pod PID limits. See
+            https://kubernetes.io/docs/concepts/policy/pid-limiting/#pod-pid-limits
+
+            Controls the maximum number of processes allowed
+            to run in a pod. The value must be greater than
+            or equal to 1024 and less than 4194304.
+        insecure_kubelet_readonly_port_enabled (bool):
+            Enable or disable the Kubelet read-only port.
+
+            This field is a member of `oneof`_ ``_insecure_kubelet_readonly_port_enabled``.
+    """
+
+    cpu_manager_policy: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    cpu_cfs_quota: wrappers_pb2.BoolValue = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=wrappers_pb2.BoolValue,
+    )
+    cpu_cfs_quota_period: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    pod_pids_limit: int = proto.Field(
+        proto.INT64,
+        number=4,
+    )
+    insecure_kubelet_readonly_port_enabled: bool = proto.Field(
+        proto.BOOL,
+        number=7,
+        optional=True,
+    )
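+
+
+# --- Usage sketch (illustrative only, not emitted by the code generator). ---
+# Shows how the NodeKubeletConfig message above can be constructed; the field
+# values are hypothetical. Note that cpu_cfs_quota is a
+# google.protobuf.BoolValue wrapper message, not a bare bool.
+def _example_node_kubelet_config() -> NodeKubeletConfig:
+    return NodeKubeletConfig(
+        cpu_manager_policy="static",
+        cpu_cfs_quota=wrappers_pb2.BoolValue(value=True),
+        cpu_cfs_quota_period="100ms",
+        pod_pids_limit=4096,  # must be >= 1024 and < 4194304
+    )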
+
+
+class NodeConfig(proto.Message):
+    r"""Parameters that describe the nodes in a cluster.
+
+    GKE Autopilot clusters do not recognize parameters in
+    ``NodeConfig``. Use
+    [AutoprovisioningNodePoolDefaults][google.container.v1beta1.AutoprovisioningNodePoolDefaults]
+    instead.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        machine_type (str):
+            The name of a Google Compute Engine `machine
+            type `__.
+
+            If unspecified, the default machine type is ``e2-medium``.
+        disk_size_gb (int):
+            Size of the disk attached to each node,
+            specified in GB. The smallest allowed disk size
+            is 10GB.
+
+            If unspecified, the default disk size is 100GB.
+        oauth_scopes (MutableSequence[str]):
+            The set of Google API scopes to be made available on all of
+            the node VMs under the "default" service account.
+
+            The following scopes are recommended, but not required, and
+            by default are not included:
+
+            -  ``https://www.googleapis.com/auth/compute`` is required
+               for mounting persistent storage on your nodes.
+            -  ``https://www.googleapis.com/auth/devstorage.read_only``
+               is required for communicating with **gcr.io** (the
+               `Google Container
+               Registry `__).
+
+            If unspecified, no scopes are added, unless Cloud Logging or
+            Cloud Monitoring are enabled, in which case their required
+            scopes will be added.
+        service_account (str):
+            The Google Cloud Platform Service Account to
+            be used by the node VMs. Specify the email
+            address of the Service Account; otherwise, if no
+            Service Account is specified, the "default"
+            service account is used.
+        metadata (MutableMapping[str, str]):
+            The metadata key/value pairs assigned to instances in the
+            cluster.
+
+            Keys must conform to the regexp ``[a-zA-Z0-9-_]+`` and be
+            less than 128 bytes in length. These are reflected as part
+            of a URL in the metadata server. Additionally, to avoid
+            ambiguity, keys must not conflict with any other metadata
+            keys for the project or be one of the reserved keys:
+
+            -  "cluster-location"
+            -  "cluster-name"
+            -  "cluster-uid"
+            -  "configure-sh"
+            -  "containerd-configure-sh"
+            -  "enable-oslogin"
+            -  "gci-ensure-gke-docker"
+            -  "gci-metrics-enabled"
+            -  "gci-update-strategy"
+            -  "instance-template"
+            -  "kube-env"
+            -  "startup-script"
+            -  "user-data"
+            -  "disable-address-manager"
+            -  "windows-startup-script-ps1"
+            -  "common-psm1"
+            -  "k8s-node-setup-psm1"
+            -  "install-ssh-psm1"
+            -  "user-profile-psm1"
+
+            Values are free-form strings, and only have meaning as
+            interpreted by the image running in the instance. The only
+            restriction placed on them is that each value's size must be
+            less than or equal to 32 KB.
+
+            The total size of all keys and values must be less than 512
+            KB.
+        image_type (str):
+            The image type to use for this node. Note
+            that for a given image type, the latest version
+            of it will be used. Please see
+            https://cloud.google.com/kubernetes-engine/docs/concepts/node-images
+            for available image types.
+        labels (MutableMapping[str, str]):
+            The map of Kubernetes labels (key/value
+            pairs) to be applied to each node. These will be
+            added in addition to any default label(s) that
+            Kubernetes may apply to the node.
+            In case of conflict in label keys, the applied
+            set may differ depending on the Kubernetes
+            version -- it's best to assume the behavior is
+            undefined and conflicts should be avoided.
+            For more information, including usage and the
+            valid values, see:
+
+            https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/
+        local_ssd_count (int):
+            The number of local SSD disks to be attached
+            to the node.
+            The limit for this value is dependent upon the
+            maximum number of disks available on a machine
+            per zone. See:
+
+            https://cloud.google.com/compute/docs/disks/local-ssd
+            for more information.
+        tags (MutableSequence[str]):
+            The list of instance tags applied to all
+            nodes. Tags are used to identify valid sources
+            or targets for network firewalls and are
+            specified by the client during cluster or node
+            pool creation. Each tag within the list must
+            comply with RFC1035.
+        preemptible (bool):
+            Whether the nodes are created as preemptible
+            VM instances. See:
+            https://cloud.google.com/compute/docs/instances/preemptible
+            for more information about preemptible VM
+            instances.
+        accelerators (MutableSequence[google.cloud.container_v1beta1.types.AcceleratorConfig]):
+            A list of hardware accelerators to be
+            attached to each node. See
+            https://cloud.google.com/compute/docs/gpus for
+            more information about support for GPUs.
+        sandbox_config (google.cloud.container_v1beta1.types.SandboxConfig):
+            Sandbox configuration for this node.
+        node_group (str):
+            Setting this field will assign instances of this pool to run
+            on the specified node group. This is useful for running
+            workloads on `sole tenant
+            nodes `__.
+        reservation_affinity (google.cloud.container_v1beta1.types.ReservationAffinity):
+            The optional reservation affinity. Setting this field will
+            apply the specified `Zonal Compute
+            Reservation `__
+            to this node pool.
+        disk_type (str):
+            Type of the disk attached to each node (e.g.
+ 'pd-standard', 'pd-ssd' or 'pd-balanced') + + If unspecified, the default disk type is + 'pd-standard' + min_cpu_platform (str): + Minimum CPU platform to be used by this instance. The + instance may be scheduled on the specified or newer CPU + platform. Applicable values are the friendly names of CPU + platforms, such as ``minCpuPlatform: "Intel Haswell"`` or + ``minCpuPlatform: "Intel Sandy Bridge"``. For more + information, read `how to specify min CPU + platform `__. + workload_metadata_config (google.cloud.container_v1beta1.types.WorkloadMetadataConfig): + The workload metadata configuration for this + node. + taints (MutableSequence[google.cloud.container_v1beta1.types.NodeTaint]): + List of kubernetes taints to be applied to + each node. + For more information, including usage and the + valid values, see: + + https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + boot_disk_kms_key (str): + The Customer Managed Encryption Key used to encrypt the boot + disk attached to each node in the node pool. This should be + of the form + projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. + For more information about protecting resources with Cloud + KMS Keys please see: + https://cloud.google.com/compute/docs/disks/customer-managed-encryption + shielded_instance_config (google.cloud.container_v1beta1.types.ShieldedInstanceConfig): + Shielded Instance options. + linux_node_config (google.cloud.container_v1beta1.types.LinuxNodeConfig): + Parameters that can be configured on Linux + nodes. + kubelet_config (google.cloud.container_v1beta1.types.NodeKubeletConfig): + Node kubelet configs. + ephemeral_storage_config (google.cloud.container_v1beta1.types.EphemeralStorageConfig): + Parameters for the ephemeral storage + filesystem. If unspecified, ephemeral storage is + backed by the boot disk. + gcfs_config (google.cloud.container_v1beta1.types.GcfsConfig): + GCFS (Google Container File System) configs. + advanced_machine_features (google.cloud.container_v1beta1.types.AdvancedMachineFeatures): + Advanced features for the Compute Engine VM. + gvnic (google.cloud.container_v1beta1.types.VirtualNIC): + Enable or disable gvnic on the node pool. + spot (bool): + Spot flag for enabling Spot VM, which is a + rebrand of the existing preemptible flag. + confidential_nodes (google.cloud.container_v1beta1.types.ConfidentialNodes): + Confidential nodes config. + All the nodes in the node pool will be + Confidential VM once enabled. + fast_socket (google.cloud.container_v1beta1.types.FastSocket): + Enable or disable NCCL fast socket for the + node pool. + + This field is a member of `oneof`_ ``_fast_socket``. + resource_labels (MutableMapping[str, str]): + The resource labels for the node pool to use + to annotate any related Google Compute Engine + resources. + logging_config (google.cloud.container_v1beta1.types.NodePoolLoggingConfig): + Logging configuration. + windows_node_config (google.cloud.container_v1beta1.types.WindowsNodeConfig): + Parameters that can be configured on Windows + nodes. + local_nvme_ssd_block_config (google.cloud.container_v1beta1.types.LocalNvmeSsdBlockConfig): + Parameters for using raw-block Local NVMe + SSDs. + ephemeral_storage_local_ssd_config (google.cloud.container_v1beta1.types.EphemeralStorageLocalSsdConfig): + Parameters for the node ephemeral storage using Local SSDs. + If unspecified, ephemeral storage is backed by the boot + disk. 
This field is functionally equivalent to the + ephemeral_storage_config + sole_tenant_config (google.cloud.container_v1beta1.types.SoleTenantConfig): + Parameters for node pools to be backed by + shared sole tenant node groups. + host_maintenance_policy (google.cloud.container_v1beta1.types.HostMaintenancePolicy): + HostMaintenancePolicy contains the desired + maintenance policy for the Google Compute Engine + hosts. + enable_confidential_storage (bool): + Optional. Enable confidential storage on Hyperdisk. + boot_disk_kms_key is required when + enable_confidential_storage is true. This is only available + for private preview. + """ + + machine_type: str = proto.Field( + proto.STRING, + number=1, + ) + disk_size_gb: int = proto.Field( + proto.INT32, + number=2, + ) + oauth_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + service_account: str = proto.Field( + proto.STRING, + number=9, + ) + metadata: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + image_type: str = proto.Field( + proto.STRING, + number=5, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + local_ssd_count: int = proto.Field( + proto.INT32, + number=7, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + preemptible: bool = proto.Field( + proto.BOOL, + number=10, + ) + accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="AcceleratorConfig", + ) + sandbox_config: "SandboxConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="SandboxConfig", + ) + node_group: str = proto.Field( + proto.STRING, + number=18, + ) + reservation_affinity: "ReservationAffinity" = proto.Field( + proto.MESSAGE, + number=19, + message="ReservationAffinity", + ) + disk_type: str = proto.Field( + proto.STRING, + number=12, + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=13, + ) + workload_metadata_config: "WorkloadMetadataConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="WorkloadMetadataConfig", + ) + taints: MutableSequence["NodeTaint"] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message="NodeTaint", + ) + boot_disk_kms_key: str = proto.Field( + proto.STRING, + number=23, + ) + shielded_instance_config: "ShieldedInstanceConfig" = proto.Field( + proto.MESSAGE, + number=20, + message="ShieldedInstanceConfig", + ) + linux_node_config: "LinuxNodeConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="LinuxNodeConfig", + ) + kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=22, + message="NodeKubeletConfig", + ) + ephemeral_storage_config: "EphemeralStorageConfig" = proto.Field( + proto.MESSAGE, + number=24, + message="EphemeralStorageConfig", + ) + gcfs_config: "GcfsConfig" = proto.Field( + proto.MESSAGE, + number=25, + message="GcfsConfig", + ) + advanced_machine_features: "AdvancedMachineFeatures" = proto.Field( + proto.MESSAGE, + number=26, + message="AdvancedMachineFeatures", + ) + gvnic: "VirtualNIC" = proto.Field( + proto.MESSAGE, + number=29, + message="VirtualNIC", + ) + spot: bool = proto.Field( + proto.BOOL, + number=32, + ) + confidential_nodes: "ConfidentialNodes" = proto.Field( + proto.MESSAGE, + number=35, + message="ConfidentialNodes", + ) + fast_socket: "FastSocket" = proto.Field( + proto.MESSAGE, + number=36, + optional=True, + message="FastSocket", + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + 
        proto.STRING,
+        proto.STRING,
+        number=37,
+    )
+    logging_config: "NodePoolLoggingConfig" = proto.Field(
+        proto.MESSAGE,
+        number=38,
+        message="NodePoolLoggingConfig",
+    )
+    windows_node_config: "WindowsNodeConfig" = proto.Field(
+        proto.MESSAGE,
+        number=39,
+        message="WindowsNodeConfig",
+    )
+    local_nvme_ssd_block_config: "LocalNvmeSsdBlockConfig" = proto.Field(
+        proto.MESSAGE,
+        number=40,
+        message="LocalNvmeSsdBlockConfig",
+    )
+    ephemeral_storage_local_ssd_config: "EphemeralStorageLocalSsdConfig" = proto.Field(
+        proto.MESSAGE,
+        number=41,
+        message="EphemeralStorageLocalSsdConfig",
+    )
+    sole_tenant_config: "SoleTenantConfig" = proto.Field(
+        proto.MESSAGE,
+        number=42,
+        message="SoleTenantConfig",
+    )
+    host_maintenance_policy: "HostMaintenancePolicy" = proto.Field(
+        proto.MESSAGE,
+        number=44,
+        message="HostMaintenancePolicy",
+    )
+    enable_confidential_storage: bool = proto.Field(
+        proto.BOOL,
+        number=46,
+    )
+
+
+class AdvancedMachineFeatures(proto.Message):
+    r"""Specifies options for controlling advanced machine features.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        threads_per_core (int):
+            The number of threads per physical core. To
+            disable simultaneous multithreading (SMT) set
+            this to 1. If unset, the maximum number of
+            threads supported per core by the underlying
+            processor is assumed.
+
+            This field is a member of `oneof`_ ``_threads_per_core``.
+    """
+
+    threads_per_core: int = proto.Field(
+        proto.INT64,
+        number=1,
+        optional=True,
+    )
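+
+
+# --- Usage sketch (illustrative only, not emitted by the code generator). ---
+# threads_per_core above is an optional (explicit-presence) field: leaving it
+# unset means "use the processor's maximum"; setting it to 1 disables SMT.
+def _example_advanced_machine_features() -> AdvancedMachineFeatures:
+    return AdvancedMachineFeatures(threads_per_core=1)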
+
+
+class NodeNetworkConfig(proto.Message):
+    r"""Parameters for node pool-level network config.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        create_pod_range (bool):
+            Input only. Whether to create a new range for pod IPs in
+            this node pool. Defaults are provided for ``pod_range`` and
+            ``pod_ipv4_cidr_block`` if they are not specified.
+
+            If neither ``create_pod_range`` nor ``pod_range`` is
+            specified, the cluster-level default
+            (``ip_allocation_policy.cluster_ipv4_cidr_block``) is used.
+
+            Only applicable if ``ip_allocation_policy.use_ip_aliases``
+            is true.
+
+            This field cannot be changed after the node pool has been
+            created.
+        pod_range (str):
+            The ID of the secondary range for pod IPs. If
+            ``create_pod_range`` is true, this ID is used for the new
+            range. If ``create_pod_range`` is false, uses an existing
+            secondary range with this ID.
+
+            Only applicable if ``ip_allocation_policy.use_ip_aliases``
+            is true.
+
+            This field cannot be changed after the node pool has been
+            created.
+        pod_ipv4_cidr_block (str):
+            The IP address range for pod IPs in this node pool.
+
+            Only applicable if ``create_pod_range`` is true.
+
+            Set to blank to have a range chosen with the default size.
+
+            Set to /netmask (e.g. ``/14``) to have a range chosen with a
+            specific netmask.
+
+            Set to a
+            `CIDR `__
+            notation (e.g. ``10.96.0.0/14``) to pick a specific range to
+            use.
+
+            Only applicable if ``ip_allocation_policy.use_ip_aliases``
+            is true.
+
+            This field cannot be changed after the node pool has been
+            created.
+        enable_private_nodes (bool):
+            Whether nodes have internal IP addresses only. If
+            enable_private_nodes is not specified, then the value is
+            derived from
+            [cluster.privateClusterConfig.enablePrivateNodes][google.container.v1beta1.PrivateClusterConfig.enablePrivateNodes]
+
+            This field is a member of `oneof`_ ``_enable_private_nodes``.
+        network_performance_config (google.cloud.container_v1beta1.types.NodeNetworkConfig.NetworkPerformanceConfig):
+            Network bandwidth tier configuration.
+
+            This field is a member of `oneof`_ ``_network_performance_config``.
+        pod_cidr_overprovision_config (google.cloud.container_v1beta1.types.PodCIDROverprovisionConfig):
+            [PRIVATE FIELD] Pod CIDR size overprovisioning config for
+            the nodepool.
+
+            Pod CIDR size per node depends on max_pods_per_node. By
+            default, the value of max_pods_per_node is rounded off to
+            next power of 2 and we then double that to get the size of
+            pod CIDR block per node. Example: max_pods_per_node of 30
+            would result in 64 IPs (/26).
+
+            This config can disable the doubling of IPs (we still round
+            off to next power of 2) Example: max_pods_per_node of 30
+            will result in 32 IPs (/27) when overprovisioning is
+            disabled.
+        additional_node_network_configs (MutableSequence[google.cloud.container_v1beta1.types.AdditionalNodeNetworkConfig]):
+            We specify the additional node networks for
+            this node pool using this list. Each node
+            network corresponds to an additional interface.
+        additional_pod_network_configs (MutableSequence[google.cloud.container_v1beta1.types.AdditionalPodNetworkConfig]):
+            We specify the additional pod networks for
+            this node pool using this list. Each pod network
+            corresponds to an additional alias IP range for
+            the node.
+        pod_ipv4_range_utilization (float):
+            Output only. [Output only] The utilization of the IPv4 range
+            for the pod. The ratio is Usage/[Total number of IPs in the
+            secondary range], Usage=numNodes\ *numZones*\ podIPsPerNode.
+    """
+
+    class NetworkPerformanceConfig(proto.Message):
+        r"""Configuration of all network bandwidth tiers
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            total_egress_bandwidth_tier (google.cloud.container_v1beta1.types.NodeNetworkConfig.NetworkPerformanceConfig.Tier):
+                Specifies the total network bandwidth tier
+                for the NodePool.
+
+                This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``.
+            external_ip_egress_bandwidth_tier (google.cloud.container_v1beta1.types.NodeNetworkConfig.NetworkPerformanceConfig.Tier):
+                Specifies the network bandwidth tier for the
+                NodePool for traffic to external/public IP
+                addresses.
+
+                This field is a member of `oneof`_ ``_external_ip_egress_bandwidth_tier``.
+        """
+
+        class Tier(proto.Enum):
+            r"""Node network tier
+
+            Values:
+                TIER_UNSPECIFIED (0):
+                    Default value
+                TIER_1 (1):
+                    Higher bandwidth, actual values based on VM
+                    size.
+ """ + TIER_UNSPECIFIED = 0 + TIER_1 = 1 + + total_egress_bandwidth_tier: "NodeNetworkConfig.NetworkPerformanceConfig.Tier" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="NodeNetworkConfig.NetworkPerformanceConfig.Tier", + ) + external_ip_egress_bandwidth_tier: "NodeNetworkConfig.NetworkPerformanceConfig.Tier" = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum="NodeNetworkConfig.NetworkPerformanceConfig.Tier", + ) + + create_pod_range: bool = proto.Field( + proto.BOOL, + number=4, + ) + pod_range: str = proto.Field( + proto.STRING, + number=5, + ) + pod_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=6, + ) + enable_private_nodes: bool = proto.Field( + proto.BOOL, + number=9, + optional=True, + ) + network_performance_config: NetworkPerformanceConfig = proto.Field( + proto.MESSAGE, + number=11, + optional=True, + message=NetworkPerformanceConfig, + ) + pod_cidr_overprovision_config: "PodCIDROverprovisionConfig" = proto.Field( + proto.MESSAGE, + number=13, + message="PodCIDROverprovisionConfig", + ) + additional_node_network_configs: MutableSequence[ + "AdditionalNodeNetworkConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="AdditionalNodeNetworkConfig", + ) + additional_pod_network_configs: MutableSequence[ + "AdditionalPodNetworkConfig" + ] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message="AdditionalPodNetworkConfig", + ) + pod_ipv4_range_utilization: float = proto.Field( + proto.DOUBLE, + number=16, + ) + + +class AdditionalNodeNetworkConfig(proto.Message): + r"""AdditionalNodeNetworkConfig is the configuration for + additional node networks within the NodeNetworkConfig message + + Attributes: + network (str): + Name of the VPC where the additional + interface belongs + subnetwork (str): + Name of the subnetwork where the additional + interface belongs + """ + + network: str = proto.Field( + proto.STRING, + number=1, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AdditionalPodNetworkConfig(proto.Message): + r"""AdditionalPodNetworkConfig is the configuration for + additional pod networks within the NodeNetworkConfig message + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + subnetwork (str): + Name of the subnetwork where the additional + pod network belongs + secondary_pod_range (str): + The name of the secondary range on the subnet + which provides IP address for this pod range + max_pods_per_node (google.cloud.container_v1beta1.types.MaxPodsConstraint): + The maximum number of pods per node which use + this pod network + + This field is a member of `oneof`_ ``_max_pods_per_node``. + """ + + subnetwork: str = proto.Field( + proto.STRING, + number=1, + ) + secondary_pod_range: str = proto.Field( + proto.STRING, + number=2, + ) + max_pods_per_node: "MaxPodsConstraint" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="MaxPodsConstraint", + ) + + +class ShieldedInstanceConfig(proto.Message): + r"""A set of Shielded Instance options. + + Attributes: + enable_secure_boot (bool): + Defines whether the instance has Secure Boot + enabled. + Secure Boot helps ensure that the system only + runs authentic software by verifying the digital + signature of all boot components, and halting + the boot process if signature verification + fails. + enable_integrity_monitoring (bool): + Defines whether the instance has integrity + monitoring enabled. 
+ Enables monitoring and attestation of the boot + integrity of the instance. The attestation is + performed against the integrity policy baseline. + This baseline is initially derived from the + implicitly trusted boot image when the instance + is created. + """ + + enable_secure_boot: bool = proto.Field( + proto.BOOL, + number=1, + ) + enable_integrity_monitoring: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class SandboxConfig(proto.Message): + r"""SandboxConfig contains configurations of the sandbox to use + for the node. + + Attributes: + sandbox_type (str): + Type of the sandbox to use for the node (e.g. + 'gvisor') + type_ (google.cloud.container_v1beta1.types.SandboxConfig.Type): + Type of the sandbox to use for the node. + """ + + class Type(proto.Enum): + r"""Possible types of sandboxes. + + Values: + UNSPECIFIED (0): + Default value. This should not be used. + GVISOR (1): + Run sandbox using gvisor. + """ + UNSPECIFIED = 0 + GVISOR = 1 + + sandbox_type: str = proto.Field( + proto.STRING, + number=1, + ) + type_: Type = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + + +class EphemeralStorageConfig(proto.Message): + r"""EphemeralStorageConfig contains configuration for the + ephemeral storage filesystem. + + Attributes: + local_ssd_count (int): + Number of local SSDs to use to back ephemeral + storage. Uses NVMe interfaces. Each local SSD is + 375 GB in size. If zero, it means to disable + using local SSDs as ephemeral storage. + """ + + local_ssd_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +class LocalNvmeSsdBlockConfig(proto.Message): + r"""LocalNvmeSsdBlockConfig contains configuration for using + raw-block local NVMe SSDs + + Attributes: + local_ssd_count (int): + The number of raw-block local NVMe SSD disks + to be attached to the node. Each local SSD is + 375 GB in size. If zero, it means no raw-block + local NVMe SSD disks to be attached to the node. + The limit for this value is dependent upon the + maximum number of disks available on a machine + per zone. See: + + https://cloud.google.com/compute/docs/disks/local-ssd + for more information. + """ + + local_ssd_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +class EphemeralStorageLocalSsdConfig(proto.Message): + r"""EphemeralStorageLocalSsdConfig contains configuration for the + node ephemeral storage using Local SSDs. + + Attributes: + local_ssd_count (int): + Number of local SSDs to use to back ephemeral + storage. Uses NVMe interfaces. Each local SSD is + 375 GB in size. If zero, it means to disable + using local SSDs as ephemeral storage. The limit + for this value is dependent upon the maximum + number of disks available on a machine per zone. + See: + + https://cloud.google.com/compute/docs/disks/local-ssd + for more information. + """ + + local_ssd_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +class GcfsConfig(proto.Message): + r"""GcfsConfig contains configurations of Google Container File + System. + + Attributes: + enabled (bool): + Whether to use GCFS. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ReservationAffinity(proto.Message): + r"""`ReservationAffinity `__ + is the configuration of desired reservation which instances could + take capacity from. + + Attributes: + consume_reservation_type (google.cloud.container_v1beta1.types.ReservationAffinity.Type): + Corresponds to the type of reservation + consumption. + key (str): + Corresponds to the label key of a reservation resource. 
To
+            target a SPECIFIC_RESERVATION by name, specify
+            "compute.googleapis.com/reservation-name" as the key and
+            specify the name of your reservation as its value.
+        values (MutableSequence[str]):
+            Corresponds to the label value(s) of
+            reservation resource(s).
+    """
+
+    class Type(proto.Enum):
+        r"""Indicates whether to consume capacity from a reservation or
+        not.
+
+        Values:
+            UNSPECIFIED (0):
+                Default value. This should not be used.
+            NO_RESERVATION (1):
+                Do not consume from any reserved capacity.
+            ANY_RESERVATION (2):
+                Consume any reservation available.
+            SPECIFIC_RESERVATION (3):
+                Must consume from a specific reservation.
+                Must specify the key and value fields to
+                identify the reservation.
+        """
+        UNSPECIFIED = 0
+        NO_RESERVATION = 1
+        ANY_RESERVATION = 2
+        SPECIFIC_RESERVATION = 3
+
+    consume_reservation_type: Type = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Type,
+    )
+    key: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    values: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class SoleTenantConfig(proto.Message):
+    r"""SoleTenantConfig contains the NodeAffinities to specify what
+    shared sole tenant node groups should back the node pool.
+
+    Attributes:
+        node_affinities (MutableSequence[google.cloud.container_v1beta1.types.SoleTenantConfig.NodeAffinity]):
+            NodeAffinities used to match to a shared sole
+            tenant node group.
+    """
+
+    class NodeAffinity(proto.Message):
+        r"""Specifies the NodeAffinity key, values, and affinity operator
+        according to `shared sole tenant node group
+        affinities `__.
+
+        Attributes:
+            key (str):
+                Key for NodeAffinity.
+            operator (google.cloud.container_v1beta1.types.SoleTenantConfig.NodeAffinity.Operator):
+                Operator for NodeAffinity.
+            values (MutableSequence[str]):
+                Values for NodeAffinity.
+        """
+
+        class Operator(proto.Enum):
+            r"""Operator allows user to specify affinity or anti-affinity for
+            the given key values.
+
+            Values:
+                OPERATOR_UNSPECIFIED (0):
+                    Invalid or unspecified affinity operator.
+                IN (1):
+                    Affinity operator.
+                NOT_IN (2):
+                    Anti-affinity operator.
+            """
+            OPERATOR_UNSPECIFIED = 0
+            IN = 1
+            NOT_IN = 2
+
+        key: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        operator: "SoleTenantConfig.NodeAffinity.Operator" = proto.Field(
+            proto.ENUM,
+            number=2,
+            enum="SoleTenantConfig.NodeAffinity.Operator",
+        )
+        values: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=3,
+        )
+
+    node_affinities: MutableSequence[NodeAffinity] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=NodeAffinity,
+    )
+
+
+class HostMaintenancePolicy(proto.Message):
+    r"""HostMaintenancePolicy contains the maintenance policy for the
+    hosts on which the GKE VMs run.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        maintenance_interval (google.cloud.container_v1beta1.types.HostMaintenancePolicy.MaintenanceInterval):
+            Specifies the frequency of planned
+            maintenance events.
+
+            This field is a member of `oneof`_ ``_maintenance_interval``.
+    """
+
+    class MaintenanceInterval(proto.Enum):
+        r"""Allows selecting how infrastructure upgrades should be
+        applied to the cluster or node pool.
+
+        Values:
+            MAINTENANCE_INTERVAL_UNSPECIFIED (0):
+                The maintenance interval is not explicitly
+                specified.
+            AS_NEEDED (1):
+                Nodes are eligible to receive infrastructure
+                and hypervisor updates as they become available.
+ This may result in more maintenance operations + (live migrations or terminations) for the node + than the PERIODIC option. + PERIODIC (2): + Nodes receive infrastructure and hypervisor updates on a + periodic basis, minimizing the number of maintenance + operations (live migrations or terminations) on an + individual VM. This may mean underlying VMs will take longer + to receive an update than if it was configured for + AS_NEEDED. Security updates will still be applied as soon as + they are available. + """ + MAINTENANCE_INTERVAL_UNSPECIFIED = 0 + AS_NEEDED = 1 + PERIODIC = 2 + + maintenance_interval: MaintenanceInterval = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=MaintenanceInterval, + ) + + +class NodeTaint(proto.Message): + r"""Kubernetes taint is composed of three fields: key, value, and + effect. Effect can only be one of three types: NoSchedule, + PreferNoSchedule or NoExecute. + + See + `here `__ + for more information, including usage and the valid values. + + Attributes: + key (str): + Key for taint. + value (str): + Value for taint. + effect (google.cloud.container_v1beta1.types.NodeTaint.Effect): + Effect for taint. + """ + + class Effect(proto.Enum): + r"""Possible values for Effect in taint. + + Values: + EFFECT_UNSPECIFIED (0): + Not set + NO_SCHEDULE (1): + NoSchedule + PREFER_NO_SCHEDULE (2): + PreferNoSchedule + NO_EXECUTE (3): + NoExecute + """ + EFFECT_UNSPECIFIED = 0 + NO_SCHEDULE = 1 + PREFER_NO_SCHEDULE = 2 + NO_EXECUTE = 3 + + key: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + effect: Effect = proto.Field( + proto.ENUM, + number=3, + enum=Effect, + ) + + +class NodeTaints(proto.Message): + r"""Collection of Kubernetes `node + taints `__. + + Attributes: + taints (MutableSequence[google.cloud.container_v1beta1.types.NodeTaint]): + List of node taints. + """ + + taints: MutableSequence["NodeTaint"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NodeTaint", + ) + + +class NodeLabels(proto.Message): + r"""Collection of node-level `Kubernetes + labels `__. + + Attributes: + labels (MutableMapping[str, str]): + Map of node label keys and node label values. + """ + + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +class ResourceLabels(proto.Message): + r"""Collection of `GCP + labels `__. + + Attributes: + labels (MutableMapping[str, str]): + Map of node label keys and node label values. + """ + + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + +class NetworkTags(proto.Message): + r"""Collection of Compute Engine network tags that can be applied to a + node's underlying VM instance. (See ``tags`` field in + ```NodeConfig`` `__). + + Attributes: + tags (MutableSequence[str]): + List of network tags. + """ + + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class MasterAuth(proto.Message): + r"""The authentication information for accessing the master + endpoint. Authentication can be done using HTTP basic auth or + using client certificates. + + Attributes: + username (str): + The username to use for HTTP basic + authentication to the master endpoint. For + clusters v1.6.0 and later, basic authentication + can be disabled by leaving username unspecified + (or setting it to the empty string). + + Warning: basic authentication is deprecated, and + will be removed in GKE control plane versions + 1.19 and newer. 
For a list of recommended + authentication methods, see: + + https://cloud.google.com/kubernetes-engine/docs/how-to/api-server-authentication + password (str): + The password to use for HTTP basic + authentication to the master endpoint. Because + the master endpoint is open to the Internet, you + should create a strong password. If a password + is provided for cluster creation, username must + be non-empty. + + Warning: basic authentication is deprecated, and + will be removed in GKE control plane versions + 1.19 and newer. For a list of recommended + authentication methods, see: + + https://cloud.google.com/kubernetes-engine/docs/how-to/api-server-authentication + client_certificate_config (google.cloud.container_v1beta1.types.ClientCertificateConfig): + Configuration for client certificate + authentication on the cluster. For clusters + before v1.12, if no configuration is specified, + a client certificate is issued. + cluster_ca_certificate (str): + + client_certificate (str): + [Output only] Base64-encoded public certificate used by + clients to authenticate to the cluster endpoint. + client_key (str): + [Output only] Base64-encoded private key used by clients to + authenticate to the cluster endpoint. + """ + + username: str = proto.Field( + proto.STRING, + number=1, + ) + password: str = proto.Field( + proto.STRING, + number=2, + ) + client_certificate_config: "ClientCertificateConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="ClientCertificateConfig", + ) + cluster_ca_certificate: str = proto.Field( + proto.STRING, + number=100, + ) + client_certificate: str = proto.Field( + proto.STRING, + number=101, + ) + client_key: str = proto.Field( + proto.STRING, + number=102, + ) + + +class ClientCertificateConfig(proto.Message): + r"""Configuration for client certificates on the cluster. + + Attributes: + issue_client_certificate (bool): + Issue a client certificate. + """ + + issue_client_certificate: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class AddonsConfig(proto.Message): + r"""Configuration for the addons that can be automatically spun + up in the cluster, enabling additional functionality. + + Attributes: + http_load_balancing (google.cloud.container_v1beta1.types.HttpLoadBalancing): + Configuration for the HTTP (L7) load + balancing controller addon, which makes it easy + to set up HTTP load balancers for services in a + cluster. + horizontal_pod_autoscaling (google.cloud.container_v1beta1.types.HorizontalPodAutoscaling): + Configuration for the horizontal pod + autoscaling feature, which increases or + decreases the number of replica pods a + replication controller has based on the resource + usage of the existing pods. + kubernetes_dashboard (google.cloud.container_v1beta1.types.KubernetesDashboard): + Configuration for the Kubernetes Dashboard. + This addon is deprecated, and will be disabled + in 1.15. It is recommended to use the Cloud + Console to manage and monitor your Kubernetes + clusters, workloads and applications. For more + information, see: + + https://cloud.google.com/kubernetes-engine/docs/concepts/dashboards + network_policy_config (google.cloud.container_v1beta1.types.NetworkPolicyConfig): + Configuration for NetworkPolicy. This only + tracks whether the addon is enabled or not on + the Master, it does not track whether network + policy is enabled for the nodes. + istio_config (google.cloud.container_v1beta1.types.IstioConfig): + Configuration for Istio, an open platform to + connect, manage, and secure microservices. 
+ cloud_run_config (google.cloud.container_v1beta1.types.CloudRunConfig): + Configuration for the Cloud Run addon. The ``IstioConfig`` + addon must be enabled in order to enable Cloud Run addon. + This option can only be enabled at cluster creation time. + dns_cache_config (google.cloud.container_v1beta1.types.DnsCacheConfig): + Configuration for NodeLocalDNS, a dns cache + running on cluster nodes + config_connector_config (google.cloud.container_v1beta1.types.ConfigConnectorConfig): + Configuration for the ConfigConnector add-on, + a Kubernetes extension to manage hosted GCP + services through the Kubernetes API + gce_persistent_disk_csi_driver_config (google.cloud.container_v1beta1.types.GcePersistentDiskCsiDriverConfig): + Configuration for the Compute Engine + Persistent Disk CSI driver. + kalm_config (google.cloud.container_v1beta1.types.KalmConfig): + Configuration for the KALM addon, which + manages the lifecycle of k8s applications. + gcp_filestore_csi_driver_config (google.cloud.container_v1beta1.types.GcpFilestoreCsiDriverConfig): + Configuration for the GCP Filestore CSI + driver. + gke_backup_agent_config (google.cloud.container_v1beta1.types.GkeBackupAgentConfig): + Configuration for the Backup for GKE agent + addon. + gcs_fuse_csi_driver_config (google.cloud.container_v1beta1.types.GcsFuseCsiDriverConfig): + Configuration for the Cloud Storage Fuse CSI + driver. + """ + + http_load_balancing: "HttpLoadBalancing" = proto.Field( + proto.MESSAGE, + number=1, + message="HttpLoadBalancing", + ) + horizontal_pod_autoscaling: "HorizontalPodAutoscaling" = proto.Field( + proto.MESSAGE, + number=2, + message="HorizontalPodAutoscaling", + ) + kubernetes_dashboard: "KubernetesDashboard" = proto.Field( + proto.MESSAGE, + number=3, + message="KubernetesDashboard", + ) + network_policy_config: "NetworkPolicyConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="NetworkPolicyConfig", + ) + istio_config: "IstioConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="IstioConfig", + ) + cloud_run_config: "CloudRunConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="CloudRunConfig", + ) + dns_cache_config: "DnsCacheConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="DnsCacheConfig", + ) + config_connector_config: "ConfigConnectorConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="ConfigConnectorConfig", + ) + gce_persistent_disk_csi_driver_config: "GcePersistentDiskCsiDriverConfig" = ( + proto.Field( + proto.MESSAGE, + number=11, + message="GcePersistentDiskCsiDriverConfig", + ) + ) + kalm_config: "KalmConfig" = proto.Field( + proto.MESSAGE, + number=12, + message="KalmConfig", + ) + gcp_filestore_csi_driver_config: "GcpFilestoreCsiDriverConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="GcpFilestoreCsiDriverConfig", + ) + gke_backup_agent_config: "GkeBackupAgentConfig" = proto.Field( + proto.MESSAGE, + number=16, + message="GkeBackupAgentConfig", + ) + gcs_fuse_csi_driver_config: "GcsFuseCsiDriverConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="GcsFuseCsiDriverConfig", + ) + + +class HttpLoadBalancing(proto.Message): + r"""Configuration options for the HTTP (L7) load balancing + controller addon, which makes it easy to set up HTTP load + balancers for services in a cluster. + + Attributes: + disabled (bool): + Whether the HTTP Load Balancing controller is + enabled in the cluster. When enabled, it runs a + small pod in the cluster that manages the load + balancers. 
+ """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class HorizontalPodAutoscaling(proto.Message): + r"""Configuration options for the horizontal pod autoscaling + feature, which increases or decreases the number of replica pods + a replication controller has based on the resource usage of the + existing pods. + + Attributes: + disabled (bool): + Whether the Horizontal Pod Autoscaling + feature is enabled in the cluster. When enabled, + it ensures that metrics are collected into + Stackdriver Monitoring. + """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class KubernetesDashboard(proto.Message): + r"""Configuration for the Kubernetes Dashboard. + + Attributes: + disabled (bool): + Whether the Kubernetes Dashboard is enabled + for this cluster. + """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class NetworkPolicyConfig(proto.Message): + r"""Configuration for NetworkPolicy. This only tracks whether the + addon is enabled or not on the Master, it does not track whether + network policy is enabled for the nodes. + + Attributes: + disabled (bool): + Whether NetworkPolicy is enabled for this + cluster. + """ + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class DnsCacheConfig(proto.Message): + r"""Configuration for NodeLocal DNSCache + + Attributes: + enabled (bool): + Whether NodeLocal DNSCache is enabled for + this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class KalmConfig(proto.Message): + r"""Configuration options for the KALM addon. + + Attributes: + enabled (bool): + Whether KALM is enabled for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GkeBackupAgentConfig(proto.Message): + r"""Configuration for the Backup for GKE Agent. + + Attributes: + enabled (bool): + Whether the Backup for GKE agent is enabled + for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class ConfigConnectorConfig(proto.Message): + r"""Configuration options for the Config Connector add-on. + + Attributes: + enabled (bool): + Whether Cloud Connector is enabled for this + cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GcePersistentDiskCsiDriverConfig(proto.Message): + r"""Configuration for the Compute Engine PD CSI driver. + + Attributes: + enabled (bool): + Whether the Compute Engine PD CSI driver is + enabled for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GcpFilestoreCsiDriverConfig(proto.Message): + r"""Configuration for the GCP Filestore CSI driver. + + Attributes: + enabled (bool): + Whether the GCP Filestore CSI driver is + enabled for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class GcsFuseCsiDriverConfig(proto.Message): + r"""Configuration for the Cloud Storage Fuse CSI driver. + + Attributes: + enabled (bool): + Whether the Cloud Storage Fuse CSI driver is + enabled for this cluster. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class PrivateClusterMasterGlobalAccessConfig(proto.Message): + r"""Configuration for controlling master global access settings. + + Attributes: + enabled (bool): + Whenever master is accessible globally or + not. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class PrivateClusterConfig(proto.Message): + r"""Configuration options for private clusters. 
+ + Attributes: + enable_private_nodes (bool): + Whether nodes have internal IP addresses + only. If enabled, all nodes are given only RFC + 1918 private addresses and communicate with the + master via private networking. + enable_private_endpoint (bool): + Whether the master's internal IP address is + used as the cluster endpoint. + master_ipv4_cidr_block (str): + The IP range in CIDR notation to use for the + hosted master network. This range will be used + for assigning internal IP addresses to the + master or set of masters, as well as the ILB + VIP. This range must not overlap with any other + ranges in use within the cluster's network. + private_endpoint (str): + Output only. The internal IP address of this + cluster's master endpoint. + public_endpoint (str): + Output only. The external IP address of this + cluster's master endpoint. + peering_name (str): + Output only. The peering name in the customer + VPC used by this cluster. + master_global_access_config (google.cloud.container_v1beta1.types.PrivateClusterMasterGlobalAccessConfig): + Controls master global access settings. + private_endpoint_subnetwork (str): + Subnet to provision the master's private endpoint during + cluster creation. Specified in + projects/\ */regions/*/subnetworks/\* format. + """ + + enable_private_nodes: bool = proto.Field( + proto.BOOL, + number=1, + ) + enable_private_endpoint: bool = proto.Field( + proto.BOOL, + number=2, + ) + master_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=3, + ) + private_endpoint: str = proto.Field( + proto.STRING, + number=4, + ) + public_endpoint: str = proto.Field( + proto.STRING, + number=5, + ) + peering_name: str = proto.Field( + proto.STRING, + number=7, + ) + master_global_access_config: "PrivateClusterMasterGlobalAccessConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="PrivateClusterMasterGlobalAccessConfig", + ) + private_endpoint_subnetwork: str = proto.Field( + proto.STRING, + number=10, + ) + + +class IstioConfig(proto.Message): + r"""Configuration options for Istio addon. + + Attributes: + disabled (bool): + Whether Istio is enabled for this cluster. + auth (google.cloud.container_v1beta1.types.IstioConfig.IstioAuthMode): + The specified Istio auth mode, either none, + or mutual TLS. + """ + + class IstioAuthMode(proto.Enum): + r"""Istio auth mode, + https://istio.io/docs/concepts/security/mutual-tls.html + + Values: + AUTH_NONE (0): + auth not enabled + AUTH_MUTUAL_TLS (1): + auth mutual TLS enabled + """ + AUTH_NONE = 0 + AUTH_MUTUAL_TLS = 1 + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + auth: IstioAuthMode = proto.Field( + proto.ENUM, + number=2, + enum=IstioAuthMode, + ) + + +class CloudRunConfig(proto.Message): + r"""Configuration options for the Cloud Run feature. + + Attributes: + disabled (bool): + Whether Cloud Run addon is enabled for this + cluster. + load_balancer_type (google.cloud.container_v1beta1.types.CloudRunConfig.LoadBalancerType): + Which load balancer type is installed for + Cloud Run. + """ + + class LoadBalancerType(proto.Enum): + r"""Load balancer type of ingress service of Cloud Run. + + Values: + LOAD_BALANCER_TYPE_UNSPECIFIED (0): + Load balancer type for Cloud Run is + unspecified. + LOAD_BALANCER_TYPE_EXTERNAL (1): + Install external load balancer for Cloud Run. + LOAD_BALANCER_TYPE_INTERNAL (2): + Install internal load balancer for Cloud Run. 
+ """ + LOAD_BALANCER_TYPE_UNSPECIFIED = 0 + LOAD_BALANCER_TYPE_EXTERNAL = 1 + LOAD_BALANCER_TYPE_INTERNAL = 2 + + disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + load_balancer_type: LoadBalancerType = proto.Field( + proto.ENUM, + number=3, + enum=LoadBalancerType, + ) + + +class MasterAuthorizedNetworksConfig(proto.Message): + r"""Configuration options for the master authorized networks + feature. Enabled master authorized networks will disallow all + external traffic to access Kubernetes master through HTTPS + except traffic from the given CIDR blocks, Google Compute Engine + Public IPs and Google Prod IPs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Whether or not master authorized networks is + enabled. + cidr_blocks (MutableSequence[google.cloud.container_v1beta1.types.MasterAuthorizedNetworksConfig.CidrBlock]): + cidr_blocks define up to 10 external networks that could + access Kubernetes master through HTTPS. + gcp_public_cidrs_access_enabled (bool): + Whether master is accessbile via Google + Compute Engine Public IP addresses. + + This field is a member of `oneof`_ ``_gcp_public_cidrs_access_enabled``. + """ + + class CidrBlock(proto.Message): + r"""CidrBlock contains an optional name and one CIDR block. + + Attributes: + display_name (str): + display_name is an optional field for users to identify CIDR + blocks. + cidr_block (str): + cidr_block must be specified in CIDR notation. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + cidr_block: str = proto.Field( + proto.STRING, + number=2, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + cidr_blocks: MutableSequence[CidrBlock] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=CidrBlock, + ) + gcp_public_cidrs_access_enabled: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + + +class LegacyAbac(proto.Message): + r"""Configuration for the legacy Attribute Based Access Control + authorization mode. + + Attributes: + enabled (bool): + Whether the ABAC authorizer is enabled for + this cluster. When enabled, identities in the + system, including service accounts, nodes, and + controllers, will have statically granted + permissions beyond those provided by the RBAC + configuration or IAM. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class NetworkPolicy(proto.Message): + r"""Configuration options for the NetworkPolicy feature. + https://kubernetes.io/docs/concepts/services-networking/networkpolicies/ + + Attributes: + provider (google.cloud.container_v1beta1.types.NetworkPolicy.Provider): + The selected network policy provider. + enabled (bool): + Whether network policy is enabled on the + cluster. + """ + + class Provider(proto.Enum): + r"""Allowed Network Policy providers. + + Values: + PROVIDER_UNSPECIFIED (0): + Not set + CALICO (1): + Tigera (Calico Felix). + """ + PROVIDER_UNSPECIFIED = 0 + CALICO = 1 + + provider: Provider = proto.Field( + proto.ENUM, + number=1, + enum=Provider, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class PodCIDROverprovisionConfig(proto.Message): + r"""[PRIVATE FIELD] Config for pod CIDR size overprovisioning. + + Attributes: + disable (bool): + Whether Pod CIDR overprovisioning is + disabled. Note: Pod CIDR overprovisioning is + enabled by default. 
+ """ + + disable: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class IPAllocationPolicy(proto.Message): + r"""Configuration for controlling how IPs are allocated in the + cluster. + + Attributes: + use_ip_aliases (bool): + Whether alias IPs will be used for pod IPs in the cluster. + This is used in conjunction with use_routes. It cannot be + true if use_routes is true. If both use_ip_aliases and + use_routes are false, then the server picks the default IP + allocation mode + create_subnetwork (bool): + Whether a new subnetwork will be created automatically for + the cluster. + + This field is only applicable when ``use_ip_aliases`` is + true. + subnetwork_name (str): + A custom subnetwork name to be used if ``create_subnetwork`` + is true. If this field is empty, then an automatic name will + be chosen for the new subnetwork. + cluster_ipv4_cidr (str): + This field is deprecated, use cluster_ipv4_cidr_block. + node_ipv4_cidr (str): + This field is deprecated, use node_ipv4_cidr_block. + services_ipv4_cidr (str): + This field is deprecated, use services_ipv4_cidr_block. + cluster_secondary_range_name (str): + The name of the secondary range to be used for the cluster + CIDR block. The secondary range will be used for pod IP + addresses. This must be an existing secondary range + associated with the cluster subnetwork. + + This field is only applicable with use_ip_aliases and + create_subnetwork is false. + services_secondary_range_name (str): + The name of the secondary range to be used as for the + services CIDR block. The secondary range will be used for + service ClusterIPs. This must be an existing secondary range + associated with the cluster subnetwork. + + This field is only applicable with use_ip_aliases and + create_subnetwork is false. + cluster_ipv4_cidr_block (str): + The IP address range for the cluster pod IPs. If this field + is set, then ``cluster.cluster_ipv4_cidr`` must be left + blank. + + This field is only applicable when ``use_ip_aliases`` is + true. + + Set to blank to have a range chosen with the default size. + + Set to /netmask (e.g. ``/14``) to have a range chosen with a + specific netmask. + + Set to a + `CIDR `__ + notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private + networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, + ``192.168.0.0/16``) to pick a specific range to use. + node_ipv4_cidr_block (str): + The IP address range of the instance IPs in this cluster. + + This is applicable only if ``create_subnetwork`` is true. + + Set to blank to have a range chosen with the default size. + + Set to /netmask (e.g. ``/14``) to have a range chosen with a + specific netmask. + + Set to a + `CIDR `__ + notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private + networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, + ``192.168.0.0/16``) to pick a specific range to use. + services_ipv4_cidr_block (str): + The IP address range of the services IPs in this cluster. If + blank, a range will be automatically chosen with the default + size. + + This field is only applicable when ``use_ip_aliases`` is + true. + + Set to blank to have a range chosen with the default size. + + Set to /netmask (e.g. ``/14``) to have a range chosen with a + specific netmask. + + Set to a + `CIDR `__ + notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private + networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``, + ``192.168.0.0/16``) to pick a specific range to use. 
+        allow_route_overlap (bool):
+            If true, allow allocation of cluster CIDR ranges that
+            overlap with certain kinds of network routes. By default we
+            do not allow cluster CIDR ranges to intersect with any user
+            declared routes. With allow_route_overlap == true, we allow
+            overlapping with CIDR ranges that are larger than the
+            cluster CIDR range.
+
+            If this field is set to true, then cluster and services
+            CIDRs must be fully-specified (e.g. ``10.96.0.0/14``, but
+            not ``/14``), which means:
+
+            1) When ``use_ip_aliases`` is true,
+               ``cluster_ipv4_cidr_block`` and
+               ``services_ipv4_cidr_block`` must be fully-specified.
+            2) When ``use_ip_aliases`` is false,
+               ``cluster.cluster_ipv4_cidr`` must be fully-specified.
+        tpu_ipv4_cidr_block (str):
+            The IP address range of the Cloud TPUs in this cluster. If
+            unspecified, a range will be automatically chosen with the
+            default size.
+
+            This field is only applicable when ``use_ip_aliases`` is
+            true.
+
+            If unspecified, the range will use the default size.
+
+            Set to /netmask (e.g. ``/14``) to have a range chosen with a
+            specific netmask.
+
+            Set to a
+            `CIDR <http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing>`__
+            notation (e.g. ``10.96.0.0/14``) from the RFC-1918 private
+            networks (e.g. ``10.0.0.0/8``, ``172.16.0.0/12``,
+            ``192.168.0.0/16``) to pick a specific range to use. This
+            field is deprecated, use cluster.tpu_config.ipv4_cidr_block
+            instead.
+        use_routes (bool):
+            Whether routes will be used for pod IPs in the cluster. This
+            is used in conjunction with use_ip_aliases. It cannot be
+            true if use_ip_aliases is true. If both use_ip_aliases and
+            use_routes are false, then the server picks the default IP
+            allocation mode.
+        stack_type (google.cloud.container_v1beta1.types.IPAllocationPolicy.StackType):
+            IP stack type
+        ipv6_access_type (google.cloud.container_v1beta1.types.IPAllocationPolicy.IPv6AccessType):
+            The ipv6 access type (internal or external) when
+            create_subnetwork is true
+        pod_cidr_overprovision_config (google.cloud.container_v1beta1.types.PodCIDROverprovisionConfig):
+            [PRIVATE FIELD] Pod CIDR size overprovisioning config for
+            the cluster.
+
+            Pod CIDR size per node depends on max_pods_per_node. By
+            default, the value of max_pods_per_node is doubled and then
+            rounded off to the next power of 2 to get the size of pod
+            CIDR block per node. Example: max_pods_per_node of 30 would
+            result in 64 IPs (/26).
+
+            This config can disable the doubling of IPs (we still round
+            off to the next power of 2). Example: max_pods_per_node of
+            30 will result in 32 IPs (/27) when overprovisioning is
+            disabled.
+        subnet_ipv6_cidr_block (str):
+            Output only. [Output only] The subnet's IPv6 CIDR block used
+            by nodes and pods.
+        services_ipv6_cidr_block (str):
+            Output only. [Output only] The services IPv6 CIDR block for
+            the cluster.
+        additional_pod_ranges_config (google.cloud.container_v1beta1.types.AdditionalPodRangesConfig):
+            Output only. [Output only] The additional pod ranges that
+            are added to the cluster. These pod ranges can be used by
+            new node pools to allocate pod IPs automatically. Once the
+            range is removed it will not show up in IPAllocationPolicy.
+        default_pod_ipv4_range_utilization (float):
+            Output only. [Output only] The utilization of the cluster
+            default IPv4 range for the pod. The ratio is Usage/[Total
+            number of IPs in the secondary range],
+            Usage=numNodes\ *numZones*\ podIPsPerNode.
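A minimal sketch of the VPC-native case described above, reusing two pre-created secondary ranges; the range names are placeholder assumptions:

# Sketch: alias-IP allocation policy backed by existing secondary ranges.
from google.cloud import container_v1beta1

ip_policy = container_v1beta1.IPAllocationPolicy(
    use_ip_aliases=True,  # mutually exclusive with use_routes
    cluster_secondary_range_name="pods-range",       # existing range for pod IPs (placeholder)
    services_secondary_range_name="services-range",  # existing range for ClusterIPs (placeholder)
    stack_type=container_v1beta1.IPAllocationPolicy.StackType.IPV4,
)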
+ """ + + class StackType(proto.Enum): + r"""Possible values for IP stack type + + Values: + STACK_TYPE_UNSPECIFIED (0): + By default, the clusters will be IPV4 only + IPV4 (1): + The value used if the cluster is a IPV4 only + IPV4_IPV6 (2): + The value used if the cluster is a dual stack + cluster + """ + STACK_TYPE_UNSPECIFIED = 0 + IPV4 = 1 + IPV4_IPV6 = 2 + + class IPv6AccessType(proto.Enum): + r"""IPv6 access type + + Values: + IPV6_ACCESS_TYPE_UNSPECIFIED (0): + Default value, will be defaulted as type + external. + INTERNAL (1): + Access type internal (all v6 addresses are + internal IPs) + EXTERNAL (2): + Access type external (all v6 addresses are + external IPs) + """ + IPV6_ACCESS_TYPE_UNSPECIFIED = 0 + INTERNAL = 1 + EXTERNAL = 2 + + use_ip_aliases: bool = proto.Field( + proto.BOOL, + number=1, + ) + create_subnetwork: bool = proto.Field( + proto.BOOL, + number=2, + ) + subnetwork_name: str = proto.Field( + proto.STRING, + number=3, + ) + cluster_ipv4_cidr: str = proto.Field( + proto.STRING, + number=4, + ) + node_ipv4_cidr: str = proto.Field( + proto.STRING, + number=5, + ) + services_ipv4_cidr: str = proto.Field( + proto.STRING, + number=6, + ) + cluster_secondary_range_name: str = proto.Field( + proto.STRING, + number=7, + ) + services_secondary_range_name: str = proto.Field( + proto.STRING, + number=8, + ) + cluster_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=9, + ) + node_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=10, + ) + services_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=11, + ) + allow_route_overlap: bool = proto.Field( + proto.BOOL, + number=12, + ) + tpu_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=13, + ) + use_routes: bool = proto.Field( + proto.BOOL, + number=15, + ) + stack_type: StackType = proto.Field( + proto.ENUM, + number=16, + enum=StackType, + ) + ipv6_access_type: IPv6AccessType = proto.Field( + proto.ENUM, + number=17, + enum=IPv6AccessType, + ) + pod_cidr_overprovision_config: "PodCIDROverprovisionConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="PodCIDROverprovisionConfig", + ) + subnet_ipv6_cidr_block: str = proto.Field( + proto.STRING, + number=22, + ) + services_ipv6_cidr_block: str = proto.Field( + proto.STRING, + number=23, + ) + additional_pod_ranges_config: "AdditionalPodRangesConfig" = proto.Field( + proto.MESSAGE, + number=24, + message="AdditionalPodRangesConfig", + ) + default_pod_ipv4_range_utilization: float = proto.Field( + proto.DOUBLE, + number=25, + ) + + +class BinaryAuthorization(proto.Message): + r"""Configuration for Binary Authorization. + + Attributes: + enabled (bool): + This field is deprecated. Leave this unset and instead + configure BinaryAuthorization using evaluation_mode. If + evaluation_mode is set to anything other than + EVALUATION_MODE_UNSPECIFIED, this field is ignored. + evaluation_mode (google.cloud.container_v1beta1.types.BinaryAuthorization.EvaluationMode): + Mode of operation for binauthz policy + evaluation. If unspecified, defaults to + DISABLED. + policy_bindings (MutableSequence[google.cloud.container_v1beta1.types.BinaryAuthorization.PolicyBinding]): + Optional. Binauthz policies that apply to + this cluster. + """ + + class EvaluationMode(proto.Enum): + r"""Binary Authorization mode of operation. 
+ + Values: + EVALUATION_MODE_UNSPECIFIED (0): + Default value + DISABLED (1): + Disable BinaryAuthorization + PROJECT_SINGLETON_POLICY_ENFORCE (2): + Enforce Kubernetes admission requests with + BinaryAuthorization using the project's + singleton policy. This is equivalent to setting + the enabled boolean to true. + POLICY_BINDINGS (5): + Use Binary Authorization with the policies specified in + policy_bindings. + POLICY_BINDINGS_AND_PROJECT_SINGLETON_POLICY_ENFORCE (6): + Use Binary Authorization with the policies specified in + policy_bindings, and also with the project's singleton + policy in enforcement mode. + """ + EVALUATION_MODE_UNSPECIFIED = 0 + DISABLED = 1 + PROJECT_SINGLETON_POLICY_ENFORCE = 2 + POLICY_BINDINGS = 5 + POLICY_BINDINGS_AND_PROJECT_SINGLETON_POLICY_ENFORCE = 6 + + class PolicyBinding(proto.Message): + r"""Binauthz policy that applies to this cluster. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The relative resource name of the binauthz platform policy + to audit. GKE platform policies have the following format: + ``projects/{project_number}/platforms/gke/policies/{policy_id}``. + + This field is a member of `oneof`_ ``_name``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + evaluation_mode: EvaluationMode = proto.Field( + proto.ENUM, + number=2, + enum=EvaluationMode, + ) + policy_bindings: MutableSequence[PolicyBinding] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=PolicyBinding, + ) + + +class PodSecurityPolicyConfig(proto.Message): + r"""Configuration for the PodSecurityPolicy feature. + + Attributes: + enabled (bool): + Enable the PodSecurityPolicy controller for + this cluster. If enabled, pods must be valid + under a PodSecurityPolicy to be created. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class AuthenticatorGroupsConfig(proto.Message): + r"""Configuration for returning group information from + authenticators. + + Attributes: + enabled (bool): + Whether this cluster should return group + membership lookups during authentication using a + group of security groups. + security_group (str): + The name of the security group-of-groups to + be used. Only relevant if enabled = true. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + security_group: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ClusterTelemetry(proto.Message): + r"""Telemetry integration for the cluster. + + Attributes: + type_ (google.cloud.container_v1beta1.types.ClusterTelemetry.Type): + Type of the integration. + """ + + class Type(proto.Enum): + r"""Type of the integration. + + Values: + UNSPECIFIED (0): + Not set. + DISABLED (1): + Monitoring integration is disabled. + ENABLED (2): + Monitoring integration is enabled. + SYSTEM_ONLY (3): + Only system components are monitored and + logged. + """ + UNSPECIFIED = 0 + DISABLED = 1 + ENABLED = 2 + SYSTEM_ONLY = 3 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + + +class Cluster(proto.Message): + r"""A Google Kubernetes Engine cluster. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of this cluster. The name must be unique within + this project and location (e.g. 
zone or region), and can be
+            up to 40 characters with the following restrictions:
+
+            - Lowercase letters, numbers, and hyphens only.
+            - Must start with a letter.
+            - Must end with a number or a letter.
+        description (str):
+            An optional description of this cluster.
+        initial_node_count (int):
+            The number of nodes to create in this cluster. You must
+            ensure that your Compute Engine `resource
+            quota <https://cloud.google.com/compute/quotas>`__ is
+            sufficient for this number of instances. You must also have
+            available firewall and routes quota. For requests, this
+            field should only be used in lieu of a "node_pool" object,
+            since this configuration (along with the "node_config") will
+            be used to create a "NodePool" object with an auto-generated
+            name. Do not use this and a node_pool at the same time.
+
+            This field is deprecated, use node_pool.initial_node_count
+            instead.
+        node_config (google.cloud.container_v1beta1.types.NodeConfig):
+            Parameters used in creating the cluster's nodes. For
+            requests, this field should only be used in lieu of a
+            "node_pool" object, since this configuration (along with the
+            "initial_node_count") will be used to create a "NodePool"
+            object with an auto-generated name. Do not use this and a
+            node_pool at the same time. For responses, this field will
+            be populated with the node configuration of the first node
+            pool. (For configuration of each node pool, see
+            ``node_pool.config``)
+
+            If unspecified, the defaults are used. This field is
+            deprecated, use node_pool.config instead.
+        master_auth (google.cloud.container_v1beta1.types.MasterAuth):
+            The authentication information for accessing the master
+            endpoint. If unspecified, the defaults are used: For
+            clusters before v1.12, if master_auth is unspecified,
+            ``username`` will be set to "admin", a random password will
+            be generated, and a client certificate will be issued.
+        logging_service (str):
+            The logging service the cluster should use to write logs.
+            Currently available options:
+
+            - ``logging.googleapis.com/kubernetes`` - The Cloud Logging
+              service with a Kubernetes-native resource model
+            - ``logging.googleapis.com`` - The legacy Cloud Logging
+              service (no longer available as of GKE 1.15).
+            - ``none`` - no logs will be exported from the cluster.
+
+            If left as an empty
+            string,\ ``logging.googleapis.com/kubernetes`` will be used
+            for GKE 1.14+ or ``logging.googleapis.com`` for earlier
+            versions.
+        monitoring_service (str):
+            The monitoring service the cluster should use to write
+            metrics. Currently available options:
+
+            - "monitoring.googleapis.com/kubernetes" - The Cloud
+              Monitoring service with a Kubernetes-native resource
+              model
+            - ``monitoring.googleapis.com`` - The legacy Cloud
+              Monitoring service (no longer available as of GKE 1.15).
+            - ``none`` - No metrics will be exported from the cluster.
+
+            If left as an empty
+            string,\ ``monitoring.googleapis.com/kubernetes`` will be
+            used for GKE 1.14+ or ``monitoring.googleapis.com`` for
+            earlier versions.
+        network (str):
+            The name of the Google Compute Engine
+            `network <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__
+            to which the cluster is connected. If left unspecified, the
+            ``default`` network will be used. On output this shows the
+            network ID instead of the name.
+        cluster_ipv4_cidr (str):
+            The IP address range of the container pods in this cluster,
+            in
+            `CIDR <http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing>`__
+            notation (e.g. ``10.96.0.0/14``). Leave blank to have one
+            automatically chosen or specify a ``/14`` block in
+            ``10.0.0.0/8``.
+        addons_config (google.cloud.container_v1beta1.types.AddonsConfig):
+            Configurations for the various addons
+            available to run in the cluster.
+        subnetwork (str):
+            The name of the Google Compute Engine
+            `subnetwork <https://cloud.google.com/compute/docs/subnetworks>`__
+            to which the cluster is connected. On output this shows the
+            subnetwork ID instead of the name.
+        node_pools (MutableSequence[google.cloud.container_v1beta1.types.NodePool]):
+            The node pools associated with this cluster. This field
+            should not be set if "node_config" or "initial_node_count"
+            are specified.
+        locations (MutableSequence[str]):
+            The list of Google Compute Engine
+            `zones <https://cloud.google.com/compute/docs/zones#available>`__
+            in which the cluster's nodes should be located.
+
+            This field provides a default value if
+            `NodePool.Locations <https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters.nodePools#NodePool.FIELDS.locations>`__
+            are not specified during node pool creation.
+
+            Warning: changing cluster locations will update the
+            `NodePool.Locations <https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters.nodePools#NodePool.FIELDS.locations>`__
+            of all node pools and will result in nodes being added
+            and/or removed.
+        enable_kubernetes_alpha (bool):
+            Kubernetes alpha features are enabled on this
+            cluster. This includes alpha API groups (e.g.
+            v1beta1) and features that may not be production
+            ready in the kubernetes version of the master
+            and nodes. The cluster has no SLA for uptime and
+            master/node upgrades are disabled. Alpha enabled
+            clusters are automatically deleted thirty days
+            after creation.
+        enable_k8s_beta_apis (google.cloud.container_v1beta1.types.K8sBetaAPIConfig):
+            Kubernetes open source beta apis enabled on
+            the cluster. Only beta apis.
+        resource_labels (MutableMapping[str, str]):
+            The resource labels for the cluster to use to
+            annotate any related Google Compute Engine
+            resources.
+        label_fingerprint (str):
+            The fingerprint of the set of labels for this
+            cluster.
+        legacy_abac (google.cloud.container_v1beta1.types.LegacyAbac):
+            Configuration for the legacy ABAC
+            authorization mode.
+        network_policy (google.cloud.container_v1beta1.types.NetworkPolicy):
+            Configuration options for the NetworkPolicy
+            feature.
+        ip_allocation_policy (google.cloud.container_v1beta1.types.IPAllocationPolicy):
+            Configuration for cluster IP allocation.
+        master_authorized_networks_config (google.cloud.container_v1beta1.types.MasterAuthorizedNetworksConfig):
+            The configuration options for master
+            authorized networks feature.
+        maintenance_policy (google.cloud.container_v1beta1.types.MaintenancePolicy):
+            Configure the maintenance policy for this
+            cluster.
+        binary_authorization (google.cloud.container_v1beta1.types.BinaryAuthorization):
+            Configuration for Binary Authorization.
+        pod_security_policy_config (google.cloud.container_v1beta1.types.PodSecurityPolicyConfig):
+            Configuration for the PodSecurityPolicy
+            feature.
+        autoscaling (google.cloud.container_v1beta1.types.ClusterAutoscaling):
+            Cluster-level autoscaling configuration.
+        network_config (google.cloud.container_v1beta1.types.NetworkConfig):
+            Configuration for cluster networking.
+        private_cluster (bool):
+            If this is a private cluster setup. Private clusters are
+            clusters that, by default have no external IP addresses on
+            the nodes and where nodes and the master communicate over
+            private IP addresses. This field is deprecated, use
+            private_cluster_config.enable_private_nodes instead.
+        master_ipv4_cidr_block (str):
+            The IP prefix in CIDR notation to use for the hosted master
+            network. This prefix will be used for assigning private IP
+            addresses to the master or set of masters, as well as the
+            ILB VIP. This field is deprecated, use
+            private_cluster_config.master_ipv4_cidr_block instead.
+        default_max_pods_constraint (google.cloud.container_v1beta1.types.MaxPodsConstraint):
+            The default constraint on the maximum number
+            of pods that can be run simultaneously on a node
+            in the node pool of this cluster. Only honored
+            if cluster created with IP Alias support.
+        resource_usage_export_config (google.cloud.container_v1beta1.types.ResourceUsageExportConfig):
+            Configuration for exporting resource usages.
+            Resource usage export is disabled when this
+            config is unspecified.
+        authenticator_groups_config (google.cloud.container_v1beta1.types.AuthenticatorGroupsConfig):
+            Configuration controlling RBAC group
+            membership information.
+        private_cluster_config (google.cloud.container_v1beta1.types.PrivateClusterConfig):
+            Configuration for private cluster.
+        vertical_pod_autoscaling (google.cloud.container_v1beta1.types.VerticalPodAutoscaling):
+            Cluster-level Vertical Pod Autoscaling
+            configuration.
+        shielded_nodes (google.cloud.container_v1beta1.types.ShieldedNodes):
+            Shielded Nodes configuration.
+        release_channel (google.cloud.container_v1beta1.types.ReleaseChannel):
+            Release channel configuration. If left
+            unspecified on cluster creation and a version is
+            specified, the cluster is enrolled in the most
+            mature release channel where the version is
+            available (first checking STABLE, then REGULAR,
+            and finally RAPID). Otherwise, if no release
+            channel configuration and no version is
+            specified, the cluster is enrolled in the
+            REGULAR channel with its default version.
+        workload_identity_config (google.cloud.container_v1beta1.types.WorkloadIdentityConfig):
+            Configuration for the use of Kubernetes
+            Service Accounts in GCP IAM policies.
+        workload_certificates (google.cloud.container_v1beta1.types.WorkloadCertificates):
+            Configuration for issuance of mTLS keys and
+            certificates to Kubernetes pods.
+        mesh_certificates (google.cloud.container_v1beta1.types.MeshCertificates):
+            Configuration for issuance of mTLS keys and
+            certificates to Kubernetes pods.
+        workload_alts_config (google.cloud.container_v1beta1.types.WorkloadALTSConfig):
+            Configuration for direct-path (via ALTS) with
+            workload identity.
+        cost_management_config (google.cloud.container_v1beta1.types.CostManagementConfig):
+            Configuration for the fine-grained cost
+            management feature.
+        cluster_telemetry (google.cloud.container_v1beta1.types.ClusterTelemetry):
+            Telemetry integration for the cluster.
+        tpu_config (google.cloud.container_v1beta1.types.TpuConfig):
+            Configuration for Cloud TPU support.
+        notification_config (google.cloud.container_v1beta1.types.NotificationConfig):
+            Notification configuration of the cluster.
+        confidential_nodes (google.cloud.container_v1beta1.types.ConfidentialNodes):
+            Configuration of Confidential Nodes.
+            All the nodes in the cluster will be
+            Confidential VM once enabled.
+        identity_service_config (google.cloud.container_v1beta1.types.IdentityServiceConfig):
+            Configuration for Identity Service component.
+        self_link (str):
+            [Output only] Server-defined URL for the resource.
+        zone (str):
+            [Output only] The name of the Google Compute Engine
+            `zone <https://cloud.google.com/compute/docs/zones#available>`__
+            in which the cluster resides. This field is deprecated, use
+            location instead.
+        endpoint (str):
+            [Output only] The IP address of this cluster's master
+            endpoint. The endpoint can be accessed from the internet at
+            ``https://username:password@endpoint/``.
+
+            See the ``masterAuth`` property of this resource for
+            username and password information.
+        initial_cluster_version (str):
+            The initial Kubernetes version for this
+            cluster. Valid versions are those found in
+            validMasterVersions returned by getServerConfig.
+            The version can be upgraded over time; such
+            upgrades are reflected in currentMasterVersion
+            and currentNodeVersion.
+
+            Users may specify either explicit versions
+            offered by Kubernetes Engine or version aliases,
+            which have the following behavior:
+
+            - "latest": picks the highest valid Kubernetes
+              version
+            - "1.X": picks the highest valid patch+gke.N
+              patch in the 1.X version
+            - "1.X.Y": picks the highest valid gke.N patch
+              in the 1.X.Y version
+            - "1.X.Y-gke.N": picks an explicit Kubernetes
+              version
+            - "","-": picks the default Kubernetes version
+        current_master_version (str):
+            [Output only] The current software version of the master
+            endpoint.
+        current_node_version (str):
+            [Output only] Deprecated, use
+            `NodePool.version <https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1/projects.locations.clusters.nodePools>`__
+            instead. The current version of the node software
+            components. If they are currently at multiple versions
+            because they're in the process of being upgraded, this
+            reflects the minimum version of all nodes.
+        create_time (str):
+            [Output only] The time the cluster was created, in
+            `RFC3339 <https://www.ietf.org/rfc/rfc3339.txt>`__ text
+            format.
+        status (google.cloud.container_v1beta1.types.Cluster.Status):
+            [Output only] The current status of this cluster.
+        status_message (str):
+            [Output only] Deprecated. Use conditions instead. Additional
+            information about the current status of this cluster, if
+            available.
+        node_ipv4_cidr_size (int):
+            [Output only] The size of the address space on each node for
+            hosting containers. This is provisioned from within the
+            ``container_ipv4_cidr`` range. This field will only be set
+            when cluster is in route-based network mode.
+        services_ipv4_cidr (str):
+            [Output only] The IP address range of the Kubernetes
+            services in this cluster, in
+            `CIDR <http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing>`__
+            notation (e.g. ``1.2.3.4/29``). Service addresses are
+            typically put in the last ``/16`` from the container CIDR.
+        instance_group_urls (MutableSequence[str]):
+            Deprecated. Use node_pools.instance_group_urls.
+        current_node_count (int):
+            [Output only] The number of nodes currently in the cluster.
+            Deprecated. Call Kubernetes API directly to retrieve node
+            information.
+        expire_time (str):
+            [Output only] The time the cluster will be automatically
+            deleted in
+            `RFC3339 <https://www.ietf.org/rfc/rfc3339.txt>`__ text
+            format.
+        location (str):
+            [Output only] The name of the Google Compute Engine
+            `zone <https://cloud.google.com/compute/docs/regions-zones/regions-zones#available>`__
+            or
+            `region <https://cloud.google.com/compute/docs/regions-zones/regions-zones#available>`__
+            in which the cluster resides.
+        enable_tpu (bool):
+            Enable the ability to use Cloud TPUs in this cluster. This
+            field is deprecated, use tpu_config.enabled instead.
+        tpu_ipv4_cidr_block (str):
+            [Output only] The IP address range of the Cloud TPUs in this
+            cluster, in
+            `CIDR <http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing>`__
+            notation (e.g. ``1.2.3.4/29``).
+        database_encryption (google.cloud.container_v1beta1.types.DatabaseEncryption):
+            Configuration of etcd encryption.
+        conditions (MutableSequence[google.cloud.container_v1beta1.types.StatusCondition]):
+            Which conditions caused the current cluster
+            state.
+        master (google.cloud.container_v1beta1.types.Master):
+            Configuration for master components.
+        autopilot (google.cloud.container_v1beta1.types.Autopilot):
+            Autopilot configuration for the cluster.
+        id (str):
+            Output only. Unique id for the cluster.
+        node_pool_defaults (google.cloud.container_v1beta1.types.NodePoolDefaults):
+            Default NodePool settings for the entire
+            cluster. These settings are overridden if
+            specified on the specific NodePool object.
+ + This field is a member of `oneof`_ ``_node_pool_defaults``. + logging_config (google.cloud.container_v1beta1.types.LoggingConfig): + Logging configuration for the cluster. + monitoring_config (google.cloud.container_v1beta1.types.MonitoringConfig): + Monitoring configuration for the cluster. + node_pool_auto_config (google.cloud.container_v1beta1.types.NodePoolAutoConfig): + Node pool configs that apply to all + auto-provisioned node pools in autopilot + clusters and node auto-provisioning enabled + clusters. + protect_config (google.cloud.container_v1beta1.types.ProtectConfig): + Deprecated: Use SecurityPostureConfig + instead. Enable/Disable Protect API features for + the cluster. + + This field is a member of `oneof`_ ``_protect_config``. + etag (str): + This checksum is computed by the server based + on the value of cluster fields, and may be sent + on update requests to ensure the client has an + up-to-date value before proceeding. + fleet (google.cloud.container_v1beta1.types.Fleet): + Fleet information for the cluster. + security_posture_config (google.cloud.container_v1beta1.types.SecurityPostureConfig): + Enable/Disable Security Posture API features + for the cluster. + """ + + class Status(proto.Enum): + r"""The current status of the cluster. + + Values: + STATUS_UNSPECIFIED (0): + Not set. + PROVISIONING (1): + The PROVISIONING state indicates the cluster + is being created. + RUNNING (2): + The RUNNING state indicates the cluster has + been created and is fully usable. + RECONCILING (3): + The RECONCILING state indicates that some work is actively + being done on the cluster, such as upgrading the master or + node software. Details can be found in the ``statusMessage`` + field. + STOPPING (4): + The STOPPING state indicates the cluster is + being deleted. + ERROR (5): + The ERROR state indicates the cluster may be unusable. + Details can be found in the ``statusMessage`` field. + DEGRADED (6): + The DEGRADED state indicates the cluster requires user + action to restore full functionality. Details can be found + in the ``statusMessage`` field. 
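A minimal end-to-end sketch tying the Cluster message and its Status enum together: create a small cluster, then read back its state. The project path and cluster name are placeholder assumptions, and create_cluster is asynchronous, so a fresh cluster reports PROVISIONING until its operation completes:

# Sketch: create a cluster, then read back its status.
from google.cloud import container_v1beta1

client = container_v1beta1.ClusterManagerClient()
parent = "projects/my-project/locations/us-central1"  # placeholder

client.create_cluster(
    request={
        "parent": parent,
        "cluster": container_v1beta1.Cluster(
            name="example-cluster",  # placeholder
            initial_node_count=3,
        ),
    }
)

cluster = client.get_cluster(request={"name": f"{parent}/clusters/example-cluster"})
if cluster.status == container_v1beta1.Cluster.Status.RUNNING:
    print("cluster is fully usable")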
+ """ + STATUS_UNSPECIFIED = 0 + PROVISIONING = 1 + RUNNING = 2 + RECONCILING = 3 + STOPPING = 4 + ERROR = 5 + DEGRADED = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + initial_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + node_config: "NodeConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="NodeConfig", + ) + master_auth: "MasterAuth" = proto.Field( + proto.MESSAGE, + number=5, + message="MasterAuth", + ) + logging_service: str = proto.Field( + proto.STRING, + number=6, + ) + monitoring_service: str = proto.Field( + proto.STRING, + number=7, + ) + network: str = proto.Field( + proto.STRING, + number=8, + ) + cluster_ipv4_cidr: str = proto.Field( + proto.STRING, + number=9, + ) + addons_config: "AddonsConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="AddonsConfig", + ) + subnetwork: str = proto.Field( + proto.STRING, + number=11, + ) + node_pools: MutableSequence["NodePool"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="NodePool", + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + enable_kubernetes_alpha: bool = proto.Field( + proto.BOOL, + number=14, + ) + enable_k8s_beta_apis: "K8sBetaAPIConfig" = proto.Field( + proto.MESSAGE, + number=143, + message="K8sBetaAPIConfig", + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=15, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=16, + ) + legacy_abac: "LegacyAbac" = proto.Field( + proto.MESSAGE, + number=18, + message="LegacyAbac", + ) + network_policy: "NetworkPolicy" = proto.Field( + proto.MESSAGE, + number=19, + message="NetworkPolicy", + ) + ip_allocation_policy: "IPAllocationPolicy" = proto.Field( + proto.MESSAGE, + number=20, + message="IPAllocationPolicy", + ) + master_authorized_networks_config: "MasterAuthorizedNetworksConfig" = proto.Field( + proto.MESSAGE, + number=22, + message="MasterAuthorizedNetworksConfig", + ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=23, + message="MaintenancePolicy", + ) + binary_authorization: "BinaryAuthorization" = proto.Field( + proto.MESSAGE, + number=24, + message="BinaryAuthorization", + ) + pod_security_policy_config: "PodSecurityPolicyConfig" = proto.Field( + proto.MESSAGE, + number=25, + message="PodSecurityPolicyConfig", + ) + autoscaling: "ClusterAutoscaling" = proto.Field( + proto.MESSAGE, + number=26, + message="ClusterAutoscaling", + ) + network_config: "NetworkConfig" = proto.Field( + proto.MESSAGE, + number=27, + message="NetworkConfig", + ) + private_cluster: bool = proto.Field( + proto.BOOL, + number=28, + ) + master_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=29, + ) + default_max_pods_constraint: "MaxPodsConstraint" = proto.Field( + proto.MESSAGE, + number=30, + message="MaxPodsConstraint", + ) + resource_usage_export_config: "ResourceUsageExportConfig" = proto.Field( + proto.MESSAGE, + number=33, + message="ResourceUsageExportConfig", + ) + authenticator_groups_config: "AuthenticatorGroupsConfig" = proto.Field( + proto.MESSAGE, + number=34, + message="AuthenticatorGroupsConfig", + ) + private_cluster_config: "PrivateClusterConfig" = proto.Field( + proto.MESSAGE, + number=37, + message="PrivateClusterConfig", + ) + vertical_pod_autoscaling: "VerticalPodAutoscaling" = proto.Field( + proto.MESSAGE, + number=39, + message="VerticalPodAutoscaling", + ) + shielded_nodes: 
"ShieldedNodes" = proto.Field( + proto.MESSAGE, + number=40, + message="ShieldedNodes", + ) + release_channel: "ReleaseChannel" = proto.Field( + proto.MESSAGE, + number=41, + message="ReleaseChannel", + ) + workload_identity_config: "WorkloadIdentityConfig" = proto.Field( + proto.MESSAGE, + number=43, + message="WorkloadIdentityConfig", + ) + workload_certificates: "WorkloadCertificates" = proto.Field( + proto.MESSAGE, + number=52, + message="WorkloadCertificates", + ) + mesh_certificates: "MeshCertificates" = proto.Field( + proto.MESSAGE, + number=67, + message="MeshCertificates", + ) + workload_alts_config: "WorkloadALTSConfig" = proto.Field( + proto.MESSAGE, + number=53, + message="WorkloadALTSConfig", + ) + cost_management_config: "CostManagementConfig" = proto.Field( + proto.MESSAGE, + number=45, + message="CostManagementConfig", + ) + cluster_telemetry: "ClusterTelemetry" = proto.Field( + proto.MESSAGE, + number=46, + message="ClusterTelemetry", + ) + tpu_config: "TpuConfig" = proto.Field( + proto.MESSAGE, + number=47, + message="TpuConfig", + ) + notification_config: "NotificationConfig" = proto.Field( + proto.MESSAGE, + number=49, + message="NotificationConfig", + ) + confidential_nodes: "ConfidentialNodes" = proto.Field( + proto.MESSAGE, + number=50, + message="ConfidentialNodes", + ) + identity_service_config: "IdentityServiceConfig" = proto.Field( + proto.MESSAGE, + number=54, + message="IdentityServiceConfig", + ) + self_link: str = proto.Field( + proto.STRING, + number=100, + ) + zone: str = proto.Field( + proto.STRING, + number=101, + ) + endpoint: str = proto.Field( + proto.STRING, + number=102, + ) + initial_cluster_version: str = proto.Field( + proto.STRING, + number=103, + ) + current_master_version: str = proto.Field( + proto.STRING, + number=104, + ) + current_node_version: str = proto.Field( + proto.STRING, + number=105, + ) + create_time: str = proto.Field( + proto.STRING, + number=106, + ) + status: Status = proto.Field( + proto.ENUM, + number=107, + enum=Status, + ) + status_message: str = proto.Field( + proto.STRING, + number=108, + ) + node_ipv4_cidr_size: int = proto.Field( + proto.INT32, + number=109, + ) + services_ipv4_cidr: str = proto.Field( + proto.STRING, + number=110, + ) + instance_group_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=111, + ) + current_node_count: int = proto.Field( + proto.INT32, + number=112, + ) + expire_time: str = proto.Field( + proto.STRING, + number=113, + ) + location: str = proto.Field( + proto.STRING, + number=114, + ) + enable_tpu: bool = proto.Field( + proto.BOOL, + number=115, + ) + tpu_ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=116, + ) + database_encryption: "DatabaseEncryption" = proto.Field( + proto.MESSAGE, + number=38, + message="DatabaseEncryption", + ) + conditions: MutableSequence["StatusCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=118, + message="StatusCondition", + ) + master: "Master" = proto.Field( + proto.MESSAGE, + number=124, + message="Master", + ) + autopilot: "Autopilot" = proto.Field( + proto.MESSAGE, + number=128, + message="Autopilot", + ) + id: str = proto.Field( + proto.STRING, + number=129, + ) + node_pool_defaults: "NodePoolDefaults" = proto.Field( + proto.MESSAGE, + number=131, + optional=True, + message="NodePoolDefaults", + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=132, + message="LoggingConfig", + ) + monitoring_config: "MonitoringConfig" = proto.Field( + proto.MESSAGE, + number=133, + 
message="MonitoringConfig", + ) + node_pool_auto_config: "NodePoolAutoConfig" = proto.Field( + proto.MESSAGE, + number=136, + message="NodePoolAutoConfig", + ) + protect_config: "ProtectConfig" = proto.Field( + proto.MESSAGE, + number=137, + optional=True, + message="ProtectConfig", + ) + etag: str = proto.Field( + proto.STRING, + number=139, + ) + fleet: "Fleet" = proto.Field( + proto.MESSAGE, + number=140, + message="Fleet", + ) + security_posture_config: "SecurityPostureConfig" = proto.Field( + proto.MESSAGE, + number=145, + message="SecurityPostureConfig", + ) + + +class K8sBetaAPIConfig(proto.Message): + r"""Kubernetes open source beta apis enabled on the cluster. + + Attributes: + enabled_apis (MutableSequence[str]): + api name, e.g. + storage.k8s.io/v1beta1/csistoragecapacities. + """ + + enabled_apis: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class WorkloadConfig(proto.Message): + r"""WorkloadConfig defines the flags to enable or disable the + workload configurations for the cluster. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + audit_mode (google.cloud.container_v1beta1.types.WorkloadConfig.Mode): + Sets which mode of auditing should be used + for the cluster's workloads. + + This field is a member of `oneof`_ ``_audit_mode``. + """ + + class Mode(proto.Enum): + r"""Mode defines how to audit the workload configs. + + Values: + MODE_UNSPECIFIED (0): + Default value meaning that no mode has been + specified. + DISABLED (1): + This disables Workload Configuration auditing + on the cluster, meaning that nothing is + surfaced. + BASIC (4): + Applies the default set of policy auditing to + a cluster's workloads. + BASELINE (2): + Surfaces configurations that are not in line + with the Pod Security Standard Baseline policy. + RESTRICTED (3): + Surfaces configurations that are not in line + with the Pod Security Standard Restricted + policy. + """ + MODE_UNSPECIFIED = 0 + DISABLED = 1 + BASIC = 4 + BASELINE = 2 + RESTRICTED = 3 + + audit_mode: Mode = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Mode, + ) + + +class ProtectConfig(proto.Message): + r"""ProtectConfig defines the flags needed to enable/disable + features for the Protect API. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + workload_config (google.cloud.container_v1beta1.types.WorkloadConfig): + WorkloadConfig defines which actions are + enabled for a cluster's workload configurations. + + This field is a member of `oneof`_ ``_workload_config``. + workload_vulnerability_mode (google.cloud.container_v1beta1.types.ProtectConfig.WorkloadVulnerabilityMode): + Sets which mode to use for Protect workload + vulnerability scanning feature. + + This field is a member of `oneof`_ ``_workload_vulnerability_mode``. + """ + + class WorkloadVulnerabilityMode(proto.Enum): + r"""WorkloadVulnerabilityMode defines mode to perform + vulnerability scanning. + + Values: + WORKLOAD_VULNERABILITY_MODE_UNSPECIFIED (0): + Default value not specified. + DISABLED (1): + Disables Workload Vulnerability Scanning + feature on the cluster. + BASIC (2): + Applies basic vulnerability scanning settings + for cluster workloads. 
+ """ + WORKLOAD_VULNERABILITY_MODE_UNSPECIFIED = 0 + DISABLED = 1 + BASIC = 2 + + workload_config: "WorkloadConfig" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="WorkloadConfig", + ) + workload_vulnerability_mode: WorkloadVulnerabilityMode = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=WorkloadVulnerabilityMode, + ) + + +class SecurityPostureConfig(proto.Message): + r"""SecurityPostureConfig defines the flags needed to + enable/disable features for the Security Posture API. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (google.cloud.container_v1beta1.types.SecurityPostureConfig.Mode): + Sets which mode to use for Security Posture + features. + + This field is a member of `oneof`_ ``_mode``. + vulnerability_mode (google.cloud.container_v1beta1.types.SecurityPostureConfig.VulnerabilityMode): + Sets which mode to use for vulnerability + scanning. + + This field is a member of `oneof`_ ``_vulnerability_mode``. + """ + + class Mode(proto.Enum): + r"""Mode defines enablement mode for GKE Security posture + features. + + Values: + MODE_UNSPECIFIED (0): + Default value not specified. + DISABLED (1): + Disables Security Posture features on the + cluster. + BASIC (2): + Applies Security Posture features on the + cluster. + """ + MODE_UNSPECIFIED = 0 + DISABLED = 1 + BASIC = 2 + + class VulnerabilityMode(proto.Enum): + r"""VulnerabilityMode defines enablement mode for vulnerability + scanning. + + Values: + VULNERABILITY_MODE_UNSPECIFIED (0): + Default value not specified. + VULNERABILITY_DISABLED (1): + Disables vulnerability scanning on the + cluster. + VULNERABILITY_BASIC (2): + Applies basic vulnerability scanning on the + cluster. + VULNERABILITY_ENTERPRISE (3): + Applies the Security Posture's vulnerability + on cluster Enterprise level features. + """ + VULNERABILITY_MODE_UNSPECIFIED = 0 + VULNERABILITY_DISABLED = 1 + VULNERABILITY_BASIC = 2 + VULNERABILITY_ENTERPRISE = 3 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Mode, + ) + vulnerability_mode: VulnerabilityMode = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=VulnerabilityMode, + ) + + +class NodePoolDefaults(proto.Message): + r"""Subset of Nodepool message that has defaults. + + Attributes: + node_config_defaults (google.cloud.container_v1beta1.types.NodeConfigDefaults): + Subset of NodeConfig message that has + defaults. + """ + + node_config_defaults: "NodeConfigDefaults" = proto.Field( + proto.MESSAGE, + number=1, + message="NodeConfigDefaults", + ) + + +class NodeConfigDefaults(proto.Message): + r"""Subset of NodeConfig message that has defaults. + + Attributes: + gcfs_config (google.cloud.container_v1beta1.types.GcfsConfig): + GCFS (Google Container File System, also + known as Riptide) options. + logging_config (google.cloud.container_v1beta1.types.NodePoolLoggingConfig): + Logging configuration for node pools. + host_maintenance_policy (google.cloud.container_v1beta1.types.HostMaintenancePolicy): + HostMaintenancePolicy contains the desired + maintenance policy for the Google Compute Engine + hosts. 
+ """ + + gcfs_config: "GcfsConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="GcfsConfig", + ) + logging_config: "NodePoolLoggingConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="NodePoolLoggingConfig", + ) + host_maintenance_policy: "HostMaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=5, + message="HostMaintenancePolicy", + ) + + +class NodePoolAutoConfig(proto.Message): + r"""node pool configs that apply to all auto-provisioned node + pools in autopilot clusters and node auto-provisioning enabled + clusters + + Attributes: + network_tags (google.cloud.container_v1beta1.types.NetworkTags): + The list of instance tags applied to all + nodes. Tags are used to identify valid sources + or targets for network firewalls and are + specified by the client during cluster creation. + Each tag within the list must comply with + RFC1035. + """ + + network_tags: "NetworkTags" = proto.Field( + proto.MESSAGE, + number=1, + message="NetworkTags", + ) + + +class ClusterUpdate(proto.Message): + r"""ClusterUpdate describes an update to the cluster. Exactly one + update can be applied to a cluster with each request, so at most + one field can be provided. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + desired_node_version (str): + The Kubernetes version to change the nodes to + (typically an upgrade). + + Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "-": picks the Kubernetes master version + desired_monitoring_service (str): + The monitoring service the cluster should use to write + metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE 1.15). + - ``none`` - No metrics will be exported from the cluster. + + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``monitoring.googleapis.com`` for + earlier versions. + desired_addons_config (google.cloud.container_v1beta1.types.AddonsConfig): + Configurations for the various addons + available to run in the cluster. + desired_node_pool_id (str): + The node pool to be upgraded. This field is mandatory if + "desired_node_version", "desired_image_family", + "desired_node_pool_autoscaling", or + "desired_workload_metadata_config" is specified and there is + more than one node pool on the cluster. + desired_image_type (str): + The desired image type for the node pool. NOTE: Set the + "desired_node_pool" field as well. + desired_node_pool_autoscaling (google.cloud.container_v1beta1.types.NodePoolAutoscaling): + Autoscaler configuration for the node pool specified in + desired_node_pool_id. If there is only one pool in the + cluster and desired_node_pool_id is not provided then the + change applies to that single node pool. + desired_locations (MutableSequence[str]): + The desired list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. + + This list must always include the cluster's primary zone. 
+ + Warning: changing cluster locations will update the + locations of all node pools and will result in nodes being + added and/or removed. + desired_master_authorized_networks_config (google.cloud.container_v1beta1.types.MasterAuthorizedNetworksConfig): + The desired configuration options for master + authorized networks feature. + desired_pod_security_policy_config (google.cloud.container_v1beta1.types.PodSecurityPolicyConfig): + The desired configuration options for the + PodSecurityPolicy feature. + desired_cluster_autoscaling (google.cloud.container_v1beta1.types.ClusterAutoscaling): + Cluster-level autoscaling configuration. + desired_binary_authorization (google.cloud.container_v1beta1.types.BinaryAuthorization): + The desired configuration options for the + Binary Authorization feature. + desired_logging_service (str): + The logging service the cluster should use to write logs. + Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud Logging + service with a Kubernetes-native resource model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be used + for GKE 1.14+ or ``logging.googleapis.com`` for earlier + versions. + desired_resource_usage_export_config (google.cloud.container_v1beta1.types.ResourceUsageExportConfig): + The desired configuration for exporting + resource usage. + desired_vertical_pod_autoscaling (google.cloud.container_v1beta1.types.VerticalPodAutoscaling): + Cluster-level Vertical Pod Autoscaling + configuration. + desired_private_cluster_config (google.cloud.container_v1beta1.types.PrivateClusterConfig): + The desired private cluster configuration. + desired_intra_node_visibility_config (google.cloud.container_v1beta1.types.IntraNodeVisibilityConfig): + The desired config of Intra-node visibility. + desired_default_snat_status (google.cloud.container_v1beta1.types.DefaultSnatStatus): + The desired status of whether to disable + default sNAT for this cluster. + desired_cluster_telemetry (google.cloud.container_v1beta1.types.ClusterTelemetry): + The desired telemetry integration for the + cluster. + desired_release_channel (google.cloud.container_v1beta1.types.ReleaseChannel): + The desired release channel configuration. + desired_tpu_config (google.cloud.container_v1beta1.types.TpuConfig): + The desired Cloud TPU configuration. + desired_l4ilb_subsetting_config (google.cloud.container_v1beta1.types.ILBSubsettingConfig): + The desired L4 Internal Load Balancer + Subsetting configuration. + desired_datapath_provider (google.cloud.container_v1beta1.types.DatapathProvider): + The desired datapath provider for the + cluster. + desired_private_ipv6_google_access (google.cloud.container_v1beta1.types.PrivateIPv6GoogleAccess): + The desired state of IPv6 connectivity to + Google Services. + desired_notification_config (google.cloud.container_v1beta1.types.NotificationConfig): + The desired notification configuration. + desired_master_version (str): + The Kubernetes version to change the master + to. The only valid value is the latest supported + version. 
+ + Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "-": picks the default Kubernetes version + desired_gcfs_config (google.cloud.container_v1beta1.types.GcfsConfig): + The desired GCFS config for the cluster. + desired_database_encryption (google.cloud.container_v1beta1.types.DatabaseEncryption): + Configuration of etcd encryption. + desired_workload_identity_config (google.cloud.container_v1beta1.types.WorkloadIdentityConfig): + Configuration for Workload Identity. + desired_workload_certificates (google.cloud.container_v1beta1.types.WorkloadCertificates): + Configuration for issuance of mTLS keys and + certificates to Kubernetes pods. + desired_mesh_certificates (google.cloud.container_v1beta1.types.MeshCertificates): + Configuration for issuance of mTLS keys and + certificates to Kubernetes pods. + desired_workload_alts_config (google.cloud.container_v1beta1.types.WorkloadALTSConfig): + Configuration for direct-path (via ALTS) with + workload identity. + desired_shielded_nodes (google.cloud.container_v1beta1.types.ShieldedNodes): + Configuration for Shielded Nodes. + desired_cost_management_config (google.cloud.container_v1beta1.types.CostManagementConfig): + The desired configuration for the + fine-grained cost management feature. + desired_master (google.cloud.container_v1beta1.types.Master): + Configuration for master components. + desired_dns_config (google.cloud.container_v1beta1.types.DNSConfig): + DNSConfig contains clusterDNS config for this + cluster. + desired_service_external_ips_config (google.cloud.container_v1beta1.types.ServiceExternalIPsConfig): + ServiceExternalIPsConfig specifies the config + for the use of Services with ExternalIPs field. + desired_authenticator_groups_config (google.cloud.container_v1beta1.types.AuthenticatorGroupsConfig): + AuthenticatorGroupsConfig specifies the + config for the cluster security groups settings. + desired_logging_config (google.cloud.container_v1beta1.types.LoggingConfig): + The desired logging configuration. + desired_monitoring_config (google.cloud.container_v1beta1.types.MonitoringConfig): + The desired monitoring configuration. + desired_identity_service_config (google.cloud.container_v1beta1.types.IdentityServiceConfig): + The desired Identity Service component + configuration. + desired_enable_private_endpoint (bool): + Enable/Disable private endpoint for the + cluster's master. + + This field is a member of `oneof`_ ``_desired_enable_private_endpoint``. + desired_node_pool_auto_config_network_tags (google.cloud.container_v1beta1.types.NetworkTags): + The desired network tags that apply to all + auto-provisioned node pools in autopilot + clusters and node auto-provisioning enabled + clusters. + desired_protect_config (google.cloud.container_v1beta1.types.ProtectConfig): + Deprecated: Use DesiredSecurityPostureConfig + instead. Enable/Disable Protect API features for + the cluster. + + This field is a member of `oneof`_ ``_desired_protect_config``. + desired_gateway_api_config (google.cloud.container_v1beta1.types.GatewayAPIConfig): + The desired config of Gateway API on this + cluster. + etag (str): + The current etag of the cluster. 
+ If an etag is provided and does not match the + current etag of the cluster, update will be + blocked and an ABORTED error will be returned. + desired_node_pool_logging_config (google.cloud.container_v1beta1.types.NodePoolLoggingConfig): + The desired node pool logging configuration + defaults for the cluster. + desired_fleet (google.cloud.container_v1beta1.types.Fleet): + The desired fleet configuration for the + cluster. + desired_stack_type (google.cloud.container_v1beta1.types.StackType): + The desired stack type of the cluster. + If a stack type is provided and does not match + the current stack type of the cluster, update + will attempt to change the stack type to the new + type. + additional_pod_ranges_config (google.cloud.container_v1beta1.types.AdditionalPodRangesConfig): + The additional pod ranges to be added to the + cluster. These pod ranges can be used by node + pools to allocate pod IPs. + removed_additional_pod_ranges_config (google.cloud.container_v1beta1.types.AdditionalPodRangesConfig): + The additional pod ranges that are to be removed from the + cluster. The pod ranges specified here must have been + specified earlier in the 'additional_pod_ranges_config' + argument. + enable_k8s_beta_apis (google.cloud.container_v1beta1.types.K8sBetaAPIConfig): + Kubernetes open source beta apis enabled on + the cluster. Only beta apis + desired_security_posture_config (google.cloud.container_v1beta1.types.SecurityPostureConfig): + Enable/Disable Security Posture API features + for the cluster. + desired_network_performance_config (google.cloud.container_v1beta1.types.NetworkConfig.ClusterNetworkPerformanceConfig): + The desired network performance config. + desired_enable_fqdn_network_policy (bool): + Enable/Disable FQDN Network Policy for the + cluster. + + This field is a member of `oneof`_ ``_desired_enable_fqdn_network_policy``. + desired_autopilot_workload_policy_config (google.cloud.container_v1beta1.types.WorkloadPolicyConfig): + The desired workload policy configuration for + the autopilot cluster. + desired_k8s_beta_apis (google.cloud.container_v1beta1.types.K8sBetaAPIConfig): + Beta APIs enabled for cluster. + desired_host_maintenance_policy (google.cloud.container_v1beta1.types.HostMaintenancePolicy): + HostMaintenancePolicy contains the desired + maintenance policy for the Google Compute Engine + hosts. 
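Because at most one desired field of ClusterUpdate may be set per request, a typical flow reads the cluster, copies its etag, and sends a single change. A minimal sketch; the cluster path is a placeholder assumption:

# Sketch: single-field update guarded by the cluster's current etag.
from google.cloud import container_v1beta1

client = container_v1beta1.ClusterManagerClient()
name = "projects/my-project/locations/us-central1/clusters/example-cluster"  # placeholder

current = client.get_cluster(request={"name": name})
update = container_v1beta1.ClusterUpdate(
    desired_master_version="latest",  # version alias, see desired_master_version above
    etag=current.etag,  # request is ABORTED if the cluster changed meanwhile
)
client.update_cluster(request={"name": name, "update": update})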
+ """ + + desired_node_version: str = proto.Field( + proto.STRING, + number=4, + ) + desired_monitoring_service: str = proto.Field( + proto.STRING, + number=5, + ) + desired_addons_config: "AddonsConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="AddonsConfig", + ) + desired_node_pool_id: str = proto.Field( + proto.STRING, + number=7, + ) + desired_image_type: str = proto.Field( + proto.STRING, + number=8, + ) + desired_node_pool_autoscaling: "NodePoolAutoscaling" = proto.Field( + proto.MESSAGE, + number=9, + message="NodePoolAutoscaling", + ) + desired_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + desired_master_authorized_networks_config: "MasterAuthorizedNetworksConfig" = ( + proto.Field( + proto.MESSAGE, + number=12, + message="MasterAuthorizedNetworksConfig", + ) + ) + desired_pod_security_policy_config: "PodSecurityPolicyConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="PodSecurityPolicyConfig", + ) + desired_cluster_autoscaling: "ClusterAutoscaling" = proto.Field( + proto.MESSAGE, + number=15, + message="ClusterAutoscaling", + ) + desired_binary_authorization: "BinaryAuthorization" = proto.Field( + proto.MESSAGE, + number=16, + message="BinaryAuthorization", + ) + desired_logging_service: str = proto.Field( + proto.STRING, + number=19, + ) + desired_resource_usage_export_config: "ResourceUsageExportConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="ResourceUsageExportConfig", + ) + desired_vertical_pod_autoscaling: "VerticalPodAutoscaling" = proto.Field( + proto.MESSAGE, + number=22, + message="VerticalPodAutoscaling", + ) + desired_private_cluster_config: "PrivateClusterConfig" = proto.Field( + proto.MESSAGE, + number=25, + message="PrivateClusterConfig", + ) + desired_intra_node_visibility_config: "IntraNodeVisibilityConfig" = proto.Field( + proto.MESSAGE, + number=26, + message="IntraNodeVisibilityConfig", + ) + desired_default_snat_status: "DefaultSnatStatus" = proto.Field( + proto.MESSAGE, + number=28, + message="DefaultSnatStatus", + ) + desired_cluster_telemetry: "ClusterTelemetry" = proto.Field( + proto.MESSAGE, + number=30, + message="ClusterTelemetry", + ) + desired_release_channel: "ReleaseChannel" = proto.Field( + proto.MESSAGE, + number=31, + message="ReleaseChannel", + ) + desired_tpu_config: "TpuConfig" = proto.Field( + proto.MESSAGE, + number=38, + message="TpuConfig", + ) + desired_l4ilb_subsetting_config: "ILBSubsettingConfig" = proto.Field( + proto.MESSAGE, + number=39, + message="ILBSubsettingConfig", + ) + desired_datapath_provider: "DatapathProvider" = proto.Field( + proto.ENUM, + number=50, + enum="DatapathProvider", + ) + desired_private_ipv6_google_access: "PrivateIPv6GoogleAccess" = proto.Field( + proto.ENUM, + number=51, + enum="PrivateIPv6GoogleAccess", + ) + desired_notification_config: "NotificationConfig" = proto.Field( + proto.MESSAGE, + number=55, + message="NotificationConfig", + ) + desired_master_version: str = proto.Field( + proto.STRING, + number=100, + ) + desired_gcfs_config: "GcfsConfig" = proto.Field( + proto.MESSAGE, + number=109, + message="GcfsConfig", + ) + desired_database_encryption: "DatabaseEncryption" = proto.Field( + proto.MESSAGE, + number=46, + message="DatabaseEncryption", + ) + desired_workload_identity_config: "WorkloadIdentityConfig" = proto.Field( + proto.MESSAGE, + number=47, + message="WorkloadIdentityConfig", + ) + desired_workload_certificates: "WorkloadCertificates" = proto.Field( + proto.MESSAGE, + number=61, + 
message="WorkloadCertificates", + ) + desired_mesh_certificates: "MeshCertificates" = proto.Field( + proto.MESSAGE, + number=67, + message="MeshCertificates", + ) + desired_workload_alts_config: "WorkloadALTSConfig" = proto.Field( + proto.MESSAGE, + number=62, + message="WorkloadALTSConfig", + ) + desired_shielded_nodes: "ShieldedNodes" = proto.Field( + proto.MESSAGE, + number=48, + message="ShieldedNodes", + ) + desired_cost_management_config: "CostManagementConfig" = proto.Field( + proto.MESSAGE, + number=49, + message="CostManagementConfig", + ) + desired_master: "Master" = proto.Field( + proto.MESSAGE, + number=52, + message="Master", + ) + desired_dns_config: "DNSConfig" = proto.Field( + proto.MESSAGE, + number=53, + message="DNSConfig", + ) + desired_service_external_ips_config: "ServiceExternalIPsConfig" = proto.Field( + proto.MESSAGE, + number=60, + message="ServiceExternalIPsConfig", + ) + desired_authenticator_groups_config: "AuthenticatorGroupsConfig" = proto.Field( + proto.MESSAGE, + number=63, + message="AuthenticatorGroupsConfig", + ) + desired_logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=64, + message="LoggingConfig", + ) + desired_monitoring_config: "MonitoringConfig" = proto.Field( + proto.MESSAGE, + number=65, + message="MonitoringConfig", + ) + desired_identity_service_config: "IdentityServiceConfig" = proto.Field( + proto.MESSAGE, + number=66, + message="IdentityServiceConfig", + ) + desired_enable_private_endpoint: bool = proto.Field( + proto.BOOL, + number=71, + optional=True, + ) + desired_node_pool_auto_config_network_tags: "NetworkTags" = proto.Field( + proto.MESSAGE, + number=110, + message="NetworkTags", + ) + desired_protect_config: "ProtectConfig" = proto.Field( + proto.MESSAGE, + number=112, + optional=True, + message="ProtectConfig", + ) + desired_gateway_api_config: "GatewayAPIConfig" = proto.Field( + proto.MESSAGE, + number=114, + message="GatewayAPIConfig", + ) + etag: str = proto.Field( + proto.STRING, + number=115, + ) + desired_node_pool_logging_config: "NodePoolLoggingConfig" = proto.Field( + proto.MESSAGE, + number=116, + message="NodePoolLoggingConfig", + ) + desired_fleet: "Fleet" = proto.Field( + proto.MESSAGE, + number=117, + message="Fleet", + ) + desired_stack_type: "StackType" = proto.Field( + proto.ENUM, + number=119, + enum="StackType", + ) + additional_pod_ranges_config: "AdditionalPodRangesConfig" = proto.Field( + proto.MESSAGE, + number=120, + message="AdditionalPodRangesConfig", + ) + removed_additional_pod_ranges_config: "AdditionalPodRangesConfig" = proto.Field( + proto.MESSAGE, + number=121, + message="AdditionalPodRangesConfig", + ) + enable_k8s_beta_apis: "K8sBetaAPIConfig" = proto.Field( + proto.MESSAGE, + number=122, + message="K8sBetaAPIConfig", + ) + desired_security_posture_config: "SecurityPostureConfig" = proto.Field( + proto.MESSAGE, + number=124, + message="SecurityPostureConfig", + ) + desired_network_performance_config: "NetworkConfig.ClusterNetworkPerformanceConfig" = proto.Field( + proto.MESSAGE, + number=125, + message="NetworkConfig.ClusterNetworkPerformanceConfig", + ) + desired_enable_fqdn_network_policy: bool = proto.Field( + proto.BOOL, + number=126, + optional=True, + ) + desired_autopilot_workload_policy_config: "WorkloadPolicyConfig" = proto.Field( + proto.MESSAGE, + number=128, + message="WorkloadPolicyConfig", + ) + desired_k8s_beta_apis: "K8sBetaAPIConfig" = proto.Field( + proto.MESSAGE, + number=131, + message="K8sBetaAPIConfig", + ) + desired_host_maintenance_policy: 
"HostMaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=132, + message="HostMaintenancePolicy", + ) + + +class AdditionalPodRangesConfig(proto.Message): + r"""AdditionalPodRangesConfig is the configuration for additional + pod secondary ranges supporting the ClusterUpdate message. + + Attributes: + pod_range_names (MutableSequence[str]): + Name for pod secondary ipv4 range which has + the actual range defined ahead. + pod_range_info (MutableSequence[google.cloud.container_v1beta1.types.RangeInfo]): + Output only. [Output only] Information for additional pod + range. + """ + + pod_range_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + pod_range_info: MutableSequence["RangeInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RangeInfo", + ) + + +class RangeInfo(proto.Message): + r"""RangeInfo contains the range name and the range utilization + by this cluster. + + Attributes: + range_name (str): + Output only. [Output only] Name of a range. + utilization (float): + Output only. [Output only] The utilization of the range. + """ + + range_name: str = proto.Field( + proto.STRING, + number=1, + ) + utilization: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + +class Operation(proto.Message): + r"""This operation resource represents operations that may have + happened or are happening on the cluster. All fields are output + only. + + Attributes: + name (str): + The server-assigned ID for the operation. + zone (str): + The name of the Google Compute Engine + `zone `__ + in which the operation is taking place. This field is + deprecated, use location instead. + operation_type (google.cloud.container_v1beta1.types.Operation.Type): + The operation type. + status (google.cloud.container_v1beta1.types.Operation.Status): + The current status of the operation. + detail (str): + Detailed operation progress, if available. + status_message (str): + Output only. If an error has occurred, a + textual description of the error. Deprecated. + Use field error instead. + self_link (str): + Server-defined URI for the operation. Example: + ``https://container.googleapis.com/v1alpha1/projects/123/locations/us-central1/operations/operation-123``. + target_link (str): + Server-defined URI for the target of the operation. The + format of this is a URI to the resource being modified (such + as a cluster, node pool, or node). For node pool repairs, + there may be multiple nodes being repaired, but only one + will be the target. + + Examples: + + - + + ``https://container.googleapis.com/v1/projects/123/locations/us-central1/clusters/my-cluster`` + + ``https://container.googleapis.com/v1/projects/123/zones/us-central1-c/clusters/my-cluster/nodePools/my-np`` + + ``https://container.googleapis.com/v1/projects/123/zones/us-central1-c/clusters/my-cluster/nodePools/my-np/node/my-node`` + location (str): + [Output only] The name of the Google Compute Engine + `zone `__ + or + `region `__ + in which the cluster resides. + start_time (str): + [Output only] The time the operation started, in + `RFC3339 `__ text + format. + end_time (str): + [Output only] The time the operation completed, in + `RFC3339 `__ text + format. + progress (google.cloud.container_v1beta1.types.OperationProgress): + Output only. [Output only] Progress information for an + operation. + cluster_conditions (MutableSequence[google.cloud.container_v1beta1.types.StatusCondition]): + Which conditions caused the current cluster + state. Deprecated. Use field error instead. 
+ nodepool_conditions (MutableSequence[google.cloud.container_v1beta1.types.StatusCondition]): + Which conditions caused the current node pool + state. Deprecated. Use field error instead. + error (google.rpc.status_pb2.Status): + The error result of the operation in case of + failure. + """ + + class Status(proto.Enum): + r"""Current status of the operation. + + Values: + STATUS_UNSPECIFIED (0): + Not set. + PENDING (1): + The operation has been created. + RUNNING (2): + The operation is currently running. + DONE (3): + The operation is done, either cancelled or + completed. + ABORTING (4): + The operation is aborting. + """ + STATUS_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + DONE = 3 + ABORTING = 4 + + class Type(proto.Enum): + r"""Operation type categorizes the operation. + + Values: + TYPE_UNSPECIFIED (0): + Not set. + CREATE_CLUSTER (1): + The cluster is being created. The cluster should be assumed + to be unusable until the operation finishes. + + In the event of the operation failing, the cluster will + enter the [ERROR state][Cluster.Status.ERROR] and eventually + be deleted. + DELETE_CLUSTER (2): + The cluster is being deleted. The cluster should be assumed + to be unusable as soon as this operation starts. + + In the event of the operation failing, the cluster will + enter the [ERROR state][Cluster.Status.ERROR] and the + deletion will be automatically retried until completed. + UPGRADE_MASTER (3): + The [cluster + version][google.container.v1beta1.ClusterUpdate.desired_master_version] + is being updated. Note that this includes "upgrades" to the + same version, which are simply a recreation. This also + includes + `auto-upgrades `__. + For more details, see `documentation on cluster + upgrades `__. + UPGRADE_NODES (4): + A node pool is being updated. Despite calling this an + "upgrade", this includes most forms of updates to node + pools. This also includes + `auto-upgrades `__. + + This operation sets the + [progress][google.container.v1beta1.Operation.progress] + field and may be + [canceled][google.container.v1beta1.ClusterManager.CancelOperation]. + + The upgrade strategy depends on `node pool + configuration `__. + The nodes are generally still usable during this operation. + REPAIR_CLUSTER (5): + A problem has been detected with the control plane and is + being repaired. This operation type is initiated by GKE. For + more details, see `documentation on + repairs `__. + UPDATE_CLUSTER (6): + The cluster is being updated. This is a broad category of + operations and includes operations that only change metadata + as well as those that must recreate the entire cluster. If + the control plane must be recreated, this will cause + temporary downtime for zonal clusters. + + Some features require recreating the nodes as well. Those + will be recreated as separate operations and the update may + not be completely functional until the node pool + recreations finish. Node recreations will generally follow + `maintenance + policies `__. + + Some GKE-initiated operations use this type. This includes + certain types of auto-upgrades and incident mitigations. + CREATE_NODE_POOL (7): + A node pool is being created. The node pool should be + assumed to be unusable until this operation finishes. In the + event of an error, the node pool may be partially created. + + If enabled, `node + autoprovisioning `__ + may have automatically initiated such operations. + DELETE_NODE_POOL (8): + The node pool is being deleted.
The node pool + should be assumed to be unusable as soon as this + operation starts. + SET_NODE_POOL_MANAGEMENT (9): + The node pool's + [management][google.container.v1beta1.NodePool.management] + field is being updated. These operations only update + metadata and may be concurrent with most other operations. + AUTO_REPAIR_NODES (10): + A problem has been detected with nodes and `they are being + repaired `__. + This operation type is initiated by GKE, typically + automatically. This operation may be concurrent with other + operations and there may be multiple repairs occurring on + the same node pool. + AUTO_UPGRADE_NODES (11): + Unused. Automatic node upgrade uses + [UPGRADE_NODES][google.container.v1beta1.Operation.Type.UPGRADE_NODES]. + SET_LABELS (12): + Unused. Updating labels uses + [UPDATE_CLUSTER][google.container.v1beta1.Operation.Type.UPDATE_CLUSTER]. + SET_MASTER_AUTH (13): + Unused. Updating master auth uses + [UPDATE_CLUSTER][google.container.v1beta1.Operation.Type.UPDATE_CLUSTER]. + SET_NODE_POOL_SIZE (14): + The node pool is being resized. With the + exception of resizing to or from size zero, the + node pool is generally usable during this + operation. + SET_NETWORK_POLICY (15): + Unused. Updating network policy uses + [UPDATE_CLUSTER][google.container.v1beta1.Operation.Type.UPDATE_CLUSTER]. + SET_MAINTENANCE_POLICY (16): + Unused. Updating maintenance policy uses + [UPDATE_CLUSTER][google.container.v1beta1.Operation.Type.UPDATE_CLUSTER]. + RESIZE_CLUSTER (18): + The control plane is being resized. This operation type is + initiated by GKE. These operations are often performed + preemptively to ensure that the control plane has sufficient + resources and are not typically an indication of issues. For + more details, see `documentation on + resizes `__.
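+ + As an illustrative sketch only (``op`` below is a placeholder + Operation message, not something defined in this change):: + + if op.operation_type == Operation.Type.UPGRADE_NODES: + # Node upgrades report progress and may be cancelled. + print(op.progress)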
+ """ + TYPE_UNSPECIFIED = 0 + CREATE_CLUSTER = 1 + DELETE_CLUSTER = 2 + UPGRADE_MASTER = 3 + UPGRADE_NODES = 4 + REPAIR_CLUSTER = 5 + UPDATE_CLUSTER = 6 + CREATE_NODE_POOL = 7 + DELETE_NODE_POOL = 8 + SET_NODE_POOL_MANAGEMENT = 9 + AUTO_REPAIR_NODES = 10 + AUTO_UPGRADE_NODES = 11 + SET_LABELS = 12 + SET_MASTER_AUTH = 13 + SET_NODE_POOL_SIZE = 14 + SET_NETWORK_POLICY = 15 + SET_MAINTENANCE_POLICY = 16 + RESIZE_CLUSTER = 18 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + operation_type: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + status: Status = proto.Field( + proto.ENUM, + number=4, + enum=Status, + ) + detail: str = proto.Field( + proto.STRING, + number=8, + ) + status_message: str = proto.Field( + proto.STRING, + number=5, + ) + self_link: str = proto.Field( + proto.STRING, + number=6, + ) + target_link: str = proto.Field( + proto.STRING, + number=7, + ) + location: str = proto.Field( + proto.STRING, + number=9, + ) + start_time: str = proto.Field( + proto.STRING, + number=10, + ) + end_time: str = proto.Field( + proto.STRING, + number=11, + ) + progress: "OperationProgress" = proto.Field( + proto.MESSAGE, + number=12, + message="OperationProgress", + ) + cluster_conditions: MutableSequence["StatusCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="StatusCondition", + ) + nodepool_conditions: MutableSequence["StatusCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="StatusCondition", + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=15, + message=status_pb2.Status, + ) + + +class OperationProgress(proto.Message): + r"""Information about operation (or operation stage) progress. + + Attributes: + name (str): + A non-parameterized string describing an + operation stage. Unset for single-stage + operations. + status (google.cloud.container_v1beta1.types.Operation.Status): + Status of an operation stage. + Unset for single-stage operations. + metrics (MutableSequence[google.cloud.container_v1beta1.types.OperationProgress.Metric]): + Progress metric bundle, for example: metrics: [{name: "nodes + done", int_value: 15}, {name: "nodes total", int_value: 32}] + or metrics: [{name: "progress", double_value: 0.56}, {name: + "progress scale", double_value: 1.0}] + stages (MutableSequence[google.cloud.container_v1beta1.types.OperationProgress]): + Substages of an operation or a stage. + """ + + class Metric(proto.Message): + r"""Progress metric is (string, int|float|string) pair. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Metric name, e.g., "nodes total", + "percent done". + int_value (int): + For metrics with integer value. + + This field is a member of `oneof`_ ``value``. + double_value (float): + For metrics with floating point value. + + This field is a member of `oneof`_ ``value``. + string_value (str): + For metrics with custom values (ratios, + visual progress, etc.). + + This field is a member of `oneof`_ ``value``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + int_value: int = proto.Field( + proto.INT64, + number=2, + oneof="value", + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=3, + oneof="value", + ) + string_value: str = proto.Field( + proto.STRING, + number=4, + oneof="value", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + status: "Operation.Status" = proto.Field( + proto.ENUM, + number=2, + enum="Operation.Status", + ) + metrics: MutableSequence[Metric] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Metric, + ) + stages: MutableSequence["OperationProgress"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="OperationProgress", + ) + + +class CreateClusterRequest(proto.Message): + r"""CreateClusterRequest creates a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the parent + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the parent field. + cluster (google.cloud.container_v1beta1.types.Cluster): + Required. A `cluster + resource `__ + parent (str): + The parent (project and location) where the cluster will be + created. Specified in the format ``projects/*/locations/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: "Cluster" = proto.Field( + proto.MESSAGE, + number=3, + message="Cluster", + ) + parent: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetClusterRequest(proto.Message): + r"""GetClusterRequest gets the settings of a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to retrieve. This field has been deprecated and + replaced by the name field. + name (str): + The name (project, location, cluster) of the cluster to + retrieve. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class UpdateClusterRequest(proto.Message): + r"""UpdateClusterRequest updates the settings of a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + update (google.cloud.container_v1beta1.types.ClusterUpdate): + Required. A description of the update. 
+ name (str): + The name (project, location, cluster) of the cluster to + update. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + update: "ClusterUpdate" = proto.Field( + proto.MESSAGE, + number=4, + message="ClusterUpdate", + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class UpdateNodePoolRequest(proto.Message): + r"""UpdateNodePoolRequest updates the version of a node pool. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Required. Deprecated. The name of the node + pool to upgrade. This field has been deprecated + and replaced by the name field. + node_version (str): + Required. The Kubernetes version to change + the nodes to (typically an upgrade). + + Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "-": picks the Kubernetes master version + image_type (str): + Required. The desired image type for the node + pool. Please see + https://cloud.google.com/kubernetes-engine/docs/concepts/node-images + for available image types. + locations (MutableSequence[str]): + The desired list of Google Compute Engine + `zones `__ + in which the node pool's nodes should be located. Changing + the locations for a node pool will result in nodes being + either created or removed from the node pool, depending on + whether locations are being added or removed. + workload_metadata_config (google.cloud.container_v1beta1.types.WorkloadMetadataConfig): + The desired workload metadata config for the + node pool. + name (str): + The name (project, location, cluster, node pool) of the node + pool to update. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + upgrade_settings (google.cloud.container_v1beta1.types.NodePool.UpgradeSettings): + Upgrade settings control disruption and speed + of the upgrade. + tags (google.cloud.container_v1beta1.types.NetworkTags): + The desired network tags to be applied to all nodes in the + node pool. If this field is not present, the tags will not + be changed. Otherwise, the existing network tags will be + *replaced* with the provided tags. + taints (google.cloud.container_v1beta1.types.NodeTaints): + The desired node taints to be applied to all nodes in the + node pool. If this field is not present, the taints will not + be changed. Otherwise, the existing node taints will be + *replaced* with the provided taints. + labels (google.cloud.container_v1beta1.types.NodeLabels): + The desired node labels to be applied to all nodes in the + node pool.
If this field is not present, the labels will not + be changed. Otherwise, the existing node labels will be + *replaced* with the provided labels. + linux_node_config (google.cloud.container_v1beta1.types.LinuxNodeConfig): + Parameters that can be configured on Linux + nodes. + kubelet_config (google.cloud.container_v1beta1.types.NodeKubeletConfig): + Node kubelet configs. + node_network_config (google.cloud.container_v1beta1.types.NodeNetworkConfig): + Node network config. + gcfs_config (google.cloud.container_v1beta1.types.GcfsConfig): + GCFS config. + confidential_nodes (google.cloud.container_v1beta1.types.ConfidentialNodes): + Confidential nodes config. + All the nodes in the node pool will be + Confidential VMs once enabled. + gvnic (google.cloud.container_v1beta1.types.VirtualNIC): + Enable or disable gvnic on the node pool. + etag (str): + The current etag of the node pool. + If an etag is provided and does not match the + current etag of the node pool, update will be + blocked and an ABORTED error will be returned. + fast_socket (google.cloud.container_v1beta1.types.FastSocket): + Enable or disable NCCL fast socket for the + node pool. + logging_config (google.cloud.container_v1beta1.types.NodePoolLoggingConfig): + Logging configuration. + resource_labels (google.cloud.container_v1beta1.types.ResourceLabels): + The resource labels for the node pool to use + to annotate any related Google Compute Engine + resources. + windows_node_config (google.cloud.container_v1beta1.types.WindowsNodeConfig): + Parameters that can be configured on Windows + nodes. + machine_type (str): + Optional. The desired machine type for nodes + in the node pool. Initiates an upgrade operation + that migrates the nodes in the node pool to the + specified machine type. + disk_type (str): + Optional. The desired disk type for nodes in + the node pool. Initiates an upgrade operation + that migrates the nodes in the node pool to the + specified disk type. + disk_size_gb (int): + Optional. The desired disk size for nodes in + the node pool. Initiates an upgrade operation + that migrates the nodes in the node pool to the + specified disk size.
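+ + A minimal construction sketch (the resource name and values below + are placeholders, not taken from this change):: + + request = UpdateNodePoolRequest( + name="projects/p/locations/us-central1/clusters/c/nodePools/np", + node_version="latest", + image_type="COS_CONTAINERD", + )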
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + node_version: str = proto.Field( + proto.STRING, + number=5, + ) + image_type: str = proto.Field( + proto.STRING, + number=6, + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + workload_metadata_config: "WorkloadMetadataConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="WorkloadMetadataConfig", + ) + name: str = proto.Field( + proto.STRING, + number=8, + ) + upgrade_settings: "NodePool.UpgradeSettings" = proto.Field( + proto.MESSAGE, + number=15, + message="NodePool.UpgradeSettings", + ) + tags: "NetworkTags" = proto.Field( + proto.MESSAGE, + number=16, + message="NetworkTags", + ) + taints: "NodeTaints" = proto.Field( + proto.MESSAGE, + number=17, + message="NodeTaints", + ) + labels: "NodeLabels" = proto.Field( + proto.MESSAGE, + number=18, + message="NodeLabels", + ) + linux_node_config: "LinuxNodeConfig" = proto.Field( + proto.MESSAGE, + number=19, + message="LinuxNodeConfig", + ) + kubelet_config: "NodeKubeletConfig" = proto.Field( + proto.MESSAGE, + number=20, + message="NodeKubeletConfig", + ) + node_network_config: "NodeNetworkConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="NodeNetworkConfig", + ) + gcfs_config: "GcfsConfig" = proto.Field( + proto.MESSAGE, + number=22, + message="GcfsConfig", + ) + confidential_nodes: "ConfidentialNodes" = proto.Field( + proto.MESSAGE, + number=23, + message="ConfidentialNodes", + ) + gvnic: "VirtualNIC" = proto.Field( + proto.MESSAGE, + number=29, + message="VirtualNIC", + ) + etag: str = proto.Field( + proto.STRING, + number=30, + ) + fast_socket: "FastSocket" = proto.Field( + proto.MESSAGE, + number=31, + message="FastSocket", + ) + logging_config: "NodePoolLoggingConfig" = proto.Field( + proto.MESSAGE, + number=32, + message="NodePoolLoggingConfig", + ) + resource_labels: "ResourceLabels" = proto.Field( + proto.MESSAGE, + number=33, + message="ResourceLabels", + ) + windows_node_config: "WindowsNodeConfig" = proto.Field( + proto.MESSAGE, + number=34, + message="WindowsNodeConfig", + ) + machine_type: str = proto.Field( + proto.STRING, + number=36, + ) + disk_type: str = proto.Field( + proto.STRING, + number=37, + ) + disk_size_gb: int = proto.Field( + proto.INT64, + number=38, + ) + + +class SetNodePoolAutoscalingRequest(proto.Message): + r"""SetNodePoolAutoscalingRequest sets the autoscaler settings of + a node pool. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Required. Deprecated. The name of the node + pool to upgrade. This field has been deprecated + and replaced by the name field. + autoscaling (google.cloud.container_v1beta1.types.NodePoolAutoscaling): + Required. Autoscaling configuration for the + node pool. 
+ name (str): + The name (project, location, cluster, node pool) of the node + pool to set autoscaler settings. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + autoscaling: "NodePoolAutoscaling" = proto.Field( + proto.MESSAGE, + number=5, + message="NodePoolAutoscaling", + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class SetLoggingServiceRequest(proto.Message): + r"""SetLoggingServiceRequest sets the logging service of a + cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + logging_service (str): + Required. The logging service the cluster should use to + write logs. Currently available options: + + - ``logging.googleapis.com/kubernetes`` - The Cloud Logging + service with a Kubernetes-native resource model + - ``logging.googleapis.com`` - The legacy Cloud Logging + service (no longer available as of GKE 1.15). + - ``none`` - no logs will be exported from the cluster. + + If left as an empty + string,\ ``logging.googleapis.com/kubernetes`` will be used + for GKE 1.14+ or ``logging.googleapis.com`` for earlier + versions. + name (str): + The name (project, location, cluster) of the cluster to set + logging. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + logging_service: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SetMonitoringServiceRequest(proto.Message): + r"""SetMonitoringServiceRequest sets the monitoring service of a + cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + monitoring_service (str): + Required. The monitoring service the cluster should use to + write metrics. Currently available options: + + - "monitoring.googleapis.com/kubernetes" - The Cloud + Monitoring service with a Kubernetes-native resource + model + - ``monitoring.googleapis.com`` - The legacy Cloud + Monitoring service (no longer available as of GKE 1.15). + - ``none`` - No metrics will be exported from the cluster. 
+ + If left as an empty + string,\ ``monitoring.googleapis.com/kubernetes`` will be + used for GKE 1.14+ or ``monitoring.googleapis.com`` for + earlier versions. + name (str): + The name (project, location, cluster) of the cluster to set + monitoring. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + monitoring_service: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class SetAddonsConfigRequest(proto.Message): + r"""SetAddonsRequest sets the addons associated with the cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + addons_config (google.cloud.container_v1beta1.types.AddonsConfig): + Required. The desired configurations for the + various addons available to run in the cluster. + name (str): + The name (project, location, cluster) of the cluster to set + addons. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + addons_config: "AddonsConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="AddonsConfig", + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class SetLocationsRequest(proto.Message): + r"""SetLocationsRequest sets the locations of the cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + locations (MutableSequence[str]): + Required. The desired list of Google Compute Engine + `zones `__ + in which the cluster's nodes should be located. Changing the + locations a cluster is in will result in nodes being either + created or removed from the cluster, depending on whether + locations are being added or removed. + + This list must always include the cluster's primary zone. + name (str): + The name (project, location, cluster) of the cluster to set + locations. Specified in the format + ``projects/*/locations/*/clusters/*``. 
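+ + A minimal construction sketch (the resource name and zones are + placeholders; the list must still include the cluster's primary + zone):: + + request = SetLocationsRequest( + name="projects/p/locations/us-central1/clusters/c", + locations=["us-central1-a", "us-central1-b"], + )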
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class UpdateMasterRequest(proto.Message): + r"""UpdateMasterRequest updates the master of the cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + master_version (str): + Required. The Kubernetes version to change + the master to. + Users may specify either explicit versions + offered by Kubernetes Engine or version aliases, + which have the following behavior: + + - "latest": picks the highest valid Kubernetes + version + - "1.X": picks the highest valid patch+gke.N + patch in the 1.X version + - "1.X.Y": picks the highest valid gke.N patch + in the 1.X.Y version + - "1.X.Y-gke.N": picks an explicit Kubernetes + version + - "-": picks the default Kubernetes version + name (str): + The name (project, location, cluster) of the cluster to + update. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + master_version: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SetMasterAuthRequest(proto.Message): + r"""SetMasterAuthRequest updates the admin password of a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to upgrade. This field has been deprecated and + replaced by the name field. + action (google.cloud.container_v1beta1.types.SetMasterAuthRequest.Action): + Required. The exact form of action to be + taken on the master auth. + update (google.cloud.container_v1beta1.types.MasterAuth): + Required. A description of the update. + name (str): + The name (project, location, cluster) of the cluster to set + auth. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + class Action(proto.Enum): + r"""Operation type: what type update to perform. + + Values: + UNKNOWN (0): + Operation is unknown and will error out. + SET_PASSWORD (1): + Set the password to a user generated value. + GENERATE_PASSWORD (2): + Generate a new password and set it to that. + SET_USERNAME (3): + Set the username. If an empty username is + provided, basic authentication is disabled for + the cluster. 
If a non-empty username is + provided, basic authentication is enabled, with + either a provided password or a generated one. + """ + UNKNOWN = 0 + SET_PASSWORD = 1 + GENERATE_PASSWORD = 2 + SET_USERNAME = 3 + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + action: Action = proto.Field( + proto.ENUM, + number=4, + enum=Action, + ) + update: "MasterAuth" = proto.Field( + proto.MESSAGE, + number=5, + message="MasterAuth", + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DeleteClusterRequest(proto.Message): + r"""DeleteClusterRequest deletes a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to delete. This field has been deprecated and + replaced by the name field. + name (str): + The name (project, location, cluster) of the cluster to + delete. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListClustersRequest(proto.Message): + r"""ListClustersRequest lists clusters. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the parent + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides, or "-" for all zones. This + field has been deprecated and replaced by the parent field. + parent (str): + The parent (project and location) where the clusters will be + listed. Specified in the format ``projects/*/locations/*``. + Location "-" matches all zones and all regions. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + parent: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListClustersResponse(proto.Message): + r"""ListClustersResponse is the result of ListClustersRequest. + + Attributes: + clusters (MutableSequence[google.cloud.container_v1beta1.types.Cluster]): + A list of clusters in the project in the + specified zone, or across all zones. + missing_zones (MutableSequence[str]): + If any zones are listed here, the list of + clusters returned may be missing those zones. + """ + + clusters: MutableSequence["Cluster"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Cluster", + ) + missing_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class GetOperationRequest(proto.Message): + r"""GetOperationRequest gets a single operation. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated.
The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + operation_id (str): + Required. Deprecated. The server-assigned ``name`` of the + operation. This field has been deprecated and replaced by + the name field. + name (str): + The name (project, location, operation id) of the operation + to get. Specified in the format + ``projects/*/locations/*/operations/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + operation_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListOperationsRequest(proto.Message): + r"""ListOperationsRequest lists operations. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the parent + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + to return operations for, or ``-`` for all zones. This field + has been deprecated and replaced by the parent field. + parent (str): + The parent (project and location) where the operations will + be listed. Specified in the format + ``projects/*/locations/*``. Location "-" matches all zones + and all regions. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + parent: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CancelOperationRequest(proto.Message): + r"""CancelOperationRequest cancels a single operation. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the operation resides. This field has been + deprecated and replaced by the name field. + operation_id (str): + Required. Deprecated. The server-assigned ``name`` of the + operation. This field has been deprecated and replaced by + the name field. + name (str): + The name (project, location, operation id) of the operation + to cancel. Specified in the format + ``projects/*/locations/*/operations/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + operation_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListOperationsResponse(proto.Message): + r"""ListOperationsResponse is the result of + ListOperationsRequest. + + Attributes: + operations (MutableSequence[google.cloud.container_v1beta1.types.Operation]): + A list of operations in the project in the + specified zone. + missing_zones (MutableSequence[str]): + If any zones are listed here, the list of + operations returned may be missing the + operations from those zones. + """ + + operations: MutableSequence["Operation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Operation", + ) + missing_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class GetServerConfigRequest(proto.Message): + r"""Gets the current Kubernetes Engine service configuration. + + Attributes: + project_id (str): + Required. Deprecated. 
The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + to return operations for. This field has been deprecated and + replaced by the name field. + name (str): + The name (project and location) of the server config to get, + specified in the format ``projects/*/locations/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ServerConfig(proto.Message): + r"""Kubernetes Engine service configuration. + + Attributes: + default_cluster_version (str): + Version of Kubernetes the service deploys by + default. + valid_node_versions (MutableSequence[str]): + List of valid node upgrade target versions, + in descending order. + default_image_type (str): + Default image type. + valid_image_types (MutableSequence[str]): + List of valid image types. + valid_master_versions (MutableSequence[str]): + List of valid master versions, in descending + order. + channels (MutableSequence[google.cloud.container_v1beta1.types.ServerConfig.ReleaseChannelConfig]): + List of release channel configurations. + windows_version_maps (MutableMapping[str, google.cloud.container_v1beta1.types.WindowsVersions]): + Maps of Kubernetes version and supported + Windows server versions. + """ + + class ReleaseChannelConfig(proto.Message): + r"""ReleaseChannelConfig exposes configuration for a release + channel. + + Attributes: + channel (google.cloud.container_v1beta1.types.ReleaseChannel.Channel): + The release channel this configuration + applies to. + default_version (str): + The default version for newly created + clusters on the channel. + available_versions (MutableSequence[google.cloud.container_v1beta1.types.ServerConfig.ReleaseChannelConfig.AvailableVersion]): + Deprecated. This field has been deprecated and replaced with + the valid_versions field. + valid_versions (MutableSequence[str]): + List of valid versions for the channel. + """ + + class AvailableVersion(proto.Message): + r"""Deprecated. + + Attributes: + version (str): + Kubernetes version. + reason (str): + Reason for availability. 
+ """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + reason: str = proto.Field( + proto.STRING, + number=2, + ) + + channel: "ReleaseChannel.Channel" = proto.Field( + proto.ENUM, + number=1, + enum="ReleaseChannel.Channel", + ) + default_version: str = proto.Field( + proto.STRING, + number=2, + ) + available_versions: MutableSequence[ + "ServerConfig.ReleaseChannelConfig.AvailableVersion" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="ServerConfig.ReleaseChannelConfig.AvailableVersion", + ) + valid_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + default_cluster_version: str = proto.Field( + proto.STRING, + number=1, + ) + valid_node_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + default_image_type: str = proto.Field( + proto.STRING, + number=4, + ) + valid_image_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + valid_master_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + channels: MutableSequence[ReleaseChannelConfig] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message=ReleaseChannelConfig, + ) + windows_version_maps: MutableMapping[str, "WindowsVersions"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=10, + message="WindowsVersions", + ) + + +class BestEffortProvisioning(proto.Message): + r"""Best effort provisioning. + + Attributes: + enabled (bool): + When this is enabled, cluster/node pool + creations will ignore non-fatal errors like + stockout to best provision as many nodes as + possible right now and eventually bring up all + target number of nodes + min_provision_nodes (int): + Minimum number of nodes to be provisioned to + be considered as succeeded, and the rest of + nodes will be provisioned gradually and + eventually when stockout issue has been + resolved. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + min_provision_nodes: int = proto.Field( + proto.INT32, + number=2, + ) + + +class WindowsVersions(proto.Message): + r"""Windows server versions. + + Attributes: + windows_versions (MutableSequence[google.cloud.container_v1beta1.types.WindowsVersions.WindowsVersion]): + List of Windows server versions. + """ + + class WindowsVersion(proto.Message): + r"""Windows server version. + + Attributes: + image_type (str): + Windows server image type + os_version (str): + Windows server build number + support_end_date (google.type.date_pb2.Date): + Mainstream support end date + """ + + image_type: str = proto.Field( + proto.STRING, + number=1, + ) + os_version: str = proto.Field( + proto.STRING, + number=2, + ) + support_end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=3, + message=date_pb2.Date, + ) + + windows_versions: MutableSequence[WindowsVersion] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=WindowsVersion, + ) + + +class CreateNodePoolRequest(proto.Message): + r"""CreateNodePoolRequest creates a node pool for a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the parent + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the parent field. + cluster_id (str): + Required. Deprecated. The name of the + cluster. 
This field has been deprecated and + replaced by the parent field. + node_pool (google.cloud.container_v1beta1.types.NodePool): + Required. The node pool to create. + parent (str): + The parent (project, location, cluster name) where the node + pool will be created. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool: "NodePool" = proto.Field( + proto.MESSAGE, + number=4, + message="NodePool", + ) + parent: str = proto.Field( + proto.STRING, + number=6, + ) + + +class DeleteNodePoolRequest(proto.Message): + r"""DeleteNodePoolRequest deletes a node pool for a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Required. Deprecated. The name of the node + pool to delete. This field has been deprecated + and replaced by the name field. + name (str): + The name (project, location, cluster, node pool id) of the + node pool to delete. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ListNodePoolsRequest(proto.Message): + r"""ListNodePoolsRequest lists the node pool(s) for a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the parent + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the parent field. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated and + replaced by the parent field. + parent (str): + The parent (project, location, cluster name) where the node + pools will be listed. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + parent: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetNodePoolRequest(proto.Message): + r"""GetNodePoolRequest retrieves a node pool for a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. 
The name of the + cluster. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Required. Deprecated. The name of the node + pool. This field has been deprecated and + replaced by the name field. + name (str): + The name (project, location, cluster, node pool id) of the + node pool to get. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class BlueGreenSettings(proto.Message): + r"""Settings for blue-green upgrade. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + standard_rollout_policy (google.cloud.container_v1beta1.types.BlueGreenSettings.StandardRolloutPolicy): + Standard policy for the blue-green upgrade. + + This field is a member of `oneof`_ ``rollout_policy``. + node_pool_soak_duration (google.protobuf.duration_pb2.Duration): + Time needed after draining entire blue pool. + After this period, blue pool will be cleaned up. + + This field is a member of `oneof`_ ``_node_pool_soak_duration``. + """ + + class StandardRolloutPolicy(proto.Message): + r"""Standard rollout policy is the default policy for blue-green. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + batch_percentage (float): + Percentage of the blue pool nodes to drain in a batch. The + range of this field should be (0.0, 1.0]. + + This field is a member of `oneof`_ ``update_batch_size``. + batch_node_count (int): + Number of blue nodes to drain in a batch. + + This field is a member of `oneof`_ ``update_batch_size``. + batch_soak_duration (google.protobuf.duration_pb2.Duration): + Soak time after each batch gets drained. + Defaults to zero. + + This field is a member of `oneof`_ ``_batch_soak_duration``. + """ + + batch_percentage: float = proto.Field( + proto.FLOAT, + number=1, + oneof="update_batch_size", + ) + batch_node_count: int = proto.Field( + proto.INT32, + number=2, + oneof="update_batch_size", + ) + batch_soak_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=duration_pb2.Duration, + ) + + standard_rollout_policy: StandardRolloutPolicy = proto.Field( + proto.MESSAGE, + number=1, + oneof="rollout_policy", + message=StandardRolloutPolicy, + ) + node_pool_soak_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=duration_pb2.Duration, + ) + + +class NodePool(proto.Message): + r"""NodePool contains the name and configuration for a cluster's + node pool. Node pools are a set of nodes (i.e. VMs), with a + common configuration and specification, under the control of the + cluster master. They may have a set of Kubernetes labels applied + to them, which may be used to reference them during pod + scheduling. They may also be resized up or down, to accommodate + the workload.
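+ + A minimal construction sketch (all values are placeholders):: + + pool = NodePool( + name="default-pool", + initial_node_count=3, + autoscaling=NodePoolAutoscaling( + enabled=True, + min_node_count=1, + max_node_count=5, + ), + ) +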
+ These upgrade settings control the level of parallelism and the + level of disruption caused by an upgrade. + + maxUnavailable controls the number of nodes that can be + simultaneously unavailable. + + maxSurge controls the number of additional nodes that can be + added to the node pool temporarily for the time of the upgrade + to increase the number of available nodes. + + (maxUnavailable + maxSurge) determines the level of parallelism + (how many nodes are being upgraded at the same time). + + Note: upgrades inevitably introduce some disruption since + workloads need to be moved from old nodes to new, upgraded ones. + Even if maxUnavailable=0, this holds true. (Disruption stays + within the limits of PodDisruptionBudget, if it is configured.) + + Consider a hypothetical node pool with 5 nodes having + maxSurge=2, maxUnavailable=1. This means the upgrade process + upgrades 3 nodes simultaneously. It creates 2 additional + (upgraded) nodes, then it brings down 3 old (not yet upgraded) + nodes at the same time. This ensures that there are always at + least 4 nodes available. + + Attributes: + name (str): + The name of the node pool. + config (google.cloud.container_v1beta1.types.NodeConfig): + The node configuration of the pool. + initial_node_count (int): + The initial node count for the pool. You must ensure that + your Compute Engine `resource + quota `__ is + sufficient for this number of instances. You must also have + available firewall and routes quota. + locations (MutableSequence[str]): + The list of Google Compute Engine + `zones `__ + in which the NodePool's nodes should be located. + + If this value is unspecified during node pool creation, the + `Cluster.Locations `__ + value will be used, instead. + + Warning: changing node pool locations will result in nodes + being added and/or removed. + network_config (google.cloud.container_v1beta1.types.NodeNetworkConfig): + Networking configuration for this NodePool. + If specified, it overrides the cluster-level + defaults. + self_link (str): + [Output only] Server-defined URL for the resource. + version (str): + The version of Kubernetes running on this NodePool's nodes. + If unspecified, it defaults as described + `here `__. + instance_group_urls (MutableSequence[str]): + [Output only] The resource URLs of the `managed instance + groups `__ + associated with this node pool. During the node pool + blue-green upgrade operation, the URLs contain both blue and + green resources. + status (google.cloud.container_v1beta1.types.NodePool.Status): + [Output only] The status of the nodes in this pool instance. + status_message (str): + [Output only] Deprecated. Use conditions instead. Additional + information about the current status of this node pool + instance, if available. + autoscaling (google.cloud.container_v1beta1.types.NodePoolAutoscaling): + Autoscaler configuration for this NodePool. + Autoscaler is enabled only if a valid + configuration is present. + management (google.cloud.container_v1beta1.types.NodeManagement): + NodeManagement configuration for this + NodePool. + max_pods_constraint (google.cloud.container_v1beta1.types.MaxPodsConstraint): + The constraint on the maximum number of pods + that can be run simultaneously on a node in the + node pool. + conditions (MutableSequence[google.cloud.container_v1beta1.types.StatusCondition]): + Which conditions caused the current node pool + state. + pod_ipv4_cidr_size (int): + [Output only] The pod CIDR block size per node in this node + pool. 
+ upgrade_settings (google.cloud.container_v1beta1.types.NodePool.UpgradeSettings): + Upgrade settings control disruption and speed + of the upgrade. + placement_policy (google.cloud.container_v1beta1.types.NodePool.PlacementPolicy): + Specifies the node placement policy. + update_info (google.cloud.container_v1beta1.types.NodePool.UpdateInfo): + Output only. [Output only] Update info contains relevant + information during a node pool update. + etag (str): + This checksum is computed by the server based + on the value of node pool fields, and may be + sent on update requests to ensure the client has + an up-to-date value before proceeding. + best_effort_provisioning (google.cloud.container_v1beta1.types.BestEffortProvisioning): + Enable best effort provisioning for nodes + """ + + class Status(proto.Enum): + r"""The current status of the node pool instance. + + Values: + STATUS_UNSPECIFIED (0): + Not set. + PROVISIONING (1): + The PROVISIONING state indicates the node + pool is being created. + RUNNING (2): + The RUNNING state indicates the node pool has + been created and is fully usable. + RUNNING_WITH_ERROR (3): + The RUNNING_WITH_ERROR state indicates the node pool has + been created and is partially usable. Some error state has + occurred and some functionality may be impaired. Customer + may need to reissue a request or trigger a new update. + RECONCILING (4): + The RECONCILING state indicates that some work is actively + being done on the node pool, such as upgrading node + software. Details can be found in the ``statusMessage`` + field. + STOPPING (5): + The STOPPING state indicates the node pool is + being deleted. + ERROR (6): + The ERROR state indicates the node pool may be unusable. + Details can be found in the ``statusMessage`` field. + """ + STATUS_UNSPECIFIED = 0 + PROVISIONING = 1 + RUNNING = 2 + RUNNING_WITH_ERROR = 3 + RECONCILING = 4 + STOPPING = 5 + ERROR = 6 + + class UpgradeSettings(proto.Message): + r"""These upgrade settings configure the upgrade strategy for the node + pool. Use strategy to switch between the strategies applied to the + node pool. + + If the strategy is SURGE, use max_surge and max_unavailable to + control the level of parallelism and the level of disruption caused + by upgrade. + + 1. maxSurge controls the number of additional nodes that can be + added to the node pool temporarily for the time of the upgrade to + increase the number of available nodes. + 2. maxUnavailable controls the number of nodes that can be + simultaneously unavailable. + 3. (maxUnavailable + maxSurge) determines the level of parallelism + (how many nodes are being upgraded at the same time). + + If the strategy is BLUE_GREEN, use blue_green_settings to configure + the blue-green upgrade related settings. + + 1. standard_rollout_policy is the default policy. The policy is used + to control the way blue pool gets drained. The draining is + executed in the batch mode. The batch size could be specified as + either percentage of the node pool size or the number of nodes. + batch_soak_duration is the soak time after each batch gets + drained. + 2. node_pool_soak_duration is the soak time after all blue nodes are + drained. After this period, the blue pool nodes will be deleted. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_surge (int): + The maximum number of nodes that can be + created beyond the current size of the node pool + during the upgrade process. 
+ max_unavailable (int): + The maximum number of nodes that can be + simultaneously unavailable during the upgrade + process. A node is considered available if its + status is Ready. + strategy (google.cloud.container_v1beta1.types.NodePoolUpdateStrategy): + Update strategy of the node pool. + + This field is a member of `oneof`_ ``_strategy``. + blue_green_settings (google.cloud.container_v1beta1.types.BlueGreenSettings): + Settings for blue-green upgrade strategy. + + This field is a member of `oneof`_ ``_blue_green_settings``. + """ + + max_surge: int = proto.Field( + proto.INT32, + number=1, + ) + max_unavailable: int = proto.Field( + proto.INT32, + number=2, + ) + strategy: "NodePoolUpdateStrategy" = proto.Field( + proto.ENUM, + number=3, + optional=True, + enum="NodePoolUpdateStrategy", + ) + blue_green_settings: "BlueGreenSettings" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="BlueGreenSettings", + ) + + class UpdateInfo(proto.Message): + r"""UpdateInfo contains resource (instance groups, etc), status + and other intermediate information relevant to a node pool + upgrade. + + Attributes: + blue_green_info (google.cloud.container_v1beta1.types.NodePool.UpdateInfo.BlueGreenInfo): + Information of a blue-green upgrade. + """ + + class BlueGreenInfo(proto.Message): + r"""Information relevant to blue-green upgrade. + + Attributes: + phase (google.cloud.container_v1beta1.types.NodePool.UpdateInfo.BlueGreenInfo.Phase): + Current blue-green upgrade phase. + blue_instance_group_urls (MutableSequence[str]): + The resource URLs of the [managed instance groups] + (/compute/docs/instance-groups/creating-groups-of-managed-instances) + associated with blue pool. + green_instance_group_urls (MutableSequence[str]): + The resource URLs of the [managed instance groups] + (/compute/docs/instance-groups/creating-groups-of-managed-instances) + associated with green pool. + blue_pool_deletion_start_time (str): + Time to start deleting blue pool to complete blue-green + upgrade, in + `RFC3339 `__ text + format. + green_pool_version (str): + Version of green pool. + """ + + class Phase(proto.Enum): + r"""Phase represents the different stages blue-green upgrade is + running in. + + Values: + PHASE_UNSPECIFIED (0): + Unspecified phase. + UPDATE_STARTED (1): + blue-green upgrade has been initiated. + CREATING_GREEN_POOL (2): + Start creating green pool nodes. + CORDONING_BLUE_POOL (3): + Start cordoning blue pool nodes. + DRAINING_BLUE_POOL (4): + Start draining blue pool nodes. + NODE_POOL_SOAKING (5): + Start soaking time after draining entire blue + pool. + DELETING_BLUE_POOL (6): + Start deleting blue nodes. + ROLLBACK_STARTED (7): + Rollback has been initiated. 
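+
+                As an illustrative sketch, client code might poll this
+                phase as follows (``pool`` is assumed to be a
+                previously fetched ``NodePool``)::
+
+                    from google.cloud.container_v1beta1 import types
+
+                    Phase = types.NodePool.UpdateInfo.BlueGreenInfo.Phase
+                    info = pool.update_info.blue_green_info
+                    if info.phase == Phase.NODE_POOL_SOAKING:
+                        print(info.blue_pool_deletion_start_time)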
+ """ + PHASE_UNSPECIFIED = 0 + UPDATE_STARTED = 1 + CREATING_GREEN_POOL = 2 + CORDONING_BLUE_POOL = 3 + DRAINING_BLUE_POOL = 4 + NODE_POOL_SOAKING = 5 + DELETING_BLUE_POOL = 6 + ROLLBACK_STARTED = 7 + + phase: "NodePool.UpdateInfo.BlueGreenInfo.Phase" = proto.Field( + proto.ENUM, + number=1, + enum="NodePool.UpdateInfo.BlueGreenInfo.Phase", + ) + blue_instance_group_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + green_instance_group_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + blue_pool_deletion_start_time: str = proto.Field( + proto.STRING, + number=4, + ) + green_pool_version: str = proto.Field( + proto.STRING, + number=5, + ) + + blue_green_info: "NodePool.UpdateInfo.BlueGreenInfo" = proto.Field( + proto.MESSAGE, + number=1, + message="NodePool.UpdateInfo.BlueGreenInfo", + ) + + class PlacementPolicy(proto.Message): + r"""PlacementPolicy defines the placement policy used by the node + pool. + + Attributes: + type_ (google.cloud.container_v1beta1.types.NodePool.PlacementPolicy.Type): + The type of placement. + tpu_topology (str): + TPU placement topology for pod slice node pool. + https://cloud.google.com/tpu/docs/types-topologies#tpu_topologies + policy_name (str): + If set, refers to the name of a custom + resource policy supplied by the user. The + resource policy must be in the same project and + region as the node pool. If not found, + InvalidArgument error is returned. + """ + + class Type(proto.Enum): + r"""Type defines the type of placement policy. + + Values: + TYPE_UNSPECIFIED (0): + TYPE_UNSPECIFIED specifies no requirements on nodes + placement. + COMPACT (1): + COMPACT specifies node placement in the same + availability domain to ensure low communication + latency. 
+ """ + TYPE_UNSPECIFIED = 0 + COMPACT = 1 + + type_: "NodePool.PlacementPolicy.Type" = proto.Field( + proto.ENUM, + number=1, + enum="NodePool.PlacementPolicy.Type", + ) + tpu_topology: str = proto.Field( + proto.STRING, + number=2, + ) + policy_name: str = proto.Field( + proto.STRING, + number=3, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: "NodeConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="NodeConfig", + ) + initial_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + network_config: "NodeNetworkConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="NodeNetworkConfig", + ) + self_link: str = proto.Field( + proto.STRING, + number=100, + ) + version: str = proto.Field( + proto.STRING, + number=101, + ) + instance_group_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=102, + ) + status: Status = proto.Field( + proto.ENUM, + number=103, + enum=Status, + ) + status_message: str = proto.Field( + proto.STRING, + number=104, + ) + autoscaling: "NodePoolAutoscaling" = proto.Field( + proto.MESSAGE, + number=4, + message="NodePoolAutoscaling", + ) + management: "NodeManagement" = proto.Field( + proto.MESSAGE, + number=5, + message="NodeManagement", + ) + max_pods_constraint: "MaxPodsConstraint" = proto.Field( + proto.MESSAGE, + number=6, + message="MaxPodsConstraint", + ) + conditions: MutableSequence["StatusCondition"] = proto.RepeatedField( + proto.MESSAGE, + number=105, + message="StatusCondition", + ) + pod_ipv4_cidr_size: int = proto.Field( + proto.INT32, + number=7, + ) + upgrade_settings: UpgradeSettings = proto.Field( + proto.MESSAGE, + number=107, + message=UpgradeSettings, + ) + placement_policy: PlacementPolicy = proto.Field( + proto.MESSAGE, + number=108, + message=PlacementPolicy, + ) + update_info: UpdateInfo = proto.Field( + proto.MESSAGE, + number=109, + message=UpdateInfo, + ) + etag: str = proto.Field( + proto.STRING, + number=110, + ) + best_effort_provisioning: "BestEffortProvisioning" = proto.Field( + proto.MESSAGE, + number=113, + message="BestEffortProvisioning", + ) + + +class NodeManagement(proto.Message): + r"""NodeManagement defines the set of node management services + turned on for the node pool. + + Attributes: + auto_upgrade (bool): + Whether the nodes will be automatically + upgraded. + auto_repair (bool): + Whether the nodes will be automatically + repaired. + upgrade_options (google.cloud.container_v1beta1.types.AutoUpgradeOptions): + Specifies the Auto Upgrade knobs for the node + pool. + """ + + auto_upgrade: bool = proto.Field( + proto.BOOL, + number=1, + ) + auto_repair: bool = proto.Field( + proto.BOOL, + number=2, + ) + upgrade_options: "AutoUpgradeOptions" = proto.Field( + proto.MESSAGE, + number=10, + message="AutoUpgradeOptions", + ) + + +class AutoUpgradeOptions(proto.Message): + r"""AutoUpgradeOptions defines the set of options for the user to + control how the Auto Upgrades will proceed. + + Attributes: + auto_upgrade_start_time (str): + [Output only] This field is set when upgrades are about to + commence with the approximate start time for the upgrades, + in `RFC3339 `__ text + format. + description (str): + [Output only] This field is set when upgrades are about to + commence with the description of the upgrade. 
+ """ + + auto_upgrade_start_time: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MaintenancePolicy(proto.Message): + r"""MaintenancePolicy defines the maintenance policy to be used + for the cluster. + + Attributes: + window (google.cloud.container_v1beta1.types.MaintenanceWindow): + Specifies the maintenance window in which + maintenance may be performed. + resource_version (str): + A hash identifying the version of this policy, so that + updates to fields of the policy won't accidentally undo + intermediate changes (and so that users of the API unaware + of some fields won't accidentally remove other fields). Make + a ``get()`` request to the cluster to get the current + resource version and include it with requests to set the + policy. + """ + + window: "MaintenanceWindow" = proto.Field( + proto.MESSAGE, + number=1, + message="MaintenanceWindow", + ) + resource_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class MaintenanceWindow(proto.Message): + r"""MaintenanceWindow defines the maintenance window to be used + for the cluster. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + daily_maintenance_window (google.cloud.container_v1beta1.types.DailyMaintenanceWindow): + DailyMaintenanceWindow specifies a daily + maintenance operation window. + + This field is a member of `oneof`_ ``policy``. + recurring_window (google.cloud.container_v1beta1.types.RecurringTimeWindow): + RecurringWindow specifies some number of + recurring time periods for maintenance to occur. + The time windows may be overlapping. If no + maintenance windows are set, maintenance can + occur at any time. + + This field is a member of `oneof`_ ``policy``. + maintenance_exclusions (MutableMapping[str, google.cloud.container_v1beta1.types.TimeWindow]): + Exceptions to maintenance window. + Non-emergency maintenance should not occur in + these windows. + """ + + daily_maintenance_window: "DailyMaintenanceWindow" = proto.Field( + proto.MESSAGE, + number=2, + oneof="policy", + message="DailyMaintenanceWindow", + ) + recurring_window: "RecurringTimeWindow" = proto.Field( + proto.MESSAGE, + number=3, + oneof="policy", + message="RecurringTimeWindow", + ) + maintenance_exclusions: MutableMapping[str, "TimeWindow"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=4, + message="TimeWindow", + ) + + +class TimeWindow(proto.Message): + r"""Represents an arbitrary window of time. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + maintenance_exclusion_options (google.cloud.container_v1beta1.types.MaintenanceExclusionOptions): + MaintenanceExclusionOptions provides + maintenance exclusion related options. + + This field is a member of `oneof`_ ``options``. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time that the window first starts. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time that the window ends. The end time + should take place after the start time. 
+ """ + + maintenance_exclusion_options: "MaintenanceExclusionOptions" = proto.Field( + proto.MESSAGE, + number=3, + oneof="options", + message="MaintenanceExclusionOptions", + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class MaintenanceExclusionOptions(proto.Message): + r"""Represents the Maintenance exclusion option. + + Attributes: + scope (google.cloud.container_v1beta1.types.MaintenanceExclusionOptions.Scope): + Scope specifies the upgrade scope which + upgrades are blocked by the exclusion. + """ + + class Scope(proto.Enum): + r"""Scope of exclusion. + + Values: + NO_UPGRADES (0): + NO_UPGRADES excludes all upgrades, including patch upgrades + and minor upgrades across control planes and nodes. This is + the default exclusion behavior. + NO_MINOR_UPGRADES (1): + NO_MINOR_UPGRADES excludes all minor upgrades for the + cluster, only patches are allowed. + NO_MINOR_OR_NODE_UPGRADES (2): + NO_MINOR_OR_NODE_UPGRADES excludes all minor upgrades for + the cluster, and also exclude all node pool upgrades. Only + control plane patches are allowed. + """ + NO_UPGRADES = 0 + NO_MINOR_UPGRADES = 1 + NO_MINOR_OR_NODE_UPGRADES = 2 + + scope: Scope = proto.Field( + proto.ENUM, + number=1, + enum=Scope, + ) + + +class RecurringTimeWindow(proto.Message): + r"""Represents an arbitrary window of time that recurs. + + Attributes: + window (google.cloud.container_v1beta1.types.TimeWindow): + The window of the first recurrence. + recurrence (str): + An RRULE + (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for + how this window reccurs. They go on for the span of time + between the start and end time. + + For example, to have something repeat every weekday, you'd + use: ``FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR`` + + To repeat some window daily (equivalent to the + DailyMaintenanceWindow): ``FREQ=DAILY`` + + For the first weekend of every month: + ``FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU`` + + This specifies how frequently the window starts. Eg, if you + wanted to have a 9-5 UTC-4 window every weekday, you'd use + something like: + + :: + + start time = 2019-01-01T09:00:00-0400 + end time = 2019-01-01T17:00:00-0400 + recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR + + Windows can span multiple days. Eg, to make the window + encompass every weekend from midnight Saturday till the last + minute of Sunday UTC: + + :: + + start time = 2019-01-05T00:00:00Z + end time = 2019-01-07T23:59:00Z + recurrence = FREQ=WEEKLY;BYDAY=SA + + Note the start and end time's specific dates are largely + arbitrary except to specify duration of the window and when + it first starts. The FREQ values of HOURLY, MINUTELY, and + SECONDLY are not supported. + """ + + window: "TimeWindow" = proto.Field( + proto.MESSAGE, + number=1, + message="TimeWindow", + ) + recurrence: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DailyMaintenanceWindow(proto.Message): + r"""Time window specified for daily maintenance operations. + + Attributes: + start_time (str): + Time within the maintenance window to start the maintenance + operations. It must be in format "HH:MM", where HH : [00-23] + and MM : [00-59] GMT. + duration (str): + [Output only] Duration of the time window, automatically + chosen to be smallest possible in the given scenario. 
+ """ + + start_time: str = proto.Field( + proto.STRING, + number=2, + ) + duration: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SetNodePoolManagementRequest(proto.Message): + r"""SetNodePoolManagementRequest sets the node management + properties of a node pool. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to update. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Required. Deprecated. The name of the node + pool to update. This field has been deprecated + and replaced by the name field. + management (google.cloud.container_v1beta1.types.NodeManagement): + Required. NodeManagement configuration for + the node pool. + name (str): + The name (project, location, cluster, node pool id) of the + node pool to set management properties. Specified in the + format ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + management: "NodeManagement" = proto.Field( + proto.MESSAGE, + number=5, + message="NodeManagement", + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SetNodePoolSizeRequest(proto.Message): + r"""SetNodePoolSizeRequest sets the size of a node pool. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to update. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Required. Deprecated. The name of the node + pool to update. This field has been deprecated + and replaced by the name field. + node_count (int): + Required. The desired node count for the + pool. + name (str): + The name (project, location, cluster, node pool id) of the + node pool to set size. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + node_count: int = proto.Field( + proto.INT32, + number=5, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CompleteNodePoolUpgradeRequest(proto.Message): + r"""CompleteNodePoolUpgradeRequest sets the name of target node + pool to complete upgrade. + + Attributes: + name (str): + The name (project, location, cluster, node pool id) of the + node pool to complete upgrade. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RollbackNodePoolUpgradeRequest(proto.Message): + r"""RollbackNodePoolUpgradeRequest rollbacks the previously + Aborted or Failed NodePool upgrade. This will be an no-op if the + last upgrade successfully completed. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to rollback. This field has been deprecated and + replaced by the name field. + node_pool_id (str): + Required. Deprecated. The name of the node + pool to rollback. This field has been deprecated + and replaced by the name field. + name (str): + The name (project, location, cluster, node pool id) of the + node poll to rollback upgrade. Specified in the format + ``projects/*/locations/*/clusters/*/nodePools/*``. + respect_pdb (bool): + Option for rollback to ignore the + PodDisruptionBudget. Default value is false. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + node_pool_id: str = proto.Field( + proto.STRING, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + respect_pdb: bool = proto.Field( + proto.BOOL, + number=7, + ) + + +class ListNodePoolsResponse(proto.Message): + r"""ListNodePoolsResponse is the result of ListNodePoolsRequest. + + Attributes: + node_pools (MutableSequence[google.cloud.container_v1beta1.types.NodePool]): + A list of node pools for a cluster. + """ + + node_pools: MutableSequence["NodePool"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NodePool", + ) + + +class ClusterAutoscaling(proto.Message): + r"""ClusterAutoscaling contains global, per-cluster information + required by Cluster Autoscaler to automatically adjust the size + of the cluster and create/delete + node pools based on the current needs. + + Attributes: + enable_node_autoprovisioning (bool): + Enables automatic node pool creation and + deletion. + resource_limits (MutableSequence[google.cloud.container_v1beta1.types.ResourceLimit]): + Contains global constraints regarding minimum + and maximum amount of resources in the cluster. + autoscaling_profile (google.cloud.container_v1beta1.types.ClusterAutoscaling.AutoscalingProfile): + Defines autoscaling behaviour. + autoprovisioning_node_pool_defaults (google.cloud.container_v1beta1.types.AutoprovisioningNodePoolDefaults): + AutoprovisioningNodePoolDefaults contains + defaults for a node pool created by NAP. + autoprovisioning_locations (MutableSequence[str]): + The list of Google Compute Engine + `zones `__ + in which the NodePool's nodes can be created by NAP. + """ + + class AutoscalingProfile(proto.Enum): + r"""Defines possible options for autoscaling_profile field. + + Values: + PROFILE_UNSPECIFIED (0): + No change to autoscaling configuration. + OPTIMIZE_UTILIZATION (1): + Prioritize optimizing utilization of + resources. + BALANCED (2): + Use default (balanced) autoscaling + configuration. 
+ """ + PROFILE_UNSPECIFIED = 0 + OPTIMIZE_UTILIZATION = 1 + BALANCED = 2 + + enable_node_autoprovisioning: bool = proto.Field( + proto.BOOL, + number=1, + ) + resource_limits: MutableSequence["ResourceLimit"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ResourceLimit", + ) + autoscaling_profile: AutoscalingProfile = proto.Field( + proto.ENUM, + number=3, + enum=AutoscalingProfile, + ) + autoprovisioning_node_pool_defaults: "AutoprovisioningNodePoolDefaults" = ( + proto.Field( + proto.MESSAGE, + number=4, + message="AutoprovisioningNodePoolDefaults", + ) + ) + autoprovisioning_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class AutoprovisioningNodePoolDefaults(proto.Message): + r"""AutoprovisioningNodePoolDefaults contains defaults for a node + pool created by NAP. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + oauth_scopes (MutableSequence[str]): + The set of Google API scopes to be made available on all of + the node VMs under the "default" service account. + + The following scopes are recommended, but not required, and + by default are not included: + + - ``https://www.googleapis.com/auth/compute`` is required + for mounting persistent storage on your nodes. + - ``https://www.googleapis.com/auth/devstorage.read_only`` + is required for communicating with **gcr.io** (the + `Google Container + Registry `__). + + If unspecified, no scopes are added, unless Cloud Logging or + Cloud Monitoring are enabled, in which case their required + scopes will be added. + service_account (str): + The Google Cloud Platform Service Account to + be used by the node VMs. Specify the email + address of the Service Account; otherwise, if no + Service Account is specified, the "default" + service account is used. + upgrade_settings (google.cloud.container_v1beta1.types.NodePool.UpgradeSettings): + Upgrade settings control disruption and speed + of the upgrade. + management (google.cloud.container_v1beta1.types.NodeManagement): + NodeManagement configuration for this + NodePool. + min_cpu_platform (str): + Deprecated. Minimum CPU platform to be used for NAP created + node pools. The instance may be scheduled on the specified + or newer CPU platform. Applicable values are the friendly + names of CPU platforms, such as minCpuPlatform: Intel + Haswell or minCpuPlatform: Intel Sandy Bridge. For more + information, read `how to specify min CPU + platform `__. + This field is deprecated, min_cpu_platform should be + specified using + ``cloud.google.com/requested-min-cpu-platform`` label + selector on the pod. To unset the min cpu platform field + pass "automatic" as field value. + disk_size_gb (int): + Size of the disk attached to each node, + specified in GB. The smallest allowed disk size + is 10GB. + + If unspecified, the default disk size is 100GB. + disk_type (str): + Type of the disk attached to each node (e.g. + 'pd-standard', 'pd-ssd' or 'pd-balanced') + + If unspecified, the default disk type is + 'pd-standard' + shielded_instance_config (google.cloud.container_v1beta1.types.ShieldedInstanceConfig): + Shielded Instance options. + boot_disk_kms_key (str): + The Customer Managed Encryption Key used to encrypt the boot + disk attached to each node in the node pool. This should be + of the form + projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. 
+ For more information about protecting resources with Cloud + KMS Keys please see: + https://cloud.google.com/compute/docs/disks/customer-managed-encryption + image_type (str): + The image type to use for NAP created node. + Please see + https://cloud.google.com/kubernetes-engine/docs/concepts/node-images + for available image types. + insecure_kubelet_readonly_port_enabled (bool): + Enable or disable Kubelet read only port. + + This field is a member of `oneof`_ ``_insecure_kubelet_readonly_port_enabled``. + """ + + oauth_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + service_account: str = proto.Field( + proto.STRING, + number=2, + ) + upgrade_settings: "NodePool.UpgradeSettings" = proto.Field( + proto.MESSAGE, + number=3, + message="NodePool.UpgradeSettings", + ) + management: "NodeManagement" = proto.Field( + proto.MESSAGE, + number=4, + message="NodeManagement", + ) + min_cpu_platform: str = proto.Field( + proto.STRING, + number=5, + ) + disk_size_gb: int = proto.Field( + proto.INT32, + number=6, + ) + disk_type: str = proto.Field( + proto.STRING, + number=7, + ) + shielded_instance_config: "ShieldedInstanceConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="ShieldedInstanceConfig", + ) + boot_disk_kms_key: str = proto.Field( + proto.STRING, + number=9, + ) + image_type: str = proto.Field( + proto.STRING, + number=10, + ) + insecure_kubelet_readonly_port_enabled: bool = proto.Field( + proto.BOOL, + number=13, + optional=True, + ) + + +class ResourceLimit(proto.Message): + r"""Contains information about amount of some resource in the + cluster. For memory, value should be in GB. + + Attributes: + resource_type (str): + Resource name "cpu", "memory" or gpu-specific + string. + minimum (int): + Minimum amount of the resource in the + cluster. + maximum (int): + Maximum amount of the resource in the + cluster. + """ + + resource_type: str = proto.Field( + proto.STRING, + number=1, + ) + minimum: int = proto.Field( + proto.INT64, + number=2, + ) + maximum: int = proto.Field( + proto.INT64, + number=3, + ) + + +class NodePoolAutoscaling(proto.Message): + r"""NodePoolAutoscaling contains information required by cluster + autoscaler to adjust the size of the node pool to the current + cluster usage. + + Attributes: + enabled (bool): + Is autoscaling enabled for this node pool. + min_node_count (int): + Minimum number of nodes for one location in the NodePool. + Must be >= 1 and <= max_node_count. + max_node_count (int): + Maximum number of nodes for one location in the NodePool. + Must be >= min_node_count. There has to be enough quota to + scale up the cluster. + autoprovisioned (bool): + Can this node pool be deleted automatically. + location_policy (google.cloud.container_v1beta1.types.NodePoolAutoscaling.LocationPolicy): + Location policy used when scaling up a + nodepool. + total_min_node_count (int): + Minimum number of nodes in the node pool. Must be greater + than 1 less than total_max_node_count. The + total_*_node_count fields are mutually exclusive with the + \*_node_count fields. + total_max_node_count (int): + Maximum number of nodes in the node pool. Must be greater + than total_min_node_count. There has to be enough quota to + scale up the cluster. The total_*_node_count fields are + mutually exclusive with the \*_node_count fields. + """ + + class LocationPolicy(proto.Enum): + r"""Location policy specifies how zones are picked when scaling + up the nodepool. + + Values: + LOCATION_POLICY_UNSPECIFIED (0): + Not set. 
+ BALANCED (1): + BALANCED is a best effort policy that aims to + balance the sizes of different zones. + ANY (2): + ANY policy picks zones that have the highest + capacity available. + """ + LOCATION_POLICY_UNSPECIFIED = 0 + BALANCED = 1 + ANY = 2 + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + min_node_count: int = proto.Field( + proto.INT32, + number=2, + ) + max_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + autoprovisioned: bool = proto.Field( + proto.BOOL, + number=4, + ) + location_policy: LocationPolicy = proto.Field( + proto.ENUM, + number=5, + enum=LocationPolicy, + ) + total_min_node_count: int = proto.Field( + proto.INT32, + number=6, + ) + total_max_node_count: int = proto.Field( + proto.INT32, + number=7, + ) + + +class SetLabelsRequest(proto.Message): + r"""SetLabelsRequest sets the Google Cloud Platform labels on a + Google Container Engine cluster, which will in turn set them for + Google Compute Engine resources used by that cluster + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated and + replaced by the name field. + resource_labels (MutableMapping[str, str]): + Required. The labels to set for that cluster. + label_fingerprint (str): + Required. The fingerprint of the previous set of labels for + this resource, used to detect conflicts. The fingerprint is + initially generated by Kubernetes Engine and changes after + every request to modify or update labels. You must always + provide an up-to-date fingerprint hash when updating or + changing labels. Make a ``get()`` request to the resource to + get the latest fingerprint. + name (str): + The name (project, location, cluster name) of the cluster to + set labels. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + label_fingerprint: str = proto.Field( + proto.STRING, + number=5, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class SetLegacyAbacRequest(proto.Message): + r"""SetLegacyAbacRequest enables or disables the ABAC + authorization mechanism for a cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the cluster + to update. This field has been deprecated and + replaced by the name field. + enabled (bool): + Required. Whether ABAC authorization will be + enabled in the cluster. + name (str): + The name (project, location, cluster name) of the cluster to + set legacy abac. Specified in the format + ``projects/*/locations/*/clusters/*``. 
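+
+    A minimal sketch using only the ``name`` field (the identifier is
+    a placeholder)::
+
+        from google.cloud.container_v1beta1 import types
+
+        request = types.SetLegacyAbacRequest(
+            name="projects/p/locations/us-central1/clusters/c",
+            enabled=False,
+        )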
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class StartIPRotationRequest(proto.Message): + r"""StartIPRotationRequest creates a new IP for the cluster and + then performs a node upgrade on each node pool to point to the + new IP. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated and + replaced by the name field. + name (str): + The name (project, location, cluster name) of the cluster to + start IP rotation. Specified in the format + ``projects/*/locations/*/clusters/*``. + rotate_credentials (bool): + Whether to rotate credentials during IP + rotation. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + rotate_credentials: bool = proto.Field( + proto.BOOL, + number=7, + ) + + +class CompleteIPRotationRequest(proto.Message): + r"""CompleteIPRotationRequest moves the cluster master back into + single-IP mode. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated and + replaced by the name field. + name (str): + The name (project, location, cluster name) of the cluster to + complete IP rotation. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class AcceleratorConfig(proto.Message): + r"""AcceleratorConfig represents a Hardware Accelerator request. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + accelerator_count (int): + The number of the accelerator cards exposed + to an instance. + accelerator_type (str): + The accelerator type resource name. List of supported + accelerators + `here `__ + gpu_partition_size (str): + Size of partitions to create on the GPU. Valid values are + described in the NVIDIA `mig user + guide `__. + max_time_shared_clients_per_gpu (int): + The number of time-shared GPU resources to + expose for each physical GPU. + gpu_sharing_config (google.cloud.container_v1beta1.types.GPUSharingConfig): + The configuration for GPU sharing options. 
+ + This field is a member of `oneof`_ ``_gpu_sharing_config``. + gpu_driver_installation_config (google.cloud.container_v1beta1.types.GPUDriverInstallationConfig): + The configuration for auto installation of + GPU driver. + + This field is a member of `oneof`_ ``_gpu_driver_installation_config``. + """ + + accelerator_count: int = proto.Field( + proto.INT64, + number=1, + ) + accelerator_type: str = proto.Field( + proto.STRING, + number=2, + ) + gpu_partition_size: str = proto.Field( + proto.STRING, + number=3, + ) + max_time_shared_clients_per_gpu: int = proto.Field( + proto.INT64, + number=4, + ) + gpu_sharing_config: "GPUSharingConfig" = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message="GPUSharingConfig", + ) + gpu_driver_installation_config: "GPUDriverInstallationConfig" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="GPUDriverInstallationConfig", + ) + + +class GPUSharingConfig(proto.Message): + r"""GPUSharingConfig represents the GPU sharing configuration for + Hardware Accelerators. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_shared_clients_per_gpu (int): + The max number of containers that can share a + physical GPU. + gpu_sharing_strategy (google.cloud.container_v1beta1.types.GPUSharingConfig.GPUSharingStrategy): + The type of GPU sharing strategy to enable on + the GPU node. + + This field is a member of `oneof`_ ``_gpu_sharing_strategy``. + """ + + class GPUSharingStrategy(proto.Enum): + r"""The type of GPU sharing strategy currently provided. + + Values: + GPU_SHARING_STRATEGY_UNSPECIFIED (0): + Default value. + TIME_SHARING (1): + GPUs are time-shared between containers. + """ + GPU_SHARING_STRATEGY_UNSPECIFIED = 0 + TIME_SHARING = 1 + + max_shared_clients_per_gpu: int = proto.Field( + proto.INT64, + number=1, + ) + gpu_sharing_strategy: GPUSharingStrategy = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=GPUSharingStrategy, + ) + + +class GPUDriverInstallationConfig(proto.Message): + r"""GPUDriverInstallationConfig specifies the version of GPU + driver to be auto installed. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gpu_driver_version (google.cloud.container_v1beta1.types.GPUDriverInstallationConfig.GPUDriverVersion): + Mode for how the GPU driver is installed. + + This field is a member of `oneof`_ ``_gpu_driver_version``. + """ + + class GPUDriverVersion(proto.Enum): + r"""The GPU driver version to install. + + Values: + GPU_DRIVER_VERSION_UNSPECIFIED (0): + Default value is to not install any GPU + driver. + INSTALLATION_DISABLED (1): + Disable GPU driver auto installation and + needs manual installation + DEFAULT (2): + "Default" GPU driver in COS and Ubuntu. + LATEST (3): + "Latest" GPU driver in COS. + """ + GPU_DRIVER_VERSION_UNSPECIFIED = 0 + INSTALLATION_DISABLED = 1 + DEFAULT = 2 + LATEST = 3 + + gpu_driver_version: GPUDriverVersion = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=GPUDriverVersion, + ) + + +class ManagedPrometheusConfig(proto.Message): + r"""ManagedPrometheusConfig defines the configuration for + Google Cloud Managed Service for Prometheus. + + Attributes: + enabled (bool): + Enable Managed Collection. 
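+
+    For example, enabling managed collection is a single flag::
+
+        from google.cloud.container_v1beta1 import types
+
+        prometheus = types.ManagedPrometheusConfig(enabled=True)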
+ """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class WorkloadMetadataConfig(proto.Message): + r"""WorkloadMetadataConfig defines the metadata configuration to + expose to workloads on the node pool. + + Attributes: + node_metadata (google.cloud.container_v1beta1.types.WorkloadMetadataConfig.NodeMetadata): + NodeMetadata is the configuration for how to + expose metadata to the workloads running on the + node. + mode (google.cloud.container_v1beta1.types.WorkloadMetadataConfig.Mode): + Mode is the configuration for how to expose + metadata to workloads running on the node pool. + """ + + class NodeMetadata(proto.Enum): + r"""NodeMetadata is the configuration for if and how to expose + the node metadata to the workload running on the node. + + Values: + UNSPECIFIED (0): + Not set. + SECURE (1): + Prevent workloads not in hostNetwork from + accessing certain VM metadata, specifically + kube-env, which contains Kubelet credentials, + and the instance identity token. + + Metadata concealment is a temporary security + solution available while the bootstrapping + process for cluster nodes is being redesigned + with significant security improvements. This + feature is scheduled to be deprecated in the + future and later removed. + EXPOSE (2): + Expose all VM metadata to pods. + GKE_METADATA_SERVER (3): + Run the GKE Metadata Server on this node. The + GKE Metadata Server exposes a metadata API to + workloads that is compatible with the V1 Compute + Metadata APIs exposed by the Compute Engine and + App Engine Metadata Servers. This feature can + only be enabled if Workload Identity is enabled + at the cluster level. + """ + UNSPECIFIED = 0 + SECURE = 1 + EXPOSE = 2 + GKE_METADATA_SERVER = 3 + + class Mode(proto.Enum): + r"""Mode is the configuration for how to expose metadata to + workloads running on the node. + + Values: + MODE_UNSPECIFIED (0): + Not set. + GCE_METADATA (1): + Expose all Compute Engine metadata to pods. + GKE_METADATA (2): + Run the GKE Metadata Server on this node. The + GKE Metadata Server exposes a metadata API to + workloads that is compatible with the V1 Compute + Metadata APIs exposed by the Compute Engine and + App Engine Metadata Servers. This feature can + only be enabled if Workload Identity is enabled + at the cluster level. + """ + MODE_UNSPECIFIED = 0 + GCE_METADATA = 1 + GKE_METADATA = 2 + + node_metadata: NodeMetadata = proto.Field( + proto.ENUM, + number=1, + enum=NodeMetadata, + ) + mode: Mode = proto.Field( + proto.ENUM, + number=2, + enum=Mode, + ) + + +class SetNetworkPolicyRequest(proto.Message): + r"""SetNetworkPolicyRequest enables/disables network policy for a + cluster. + + Attributes: + project_id (str): + Required. Deprecated. The Google Developers Console `project + ID or project + number `__. + This field has been deprecated and replaced by the name + field. + zone (str): + Required. Deprecated. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. This field has been deprecated + and replaced by the name field. + cluster_id (str): + Required. Deprecated. The name of the + cluster. This field has been deprecated and + replaced by the name field. + network_policy (google.cloud.container_v1beta1.types.NetworkPolicy): + Required. Configuration options for the + NetworkPolicy feature. + name (str): + The name (project, location, cluster name) of the cluster to + set networking policy. Specified in the format + ``projects/*/locations/*/clusters/*``. 
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + network_policy: "NetworkPolicy" = proto.Field( + proto.MESSAGE, + number=4, + message="NetworkPolicy", + ) + name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class SetMaintenancePolicyRequest(proto.Message): + r"""SetMaintenancePolicyRequest sets the maintenance policy for a + cluster. + + Attributes: + project_id (str): + Required. The Google Developers Console `project ID or + project + number `__. + zone (str): + Required. The name of the Google Compute Engine + `zone `__ + in which the cluster resides. + cluster_id (str): + Required. The name of the cluster to update. + maintenance_policy (google.cloud.container_v1beta1.types.MaintenancePolicy): + Required. The maintenance policy to be set + for the cluster. An empty field clears the + existing maintenance policy. + name (str): + The name (project, location, cluster name) of the cluster to + set maintenance policy. Specified in the format + ``projects/*/locations/*/clusters/*``. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=4, + message="MaintenancePolicy", + ) + name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListLocationsRequest(proto.Message): + r"""ListLocationsRequest is used to request the locations that + offer GKE. + + Attributes: + parent (str): + Required. Contains the name of the resource requested. + Specified in the format ``projects/*``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListLocationsResponse(proto.Message): + r"""ListLocationsResponse returns the list of all GKE locations + and their recommendation state. + + Attributes: + locations (MutableSequence[google.cloud.container_v1beta1.types.Location]): + A full list of GKE locations. + next_page_token (str): + Only return ListLocationsResponse that occur after the + page_token. This value should be populated from the + ListLocationsResponse.next_page_token if that response token + was set (which happens when listing more Locations than fit + in a single ListLocationsResponse). + """ + + @property + def raw_page(self): + return self + + locations: MutableSequence["Location"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Location", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Location(proto.Message): + r"""Location returns the location name, and if the location is + recommended for GKE cluster scheduling. + + Attributes: + type_ (google.cloud.container_v1beta1.types.Location.LocationType): + Contains the type of location this Location + is for. Regional or Zonal. + name (str): + Contains the name of the resource requested. Specified in + the format ``projects/*/locations/*``. + recommended (bool): + Whether the location is recommended for GKE + cluster scheduling. + """ + + class LocationType(proto.Enum): + r"""LocationType is the type of GKE location, regional or zonal. + + Values: + LOCATION_TYPE_UNSPECIFIED (0): + LOCATION_TYPE_UNSPECIFIED means the location type was not + determined. + ZONE (1): + A GKE Location where Zonal clusters can be + created. 
+ REGION (2): + A GKE Location where Regional clusters can be + created. + """ + LOCATION_TYPE_UNSPECIFIED = 0 + ZONE = 1 + REGION = 2 + + type_: LocationType = proto.Field( + proto.ENUM, + number=1, + enum=LocationType, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + recommended: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class StatusCondition(proto.Message): + r"""StatusCondition describes why a cluster or a node pool has a + certain status (e.g., ERROR or DEGRADED). + + Attributes: + code (google.cloud.container_v1beta1.types.StatusCondition.Code): + Machine-friendly representation of the condition Deprecated. + Use canonical_code instead. + message (str): + Human-friendly representation of the + condition + canonical_code (google.rpc.code_pb2.Code): + Canonical code of the condition. + """ + + class Code(proto.Enum): + r"""Code for each condition + + Values: + UNKNOWN (0): + UNKNOWN indicates a generic condition. + GCE_STOCKOUT (1): + GCE_STOCKOUT indicates that Google Compute Engine resources + are temporarily unavailable. + GKE_SERVICE_ACCOUNT_DELETED (2): + GKE_SERVICE_ACCOUNT_DELETED indicates that the user deleted + their robot service account. + GCE_QUOTA_EXCEEDED (3): + Google Compute Engine quota was exceeded. + SET_BY_OPERATOR (4): + Cluster state was manually changed by an SRE + due to a system logic error. + CLOUD_KMS_KEY_ERROR (7): + Unable to perform an encrypt operation + against the CloudKMS key used for etcd level + encryption. + CA_EXPIRING (9): + Cluster CA is expiring soon. + More codes TBA + """ + _pb_options = {"deprecated": True} + UNKNOWN = 0 + GCE_STOCKOUT = 1 + GKE_SERVICE_ACCOUNT_DELETED = 2 + GCE_QUOTA_EXCEEDED = 3 + SET_BY_OPERATOR = 4 + CLOUD_KMS_KEY_ERROR = 7 + CA_EXPIRING = 9 + + code: Code = proto.Field( + proto.ENUM, + number=1, + enum=Code, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) + canonical_code: code_pb2.Code = proto.Field( + proto.ENUM, + number=3, + enum=code_pb2.Code, + ) + + +class NetworkConfig(proto.Message): + r"""NetworkConfig reports the relative names of network & + subnetwork. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Output only. The relative name of the Google Compute Engine + [network]`google.container.v1beta1.NetworkConfig.network `__ + to which the cluster is connected. Example: + projects/my-project/global/networks/my-network + subnetwork (str): + Output only. The relative name of the Google Compute Engine + `subnetwork `__ + to which the cluster is connected. Example: + projects/my-project/regions/us-central1/subnetworks/my-subnet + enable_intra_node_visibility (bool): + Whether Intra-node visibility is enabled for + this cluster. This makes same node pod to pod + traffic visible for VPC network. + default_snat_status (google.cloud.container_v1beta1.types.DefaultSnatStatus): + Whether the cluster disables default in-node sNAT rules. + In-node sNAT rules will be disabled when default_snat_status + is disabled. When disabled is set to false, default IP + masquerade rules will be applied to the nodes to prevent + sNAT on cluster internal traffic. + enable_l4ilb_subsetting (bool): + Whether L4ILB Subsetting is enabled for this + cluster. + datapath_provider (google.cloud.container_v1beta1.types.DatapathProvider): + The desired datapath provider for this + cluster. By default, uses the IPTables-based + kube-proxy implementation. 
+ private_ipv6_google_access (google.cloud.container_v1beta1.types.PrivateIPv6GoogleAccess): + The desired state of IPv6 connectivity to + Google Services. By default, no private IPv6 + access to or from Google Services (all access + will be via IPv4) + dns_config (google.cloud.container_v1beta1.types.DNSConfig): + DNSConfig contains clusterDNS config for this + cluster. + service_external_ips_config (google.cloud.container_v1beta1.types.ServiceExternalIPsConfig): + ServiceExternalIPsConfig specifies if + services with externalIPs field are blocked or + not. + gateway_api_config (google.cloud.container_v1beta1.types.GatewayAPIConfig): + GatewayAPIConfig contains the desired config + of Gateway API on this cluster. + enable_multi_networking (bool): + Whether multi-networking is enabled for this + cluster. + network_performance_config (google.cloud.container_v1beta1.types.NetworkConfig.ClusterNetworkPerformanceConfig): + Network bandwidth tier configuration. + enable_fqdn_network_policy (bool): + Whether FQDN Network Policy is enabled on + this cluster. + + This field is a member of `oneof`_ ``_enable_fqdn_network_policy``. + """ + + class ClusterNetworkPerformanceConfig(proto.Message): + r"""Configuration of all network bandwidth tiers + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_egress_bandwidth_tier (google.cloud.container_v1beta1.types.NetworkConfig.ClusterNetworkPerformanceConfig.Tier): + Specifies the total network bandwidth tier + for the NodePool. + + This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``. + """ + + class Tier(proto.Enum): + r"""Node network tier + + Values: + TIER_UNSPECIFIED (0): + Default value + TIER_1 (1): + Higher bandwidth, actual values based on VM + size. + """ + TIER_UNSPECIFIED = 0 + TIER_1 = 1 + + total_egress_bandwidth_tier: "NetworkConfig.ClusterNetworkPerformanceConfig.Tier" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="NetworkConfig.ClusterNetworkPerformanceConfig.Tier", + ) + + network: str = proto.Field( + proto.STRING, + number=1, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=2, + ) + enable_intra_node_visibility: bool = proto.Field( + proto.BOOL, + number=5, + ) + default_snat_status: "DefaultSnatStatus" = proto.Field( + proto.MESSAGE, + number=7, + message="DefaultSnatStatus", + ) + enable_l4ilb_subsetting: bool = proto.Field( + proto.BOOL, + number=10, + ) + datapath_provider: "DatapathProvider" = proto.Field( + proto.ENUM, + number=11, + enum="DatapathProvider", + ) + private_ipv6_google_access: "PrivateIPv6GoogleAccess" = proto.Field( + proto.ENUM, + number=12, + enum="PrivateIPv6GoogleAccess", + ) + dns_config: "DNSConfig" = proto.Field( + proto.MESSAGE, + number=13, + message="DNSConfig", + ) + service_external_ips_config: "ServiceExternalIPsConfig" = proto.Field( + proto.MESSAGE, + number=15, + message="ServiceExternalIPsConfig", + ) + gateway_api_config: "GatewayAPIConfig" = proto.Field( + proto.MESSAGE, + number=16, + message="GatewayAPIConfig", + ) + enable_multi_networking: bool = proto.Field( + proto.BOOL, + number=17, + ) + network_performance_config: ClusterNetworkPerformanceConfig = proto.Field( + proto.MESSAGE, + number=18, + message=ClusterNetworkPerformanceConfig, + ) + enable_fqdn_network_policy: bool = proto.Field( + proto.BOOL, + number=19, + optional=True, + ) + + +class GatewayAPIConfig(proto.Message): + r"""GatewayAPIConfig contains the desired config of Gateway API + on this cluster. 
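+
+    As an illustrative sketch, enabling the standard channel::
+
+        from google.cloud.container_v1beta1 import types
+
+        gateway_api = types.GatewayAPIConfig(
+            channel=types.GatewayAPIConfig.Channel.CHANNEL_STANDARD,
+        )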
+
+    Attributes:
+        channel (google.cloud.container_v1beta1.types.GatewayAPIConfig.Channel):
+            The Gateway API release channel to use for
+            Gateway API.
+    """
+
+    class Channel(proto.Enum):
+        r"""Channel describes if/how Gateway API should be installed and
+        implemented in a cluster.
+
+        Values:
+            CHANNEL_UNSPECIFIED (0):
+                Default value.
+            CHANNEL_DISABLED (1):
+                Gateway API support is disabled
+            CHANNEL_EXPERIMENTAL (3):
+                Gateway API support is enabled, experimental
+                CRDs are installed
+            CHANNEL_STANDARD (4):
+                Gateway API support is enabled, standard CRDs
+                are installed
+        """
+        CHANNEL_UNSPECIFIED = 0
+        CHANNEL_DISABLED = 1
+        CHANNEL_EXPERIMENTAL = 3
+        CHANNEL_STANDARD = 4
+
+    channel: Channel = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Channel,
+    )
+
+
+class ServiceExternalIPsConfig(proto.Message):
+    r"""Config to block services with externalIPs field.
+
+    Attributes:
+        enabled (bool):
+            Whether Services with ExternalIPs field are
+            allowed or not.
+    """
+
+    enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class ListUsableSubnetworksRequest(proto.Message):
+    r"""ListUsableSubnetworksRequest requests the list of usable
+    subnetworks available to a user for creating clusters.
+
+    Attributes:
+        parent (str):
+            Required. The parent project where subnetworks are usable.
+            Specified in the format ``projects/*``.
+        filter (str):
+            Filtering currently only supports equality on the
+            networkProjectId and must be in the form:
+            "networkProjectId=[PROJECTID]", where ``networkProjectId``
+            is the project which owns the listed subnetworks. This
+            defaults to the parent project ID.
+        page_size (int):
+            The max number of results per page that should be returned.
+            If the number of available results is larger than
+            ``page_size``, a ``next_page_token`` is returned which can
+            be used to get the next page of results in subsequent
+            requests. Acceptable values are 0 to 500, inclusive.
+            (Default: 500)
+        page_token (str):
+            Specifies a page token to use. Set this to
+            the nextPageToken returned by previous list
+            requests to get the next page of results.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListUsableSubnetworksResponse(proto.Message):
+    r"""ListUsableSubnetworksResponse is the response of
+    ListUsableSubnetworksRequest.
+
+    Attributes:
+        subnetworks (MutableSequence[google.cloud.container_v1beta1.types.UsableSubnetwork]):
+            A list of usable subnetworks in the specified
+            network project.
+        next_page_token (str):
+            This token allows you to get the next page of results for
+            list requests. If the number of results is larger than
+            ``page_size``, use the ``next_page_token`` as a value for
+            the query parameter ``page_token`` in the next request. The
+            value will become empty when there are no more pages.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    subnetworks: MutableSequence["UsableSubnetwork"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="UsableSubnetwork",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class UsableSubnetworkSecondaryRange(proto.Message):
+    r"""Secondary IP range of a usable subnetwork.
+
+    Attributes:
+        range_name (str):
+            The name associated with this subnetwork
+            secondary range, used when adding an alias IP
+            range to a VM instance.
+        ip_cidr_range (str):
+            The range of IP addresses belonging to this
+            subnetwork secondary range.
+        status (google.cloud.container_v1beta1.types.UsableSubnetworkSecondaryRange.Status):
+            This field is to determine the status of the
+            secondary range programmatically.
+    """
+
+    class Status(proto.Enum):
+        r"""Status shows the current usage of a secondary IP range.
+
+        Values:
+            UNKNOWN (0):
+                UNKNOWN is the zero value of the Status enum.
+                It's not a valid status.
+            UNUSED (1):
+                UNUSED denotes that this range is unclaimed
+                by any cluster.
+            IN_USE_SERVICE (2):
+                IN_USE_SERVICE denotes that this range is claimed by a
+                cluster for services. It cannot be used for other clusters.
+            IN_USE_SHAREABLE_POD (3):
+                IN_USE_SHAREABLE_POD denotes this range was created by the
+                network admin and is currently claimed by a cluster for
+                pods. It can only be used by other clusters as a pod range.
+            IN_USE_MANAGED_POD (4):
+                IN_USE_MANAGED_POD denotes this range was created by GKE and
+                is claimed for pods. It cannot be used for other clusters.
+        """
+        UNKNOWN = 0
+        UNUSED = 1
+        IN_USE_SERVICE = 2
+        IN_USE_SHAREABLE_POD = 3
+        IN_USE_MANAGED_POD = 4
+
+    range_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    ip_cidr_range: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    status: Status = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=Status,
+    )
+
+
+class UsableSubnetwork(proto.Message):
+    r"""UsableSubnetwork resource returns the subnetwork name, its
+    associated network and the primary CIDR range.
+
+    Attributes:
+        subnetwork (str):
+            Subnetwork Name.
+            Example:
+            projects/my-project/regions/us-central1/subnetworks/my-subnet
+        network (str):
+            Network Name.
+            Example:
+            projects/my-project/global/networks/my-network
+        ip_cidr_range (str):
+            The range of internal addresses that are
+            owned by this subnetwork.
+        secondary_ip_ranges (MutableSequence[google.cloud.container_v1beta1.types.UsableSubnetworkSecondaryRange]):
+            Secondary IP ranges.
+        status_message (str):
+            A human readable status message representing the reasons for
+            cases where the caller cannot use the secondary ranges under
+            the subnet. For example, if the secondary_ip_ranges is empty
+            due to a permission issue, an insufficient permission
+            message will be given by status_message.
+    """
+
+    subnetwork: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    network: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    ip_cidr_range: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    secondary_ip_ranges: MutableSequence[
+        "UsableSubnetworkSecondaryRange"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message="UsableSubnetworkSecondaryRange",
+    )
+    status_message: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class VerticalPodAutoscaling(proto.Message):
+    r"""VerticalPodAutoscaling contains global, per-cluster
+    information required by Vertical Pod Autoscaler to automatically
+    adjust the resources of pods controlled by it.
+
+    Attributes:
+        enabled (bool):
+            Enables vertical pod autoscaling.
+    """
+
+    enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
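+
+
+# Illustrative sketch (not part of the generated module): Vertical Pod
+# Autoscaling is toggled with a single flag, e.g.
+#
+#   vpa = VerticalPodAutoscaling(enabled=True)
+#
+# The resulting message is assigned to the cluster's
+# vertical_pod_autoscaling field (field name assumed from the wider API).
+
+
+class DefaultSnatStatus(proto.Message):
+    r"""DefaultSnatStatus contains the desired state of whether
+    default sNAT should be disabled on the cluster.
+
+    Attributes:
+        disabled (bool):
+            Disables cluster default sNAT rules.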
+    """
+
+    disabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class IntraNodeVisibilityConfig(proto.Message):
+    r"""IntraNodeVisibilityConfig contains the desired config of the
+    intra-node visibility on this cluster.
+
+    Attributes:
+        enabled (bool):
+            Enables intra-node visibility for this
+            cluster.
+    """
+
+    enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class ILBSubsettingConfig(proto.Message):
+    r"""ILBSubsettingConfig contains the desired config of L4
+    Internal LoadBalancer subsetting on this cluster.
+
+    Attributes:
+        enabled (bool):
+            Enables L4 ILB subsetting for this cluster.
+    """
+
+    enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class DNSConfig(proto.Message):
+    r"""DNSConfig contains the desired set of options for configuring
+    clusterDNS.
+
+    Attributes:
+        cluster_dns (google.cloud.container_v1beta1.types.DNSConfig.Provider):
+            cluster_dns indicates which in-cluster DNS provider should
+            be used.
+        cluster_dns_scope (google.cloud.container_v1beta1.types.DNSConfig.DNSScope):
+            cluster_dns_scope indicates the scope of access to cluster
+            DNS records.
+        cluster_dns_domain (str):
+            cluster_dns_domain is the suffix used for all cluster
+            service records.
+    """
+
+    class Provider(proto.Enum):
+        r"""Provider lists the various in-cluster DNS providers.
+
+        Values:
+            PROVIDER_UNSPECIFIED (0):
+                Default value.
+            PLATFORM_DEFAULT (1):
+                Use the GKE default DNS provider (kube-dns)
+                for DNS resolution.
+            CLOUD_DNS (2):
+                Use CloudDNS for DNS resolution.
+            KUBE_DNS (3):
+                Use KubeDNS for DNS resolution.
+        """
+        PROVIDER_UNSPECIFIED = 0
+        PLATFORM_DEFAULT = 1
+        CLOUD_DNS = 2
+        KUBE_DNS = 3
+
+    class DNSScope(proto.Enum):
+        r"""DNSScope lists the various scopes of access to cluster DNS
+        records.
+
+        Values:
+            DNS_SCOPE_UNSPECIFIED (0):
+                Default value, will be inferred as cluster
+                scope.
+            CLUSTER_SCOPE (1):
+                DNS records are accessible from within the
+                cluster.
+            VPC_SCOPE (2):
+                DNS records are accessible from within the
+                VPC.
+        """
+        DNS_SCOPE_UNSPECIFIED = 0
+        CLUSTER_SCOPE = 1
+        VPC_SCOPE = 2
+
+    cluster_dns: Provider = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Provider,
+    )
+    cluster_dns_scope: DNSScope = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=DNSScope,
+    )
+    cluster_dns_domain: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class MaxPodsConstraint(proto.Message):
+    r"""Constraints applied to pods.
+
+    Attributes:
+        max_pods_per_node (int):
+            Constraint enforced on the max num of pods
+            per node.
+    """
+
+    max_pods_per_node: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+
+
+class WorkloadIdentityConfig(proto.Message):
+    r"""Configuration for the use of Kubernetes Service Accounts in
+    GCP IAM policies.
+
+    Attributes:
+        identity_namespace (str):
+            IAM Identity Namespace to attach all
+            Kubernetes Service Accounts to.
+        workload_pool (str):
+            The workload pool to attach all Kubernetes
+            service accounts to.
+        identity_provider (str):
+            identity_provider is the third-party identity
+            provider.
+    """
+
+    identity_namespace: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    workload_pool: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    identity_provider: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
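+
+
+# Illustrative sketch (not part of the generated module; the pool name format
+# "<PROJECT_ID>.svc.id.goog" is the documented Workload Identity convention):
+#
+#   wi = WorkloadIdentityConfig(workload_pool="my-project.svc.id.goog")
+
+
+class WorkloadALTSConfig(proto.Message):
+    r"""Configuration for direct-path (via ALTS) with workload
+    identity.
+
+    Attributes:
+        enable_alts (google.protobuf.wrappers_pb2.BoolValue):
+            enable_alts controls whether the alts handshaker should be
+            enabled or not for direct-path.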
+ + Requires Workload Identity + ([workload_pool][google.container.v1beta1.WorkloadIdentityConfig.workload_pool] + must be non-empty). + """ + + enable_alts: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.BoolValue, + ) + + +class WorkloadCertificates(proto.Message): + r"""Configuration for issuance of mTLS keys and certificates to + Kubernetes pods. + + Attributes: + enable_certificates (google.protobuf.wrappers_pb2.BoolValue): + enable_certificates controls issuance of workload mTLS + certificates. + + If set, the GKE Workload Identity Certificates controller + and node agent will be deployed in the cluster, which can + then be configured by creating a WorkloadCertificateConfig + Custom Resource. + + Requires Workload Identity + ([workload_pool][google.container.v1beta1.WorkloadIdentityConfig.workload_pool] + must be non-empty). + """ + + enable_certificates: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.BoolValue, + ) + + +class MeshCertificates(proto.Message): + r"""Configuration for issuance of mTLS keys and certificates to + Kubernetes pods. + + Attributes: + enable_certificates (google.protobuf.wrappers_pb2.BoolValue): + enable_certificates controls issuance of workload mTLS + certificates. + + If set, the GKE Workload Identity Certificates controller + and node agent will be deployed in the cluster, which can + then be configured by creating a WorkloadCertificateConfig + Custom Resource. + + Requires Workload Identity + ([workload_pool][google.container.v1alpha1.WorkloadIdentityConfig.workload_pool] + must be non-empty). + """ + + enable_certificates: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=1, + message=wrappers_pb2.BoolValue, + ) + + +class DatabaseEncryption(proto.Message): + r"""Configuration of etcd encryption. + + Attributes: + key_name (str): + Name of CloudKMS key to use for the + encryption of secrets in etcd. Ex. + projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key + state (google.cloud.container_v1beta1.types.DatabaseEncryption.State): + The desired state of etcd encryption. + """ + + class State(proto.Enum): + r"""State of etcd encryption. + + Values: + UNKNOWN (0): + Should never be set + ENCRYPTED (1): + Secrets in etcd are encrypted. + DECRYPTED (2): + Secrets in etcd are stored in plain text (at + etcd level) - this is unrelated to Compute + Engine level full disk encryption. + """ + UNKNOWN = 0 + ENCRYPTED = 1 + DECRYPTED = 2 + + key_name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + + +class ResourceUsageExportConfig(proto.Message): + r"""Configuration for exporting cluster resource usages. + + Attributes: + bigquery_destination (google.cloud.container_v1beta1.types.ResourceUsageExportConfig.BigQueryDestination): + Configuration to use BigQuery as usage export + destination. + enable_network_egress_metering (bool): + Whether to enable network egress metering for + this cluster. If enabled, a daemonset will be + created in the cluster to meter network egress + traffic. + consumption_metering_config (google.cloud.container_v1beta1.types.ResourceUsageExportConfig.ConsumptionMeteringConfig): + Configuration to enable resource consumption + metering. + """ + + class BigQueryDestination(proto.Message): + r"""Parameters for using BigQuery as the destination of resource + usage export. + + Attributes: + dataset_id (str): + The ID of a BigQuery Dataset. 
+        """
+
+        dataset_id: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+
+    class ConsumptionMeteringConfig(proto.Message):
+        r"""Parameters for controlling consumption metering.
+
+        Attributes:
+            enabled (bool):
+                Whether to enable consumption metering for
+                this cluster. If enabled, a second BigQuery
+                table will be created to hold resource
+                consumption records.
+        """
+
+        enabled: bool = proto.Field(
+            proto.BOOL,
+            number=1,
+        )
+
+    bigquery_destination: BigQueryDestination = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=BigQueryDestination,
+    )
+    enable_network_egress_metering: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    consumption_metering_config: ConsumptionMeteringConfig = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=ConsumptionMeteringConfig,
+    )
+
+
+class ShieldedNodes(proto.Message):
+    r"""Configuration of Shielded Nodes feature.
+
+    Attributes:
+        enabled (bool):
+            Whether Shielded Nodes features are enabled
+            on all nodes in this cluster.
+    """
+
+    enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class VirtualNIC(proto.Message):
+    r"""Configuration of gVNIC feature.
+
+    Attributes:
+        enabled (bool):
+            Whether gVNIC features are enabled in the
+            node pool.
+    """
+
+    enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class FastSocket(proto.Message):
+    r"""Configuration of Fast Socket feature.
+
+    Attributes:
+        enabled (bool):
+            Whether Fast Socket features are enabled in
+            the node pool.
+    """
+
+    enabled: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class GetOpenIDConfigRequest(proto.Message):
+    r"""GetOpenIDConfigRequest gets the OIDC discovery document for
+    the cluster. See the OpenID Connect Discovery 1.0 specification
+    for details.
+
+    Attributes:
+        parent (str):
+            The cluster (project, location, cluster name) to get the
+            discovery document for. Specified in the format
+            ``projects/*/locations/*/clusters/*``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class GetOpenIDConfigResponse(proto.Message):
+    r"""GetOpenIDConfigResponse is an OIDC discovery document for the
+    cluster. See the OpenID Connect Discovery 1.0 specification for
+    details.
+
+    Attributes:
+        issuer (str):
+            OIDC Issuer.
+        jwks_uri (str):
+            JSON Web Key URI.
+        response_types_supported (MutableSequence[str]):
+            Supported response types.
+        subject_types_supported (MutableSequence[str]):
+            Supported subject types.
+        id_token_signing_alg_values_supported (MutableSequence[str]):
+            Supported ID token signing algorithms.
+        claims_supported (MutableSequence[str]):
+            Supported claims.
+        grant_types (MutableSequence[str]):
+            Supported grant types.
+    """
+
+    issuer: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    jwks_uri: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    response_types_supported: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+    subject_types_supported: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=4,
+    )
+    id_token_signing_alg_values_supported: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=5,
+    )
+    claims_supported: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=6,
+    )
+    grant_types: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=7,
+    )
+
+
+class GetJSONWebKeysRequest(proto.Message):
+    r"""GetJSONWebKeysRequest gets the public component of the keys used by
+    the cluster to sign token requests. This will be the jwks_uri for
+    the discovery document returned by getOpenIDConfig.
+    Connect Discovery 1.0 specification for details.
+
+    Attributes:
+        parent (str):
+            The cluster (project, location, cluster name) to get keys
+            for. Specified in the format
+            ``projects/*/locations/*/clusters/*``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class Jwk(proto.Message):
+    r"""Jwk is a JSON Web Key as specified in RFC 7517.
+
+    Attributes:
+        kty (str):
+            Key Type.
+        alg (str):
+            Algorithm.
+        use (str):
+            Permitted uses for the public keys.
+        kid (str):
+            Key ID.
+        n (str):
+            Used for RSA keys.
+        e (str):
+            Used for RSA keys.
+        x (str):
+            Used for ECDSA keys.
+        y (str):
+            Used for ECDSA keys.
+        crv (str):
+            Used for ECDSA keys.
+    """
+
+    kty: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    alg: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    use: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    kid: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    n: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    e: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    x: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    y: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    crv: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+
+
+class GetJSONWebKeysResponse(proto.Message):
+    r"""GetJSONWebKeysResponse is a valid JSON Web Key Set as
+    specified in RFC 7517.
+
+    Attributes:
+        keys (MutableSequence[google.cloud.container_v1beta1.types.Jwk]):
+            The public component of the keys used by the
+            cluster to sign token requests.
+    """
+
+    keys: MutableSequence["Jwk"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Jwk",
+    )
+
+
+class CheckAutopilotCompatibilityRequest(proto.Message):
+    r"""CheckAutopilotCompatibilityRequest requests getting the
+    blockers for the given operation in the cluster.
+
+    Attributes:
+        name (str):
+            The name (project, location, cluster) of the cluster to
+            retrieve. Specified in the format
+            ``projects/*/locations/*/clusters/*``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class AutopilotCompatibilityIssue(proto.Message):
+    r"""AutopilotCompatibilityIssue contains information about a
+    specific compatibility issue with Autopilot mode.
+
+    Attributes:
+        last_observation (google.protobuf.timestamp_pb2.Timestamp):
+            The last time when this issue was observed.
+        constraint_type (str):
+            The constraint type of the issue.
+        incompatibility_type (google.cloud.container_v1beta1.types.AutopilotCompatibilityIssue.IssueType):
+            The incompatibility type of this issue.
+        subjects (MutableSequence[str]):
+            The name of the resources which are subject
+            to this issue.
+        documentation_url (str):
+            A URL to public documentation, which
+            addresses resolving this issue.
+        description (str):
+            The description of the issue.
+    """
+
+    class IssueType(proto.Enum):
+        r"""The type of the reported issue.
+
+        Values:
+            UNSPECIFIED (0):
+                Default value, should not be used.
+            INCOMPATIBILITY (1):
+                Indicates that the issue is a known
+                incompatibility between the cluster and
+                Autopilot mode.
+            ADDITIONAL_CONFIG_REQUIRED (2):
+                Indicates the issue is an incompatibility if
+                customers take no further action to resolve.
+            PASSED_WITH_OPTIONAL_CONFIG (3):
+                Indicates the issue is not an
+                incompatibility, but depending on the workloads'
+                business logic, there is a potential that they
+                won't work on Autopilot.
+ """ + UNSPECIFIED = 0 + INCOMPATIBILITY = 1 + ADDITIONAL_CONFIG_REQUIRED = 2 + PASSED_WITH_OPTIONAL_CONFIG = 3 + + last_observation: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + constraint_type: str = proto.Field( + proto.STRING, + number=2, + ) + incompatibility_type: IssueType = proto.Field( + proto.ENUM, + number=3, + enum=IssueType, + ) + subjects: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + documentation_url: str = proto.Field( + proto.STRING, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + + +class CheckAutopilotCompatibilityResponse(proto.Message): + r"""CheckAutopilotCompatibilityResponse has a list of + compatibility issues. + + Attributes: + issues (MutableSequence[google.cloud.container_v1beta1.types.AutopilotCompatibilityIssue]): + The list of issues for the given operation. + summary (str): + The summary of the autopilot compatibility + response. + """ + + issues: MutableSequence["AutopilotCompatibilityIssue"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AutopilotCompatibilityIssue", + ) + summary: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ReleaseChannel(proto.Message): + r"""ReleaseChannel indicates which release channel a cluster is + subscribed to. Release channels are arranged in order of risk. + + When a cluster is subscribed to a release channel, Google + maintains both the master version and the node version. Node + auto-upgrade defaults to true and cannot be disabled. + + Attributes: + channel (google.cloud.container_v1beta1.types.ReleaseChannel.Channel): + channel specifies which release channel the + cluster is subscribed to. + """ + + class Channel(proto.Enum): + r"""Possible values for 'channel'. + + Values: + UNSPECIFIED (0): + No channel specified. + RAPID (1): + RAPID channel is offered on an early access + basis for customers who want to test new + releases. + + WARNING: Versions available in the RAPID Channel + may be subject to unresolved issues with no + known workaround and are not subject to any + SLAs. + REGULAR (2): + Clusters subscribed to REGULAR receive + versions that are considered GA quality. REGULAR + is intended for production users who want to + take advantage of new features. + STABLE (3): + Clusters subscribed to STABLE receive + versions that are known to be stable and + reliable in production. + """ + UNSPECIFIED = 0 + RAPID = 1 + REGULAR = 2 + STABLE = 3 + + channel: Channel = proto.Field( + proto.ENUM, + number=1, + enum=Channel, + ) + + +class CostManagementConfig(proto.Message): + r"""Configuration for fine-grained cost management feature. + + Attributes: + enabled (bool): + Whether the feature is enabled or not. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class TpuConfig(proto.Message): + r"""Configuration for Cloud TPU. + + Attributes: + enabled (bool): + Whether Cloud TPU integration is enabled or + not. + use_service_networking (bool): + Whether to use service networking for Cloud + TPU or not. + ipv4_cidr_block (str): + IPv4 CIDR block reserved for Cloud TPU in the + VPC. 
+ """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + use_service_networking: bool = proto.Field( + proto.BOOL, + number=2, + ) + ipv4_cidr_block: str = proto.Field( + proto.STRING, + number=3, + ) + + +class Master(proto.Message): + r"""Master is the configuration for components on master.""" + + +class Autopilot(proto.Message): + r"""Autopilot is the configuration for Autopilot settings on the + cluster. + + Attributes: + enabled (bool): + Enable Autopilot + workload_policy_config (google.cloud.container_v1beta1.types.WorkloadPolicyConfig): + Workload policy configuration for Autopilot. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + workload_policy_config: "WorkloadPolicyConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="WorkloadPolicyConfig", + ) + + +class WorkloadPolicyConfig(proto.Message): + r"""WorkloadPolicyConfig is the configuration of workload policy + for autopilot clusters. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + allow_net_admin (bool): + If true, workloads can use NET_ADMIN capability. + + This field is a member of `oneof`_ ``_allow_net_admin``. + """ + + allow_net_admin: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + +class NotificationConfig(proto.Message): + r"""NotificationConfig is the configuration of notifications. + + Attributes: + pubsub (google.cloud.container_v1beta1.types.NotificationConfig.PubSub): + Notification config for Pub/Sub. + """ + + class EventType(proto.Enum): + r"""Types of notifications currently supported. Can be used to + filter what notifications are sent. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + Not set, will be ignored. + UPGRADE_AVAILABLE_EVENT (1): + Corresponds with UpgradeAvailableEvent. + UPGRADE_EVENT (2): + Corresponds with UpgradeEvent. + SECURITY_BULLETIN_EVENT (3): + Corresponds with SecurityBulletinEvent. + """ + EVENT_TYPE_UNSPECIFIED = 0 + UPGRADE_AVAILABLE_EVENT = 1 + UPGRADE_EVENT = 2 + SECURITY_BULLETIN_EVENT = 3 + + class PubSub(proto.Message): + r"""Pub/Sub specific notification config. + + Attributes: + enabled (bool): + Enable notifications for Pub/Sub. + topic (str): + The desired Pub/Sub topic to which notifications will be + sent by GKE. Format is + ``projects/{project}/topics/{topic}``. + filter (google.cloud.container_v1beta1.types.NotificationConfig.Filter): + Allows filtering to one or more specific + event types. If no filter is specified, or if a + filter is specified with no event types, all + event types will be sent + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + topic: str = proto.Field( + proto.STRING, + number=2, + ) + filter: "NotificationConfig.Filter" = proto.Field( + proto.MESSAGE, + number=3, + message="NotificationConfig.Filter", + ) + + class Filter(proto.Message): + r"""Allows filtering to one or more specific event types. If + event types are present, those and only those event types will + be transmitted to the cluster. Other types will be skipped. If + no filter is specified, or no event types are present, all event + types will be sent + + Attributes: + event_type (MutableSequence[google.cloud.container_v1beta1.types.NotificationConfig.EventType]): + Event types to allowlist. 
+ """ + + event_type: MutableSequence[ + "NotificationConfig.EventType" + ] = proto.RepeatedField( + proto.ENUM, + number=1, + enum="NotificationConfig.EventType", + ) + + pubsub: PubSub = proto.Field( + proto.MESSAGE, + number=1, + message=PubSub, + ) + + +class ConfidentialNodes(proto.Message): + r"""ConfidentialNodes is configuration for the confidential nodes + feature, which makes nodes run on confidential VMs. + + Attributes: + enabled (bool): + Whether Confidential Nodes feature is + enabled. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class UpgradeEvent(proto.Message): + r"""UpgradeEvent is a notification sent to customers by the + cluster server when a resource is upgrading. + + Attributes: + resource_type (google.cloud.container_v1beta1.types.UpgradeResourceType): + The resource type that is upgrading. + operation (str): + The operation associated with this upgrade. + operation_start_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the operation was started. + current_version (str): + The current version before the upgrade. + target_version (str): + The target version for the upgrade. + resource (str): + Optional relative path to the resource. For + example in node pool upgrades, the relative path + of the node pool. + """ + + resource_type: "UpgradeResourceType" = proto.Field( + proto.ENUM, + number=1, + enum="UpgradeResourceType", + ) + operation: str = proto.Field( + proto.STRING, + number=2, + ) + operation_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + current_version: str = proto.Field( + proto.STRING, + number=4, + ) + target_version: str = proto.Field( + proto.STRING, + number=5, + ) + resource: str = proto.Field( + proto.STRING, + number=6, + ) + + +class UpgradeAvailableEvent(proto.Message): + r"""UpgradeAvailableEvent is a notification sent to customers + when a new available version is released. + + Attributes: + version (str): + The release version available for upgrade. + resource_type (google.cloud.container_v1beta1.types.UpgradeResourceType): + The resource type of the release version. + release_channel (google.cloud.container_v1beta1.types.ReleaseChannel): + The release channel of the version. If empty, + it means a non-channel release. + resource (str): + Optional relative path to the resource. For + example, the relative path of the node pool. + windows_versions (google.cloud.container_v1beta1.types.WindowsVersions): + Windows node versions info. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: "UpgradeResourceType" = proto.Field( + proto.ENUM, + number=2, + enum="UpgradeResourceType", + ) + release_channel: "ReleaseChannel" = proto.Field( + proto.MESSAGE, + number=3, + message="ReleaseChannel", + ) + resource: str = proto.Field( + proto.STRING, + number=4, + ) + windows_versions: "WindowsVersions" = proto.Field( + proto.MESSAGE, + number=5, + message="WindowsVersions", + ) + + +class SecurityBulletinEvent(proto.Message): + r"""SecurityBulletinEvent is a notification sent to customers + when a security bulletin has been posted that they are + vulnerable to. + + Attributes: + resource_type_affected (str): + The resource type (node/control plane) that + has the vulnerability. Multiple notifications (1 + notification per resource type) will be sent for + a vulnerability that affects > 1 resource type. + bulletin_id (str): + The ID of the bulletin corresponding to the + vulnerability. 
+ cve_ids (MutableSequence[str]): + The CVEs associated with this bulletin. + severity (str): + The severity of this bulletin as it relates + to GKE. + bulletin_uri (str): + The URI link to the bulletin on the website + for more information. + brief_description (str): + A brief description of the bulletin. See the bulletin + pointed to by the bulletin_uri field for an expanded + description. + affected_supported_minors (MutableSequence[str]): + The GKE minor versions affected by this + vulnerability. + patched_versions (MutableSequence[str]): + The GKE versions where this vulnerability is + patched. + suggested_upgrade_target (str): + This represents a version selected from the patched_versions + field that the cluster receiving this notification should + most likely want to upgrade to based on its current version. + Note that if this notification is being received by a given + cluster, it means that this version is currently available + as an upgrade target in that cluster's location. + manual_steps_required (bool): + If this field is specified, it means there + are manual steps that the user must take to make + their clusters safe. + """ + + resource_type_affected: str = proto.Field( + proto.STRING, + number=1, + ) + bulletin_id: str = proto.Field( + proto.STRING, + number=2, + ) + cve_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + severity: str = proto.Field( + proto.STRING, + number=4, + ) + bulletin_uri: str = proto.Field( + proto.STRING, + number=5, + ) + brief_description: str = proto.Field( + proto.STRING, + number=6, + ) + affected_supported_minors: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + patched_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + suggested_upgrade_target: str = proto.Field( + proto.STRING, + number=9, + ) + manual_steps_required: bool = proto.Field( + proto.BOOL, + number=10, + ) + + +class IdentityServiceConfig(proto.Message): + r"""IdentityServiceConfig is configuration for Identity Service + which allows customers to use external identity providers with + the K8S API + + Attributes: + enabled (bool): + Whether to enable the Identity Service + component + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class LoggingConfig(proto.Message): + r"""LoggingConfig is cluster logging configuration. + + Attributes: + component_config (google.cloud.container_v1beta1.types.LoggingComponentConfig): + Logging components configuration + """ + + component_config: "LoggingComponentConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="LoggingComponentConfig", + ) + + +class LoggingComponentConfig(proto.Message): + r"""LoggingComponentConfig is cluster logging component + configuration. + + Attributes: + enable_components (MutableSequence[google.cloud.container_v1beta1.types.LoggingComponentConfig.Component]): + Select components to collect logs. An empty + set would disable all logging. + """ + + class Component(proto.Enum): + r"""GKE components exposing logs + + Values: + COMPONENT_UNSPECIFIED (0): + Default value. This shouldn't be used. 
+ SYSTEM_COMPONENTS (1): + system components + WORKLOADS (2): + workloads + APISERVER (3): + kube-apiserver + SCHEDULER (4): + kube-scheduler + CONTROLLER_MANAGER (5): + kube-controller-manager + """ + COMPONENT_UNSPECIFIED = 0 + SYSTEM_COMPONENTS = 1 + WORKLOADS = 2 + APISERVER = 3 + SCHEDULER = 4 + CONTROLLER_MANAGER = 5 + + enable_components: MutableSequence[Component] = proto.RepeatedField( + proto.ENUM, + number=1, + enum=Component, + ) + + +class MonitoringConfig(proto.Message): + r"""MonitoringConfig is cluster monitoring configuration. + + Attributes: + component_config (google.cloud.container_v1beta1.types.MonitoringComponentConfig): + Monitoring components configuration + managed_prometheus_config (google.cloud.container_v1beta1.types.ManagedPrometheusConfig): + Enable Google Cloud Managed Service for + Prometheus in the cluster. + advanced_datapath_observability_config (google.cloud.container_v1beta1.types.AdvancedDatapathObservabilityConfig): + Configuration of Advanced Datapath + Observability features. + """ + + component_config: "MonitoringComponentConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="MonitoringComponentConfig", + ) + managed_prometheus_config: "ManagedPrometheusConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="ManagedPrometheusConfig", + ) + advanced_datapath_observability_config: "AdvancedDatapathObservabilityConfig" = ( + proto.Field( + proto.MESSAGE, + number=3, + message="AdvancedDatapathObservabilityConfig", + ) + ) + + +class AdvancedDatapathObservabilityConfig(proto.Message): + r"""AdvancedDatapathObservabilityConfig specifies configuration + of observability features of advanced datapath. + + Attributes: + enable_metrics (bool): + Expose flow metrics on nodes + relay_mode (google.cloud.container_v1beta1.types.AdvancedDatapathObservabilityConfig.RelayMode): + Method used to make Relay available + """ + + class RelayMode(proto.Enum): + r"""Supported Relay modes + + Values: + RELAY_MODE_UNSPECIFIED (0): + Default value. This shouldn't be used. + DISABLED (1): + disabled + INTERNAL_VPC_LB (3): + exposed via internal load balancer + EXTERNAL_LB (4): + exposed via external load balancer + """ + RELAY_MODE_UNSPECIFIED = 0 + DISABLED = 1 + INTERNAL_VPC_LB = 3 + EXTERNAL_LB = 4 + + enable_metrics: bool = proto.Field( + proto.BOOL, + number=1, + ) + relay_mode: RelayMode = proto.Field( + proto.ENUM, + number=2, + enum=RelayMode, + ) + + +class NodePoolLoggingConfig(proto.Message): + r"""NodePoolLoggingConfig specifies logging configuration for + nodepools. + + Attributes: + variant_config (google.cloud.container_v1beta1.types.LoggingVariantConfig): + Logging variant configuration. + """ + + variant_config: "LoggingVariantConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="LoggingVariantConfig", + ) + + +class LoggingVariantConfig(proto.Message): + r"""LoggingVariantConfig specifies the behaviour of the logging + component. + + Attributes: + variant (google.cloud.container_v1beta1.types.LoggingVariantConfig.Variant): + Logging variant deployed on nodes. + """ + + class Variant(proto.Enum): + r"""Logging component variants. + + Values: + VARIANT_UNSPECIFIED (0): + Default value. This shouldn't be used. + DEFAULT (1): + default logging variant. + MAX_THROUGHPUT (2): + maximum logging throughput variant. 
+        """
+        VARIANT_UNSPECIFIED = 0
+        DEFAULT = 1
+        MAX_THROUGHPUT = 2
+
+    variant: Variant = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Variant,
+    )
+
+
+class MonitoringComponentConfig(proto.Message):
+    r"""MonitoringComponentConfig is cluster monitoring component
+    configuration.
+
+    Attributes:
+        enable_components (MutableSequence[google.cloud.container_v1beta1.types.MonitoringComponentConfig.Component]):
+            Select components to collect metrics. An
+            empty set would disable all monitoring.
+    """
+
+    class Component(proto.Enum):
+        r"""GKE components exposing metrics.
+
+        Values:
+            COMPONENT_UNSPECIFIED (0):
+                Default value. This shouldn't be used.
+            SYSTEM_COMPONENTS (1):
+                system components
+            WORKLOADS (2):
+                Deprecated: Use Google Cloud Managed Service
+                for Prometheus.
+            APISERVER (3):
+                kube-apiserver
+            SCHEDULER (4):
+                kube-scheduler
+            CONTROLLER_MANAGER (5):
+                kube-controller-manager
+            STORAGE (7):
+                Storage
+            HPA (8):
+                Horizontal Pod Autoscaling
+            POD (9):
+                Pod
+            DAEMONSET (10):
+                DaemonSet
+            DEPLOYMENT (11):
+                Deployment
+            STATEFULSET (12):
+                StatefulSet
+        """
+        COMPONENT_UNSPECIFIED = 0
+        SYSTEM_COMPONENTS = 1
+        WORKLOADS = 2
+        APISERVER = 3
+        SCHEDULER = 4
+        CONTROLLER_MANAGER = 5
+        STORAGE = 7
+        HPA = 8
+        POD = 9
+        DAEMONSET = 10
+        DEPLOYMENT = 11
+        STATEFULSET = 12
+
+    enable_components: MutableSequence[Component] = proto.RepeatedField(
+        proto.ENUM,
+        number=1,
+        enum=Component,
+    )
+
+
+class Fleet(proto.Message):
+    r"""Fleet is the fleet configuration for the cluster.
+
+    Attributes:
+        project (str):
+            The Fleet host project (project ID or project
+            number) where this cluster will be registered
+            to. This field cannot be changed after the
+            cluster has been registered.
+        membership (str):
+            [Output only] The full resource name of the registered fleet
+            membership of the cluster, in the format
+            ``//gkehub.googleapis.com/projects/*/locations/*/memberships/*``.
+        pre_registered (bool):
+            [Output only] Whether the cluster has been registered
+            through the fleet API.
+    """
+
+    project: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    membership: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    pre_registered: bool = proto.Field(
+        proto.BOOL,
+        number=3,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-container/mypy.ini b/packages/google-cloud-container/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/packages/google-cloud-container/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/packages/google-cloud-container/noxfile.py b/packages/google-cloud-container/noxfile.py
new file mode 100644
index 000000000000..be54712bfa8f
--- /dev/null
+++ b/packages/google-cloud-container/noxfile.py
@@ -0,0 +1,407 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
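+#
+# A sketch of typical local invocations (session names are defined below;
+# parametrized sessions are addressed as <session>-<python version>):
+#
+#   nox -s lint blacken      # static checks and formatting
+#   nox -s unit-3.11         # unit tests on Python 3.11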
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
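+    # Note: both invocations below forward session.posargs, so extra pytest
+    # flags can be passed through nox, e.g. (a sketch):
+    #   nox -s system-3.11 -- -k test_cluster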
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-container/renovate.json b/packages/google-cloud-container/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-container/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-container/scripts/decrypt-secrets.sh b/packages/google-cloud-container/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-container/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
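+# For example (a sketch): SECRET_MANAGER_PROJECT=my-project ./scripts/decrypt-secrets.sh
+# reads the three secrets below from my-project instead.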
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-container/scripts/fixup_container_v1_keywords.py b/packages/google-cloud-container/scripts/fixup_container_v1_keywords.py new file mode 100644 index 000000000000..b7ec98364665 --- /dev/null +++ b/packages/google-cloud-container/scripts/fixup_container_v1_keywords.py @@ -0,0 +1,209 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class containerCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_operation': ('project_id', 'zone', 'operation_id', 'name', ), + 'check_autopilot_compatibility': ('name', ), + 'complete_ip_rotation': ('project_id', 'zone', 'cluster_id', 'name', ), + 'complete_node_pool_upgrade': ('name', ), + 'create_cluster': ('cluster', 'project_id', 'zone', 'parent', ), + 'create_node_pool': ('node_pool', 'project_id', 'zone', 'cluster_id', 'parent', ), + 'delete_cluster': ('project_id', 'zone', 'cluster_id', 'name', ), + 'delete_node_pool': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', ), + 'get_cluster': ('project_id', 'zone', 'cluster_id', 'name', ), + 'get_json_web_keys': ('parent', ), + 'get_node_pool': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', ), + 'get_operation': ('project_id', 'zone', 'operation_id', 'name', ), + 'get_server_config': ('project_id', 'zone', 'name', ), + 'list_clusters': ('project_id', 'zone', 'parent', ), + 'list_node_pools': ('project_id', 'zone', 'cluster_id', 'parent', ), + 'list_operations': ('project_id', 'zone', 'parent', ), + 'list_usable_subnetworks': ('parent', 'filter', 'page_size', 'page_token', ), + 'rollback_node_pool_upgrade': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', 'respect_pdb', ), + 'set_addons_config': ('addons_config', 'project_id', 'zone', 'cluster_id', 'name', ), + 'set_labels': ('resource_labels', 'label_fingerprint', 'project_id', 'zone', 'cluster_id', 'name', ), + 'set_legacy_abac': ('enabled', 'project_id', 
+        'zone', 'cluster_id', 'name', ),
+        'set_locations': ('locations', 'project_id', 'zone', 'cluster_id', 'name', ),
+        'set_logging_service': ('logging_service', 'project_id', 'zone', 'cluster_id', 'name', ),
+        'set_maintenance_policy': ('project_id', 'zone', 'cluster_id', 'maintenance_policy', 'name', ),
+        'set_master_auth': ('action', 'update', 'project_id', 'zone', 'cluster_id', 'name', ),
+        'set_monitoring_service': ('monitoring_service', 'project_id', 'zone', 'cluster_id', 'name', ),
+        'set_network_policy': ('network_policy', 'project_id', 'zone', 'cluster_id', 'name', ),
+        'set_node_pool_autoscaling': ('autoscaling', 'project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', ),
+        'set_node_pool_management': ('management', 'project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', ),
+        'set_node_pool_size': ('node_count', 'project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', ),
+        'start_ip_rotation': ('project_id', 'zone', 'cluster_id', 'name', 'rotate_credentials', ),
+        'update_cluster': ('update', 'project_id', 'zone', 'cluster_id', 'name', ),
+        'update_master': ('master_version', 'project_id', 'zone', 'cluster_id', 'name', ),
+        'update_node_pool': ('node_version', 'image_type', 'project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', 'locations', 'workload_metadata_config', 'upgrade_settings', 'tags', 'taints', 'labels', 'linux_node_config', 'kubelet_config', 'node_network_config', 'gcfs_config', 'confidential_nodes', 'gvnic', 'etag', 'fast_socket', 'logging_config', 'resource_labels', 'windows_node_config', 'machine_type', 'disk_type', 'disk_size_gb', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
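+
+
+# Illustration of the rewrite performed above (a sketch with assumed names,
+# using the 'get_cluster' entry from METHOD_TO_PARAMS):
+#
+#   before:  client.get_cluster(project_id, zone, cluster_id, timeout=5)
+#   after:   client.get_cluster(
+#                request={'project_id': project_id, 'zone': zone,
+#                         'cluster_id': cluster_id},
+#                timeout=5)
+#
+# Positional and plain keyword arguments are folded into a single `request`
+# dict, while the control parameters ('retry', 'timeout', 'metadata') are
+# kept as keywords.
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=containerCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.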
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the container client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-container/scripts/fixup_container_v1beta1_keywords.py b/packages/google-cloud-container/scripts/fixup_container_v1beta1_keywords.py new file mode 100644 index 000000000000..cebfcd2b713a --- /dev/null +++ b/packages/google-cloud-container/scripts/fixup_container_v1beta1_keywords.py @@ -0,0 +1,210 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class containerCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_operation': ('project_id', 'zone', 'operation_id', 'name', ), + 'check_autopilot_compatibility': ('name', ), + 'complete_ip_rotation': ('project_id', 'zone', 'cluster_id', 'name', ), + 'complete_node_pool_upgrade': ('name', ), + 'create_cluster': ('project_id', 'zone', 'cluster', 'parent', ), + 'create_node_pool': ('project_id', 'zone', 'cluster_id', 'node_pool', 'parent', ), + 'delete_cluster': ('project_id', 'zone', 'cluster_id', 'name', ), + 'delete_node_pool': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', ), + 'get_cluster': ('project_id', 'zone', 'cluster_id', 'name', ), + 'get_json_web_keys': ('parent', ), + 'get_node_pool': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', ), + 'get_operation': ('project_id', 'zone', 'operation_id', 'name', ), + 'get_server_config': ('project_id', 'zone', 'name', ), + 'list_clusters': ('project_id', 'zone', 'parent', ), + 'list_locations': ('parent', ), + 'list_node_pools': ('project_id', 'zone', 'cluster_id', 'parent', ), + 'list_operations': ('project_id', 'zone', 'parent', ), + 'list_usable_subnetworks': ('parent', 'filter', 'page_size', 'page_token', ), + 'rollback_node_pool_upgrade': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'name', 'respect_pdb', ), + 'set_addons_config': ('project_id', 'zone', 'cluster_id', 'addons_config', 'name', ), + 'set_labels': ('project_id', 'zone', 'cluster_id', 'resource_labels', 'label_fingerprint', 'name', ), + 'set_legacy_abac': ('project_id', 'zone', 'cluster_id', 'enabled', 'name', ), + 'set_locations': ('project_id', 'zone', 'cluster_id', 'locations', 'name', ), + 'set_logging_service': ('project_id', 'zone', 'cluster_id', 'logging_service', 'name', ), + 'set_maintenance_policy': ('project_id', 'zone', 'cluster_id', 'maintenance_policy', 'name', ), + 'set_master_auth': ('project_id', 'zone', 'cluster_id', 'action', 'update', 'name', ), + 'set_monitoring_service': ('project_id', 'zone', 'cluster_id', 'monitoring_service', 'name', ), + 'set_network_policy': ('project_id', 'zone', 'cluster_id', 'network_policy', 'name', ), + 'set_node_pool_autoscaling': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'autoscaling', 'name', ), + 'set_node_pool_management': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'management', 'name', ), + 'set_node_pool_size': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'node_count', 'name', ), + 'start_ip_rotation': ('project_id', 'zone', 'cluster_id', 'name', 'rotate_credentials', ), + 'update_cluster': ('project_id', 'zone', 'cluster_id', 'update', 'name', ), + 'update_master': ('project_id', 'zone', 'cluster_id', 'master_version', 'name', ), + 'update_node_pool': ('project_id', 'zone', 'cluster_id', 'node_pool_id', 'node_version', 'image_type', 'locations', 'workload_metadata_config', 'name', 'upgrade_settings', 'tags', 'taints', 'labels', 'linux_node_config', 'kubelet_config', 'node_network_config', 'gcfs_config', 
'confidential_nodes', 'gvnic', 'etag', 'fast_socket', 'logging_config', 'resource_labels', 'windows_node_config', 'machine_type', 'disk_type', 'disk_size_gb', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=containerCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the container client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-container/scripts/readme-gen/readme_gen.py b/packages/google-cloud-container/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-container/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-container/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-container/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-container/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. 
+ +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-container/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-container/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-container/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-container/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-container/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-container/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. 
To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Platform Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-container/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-container/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-container/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-container/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-container/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-container/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-container/setup.cfg b/packages/google-cloud-container/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-container/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-container/setup.py b/packages/google-cloud-container/setup.py new file mode 100644 index 000000000000..b9f37472558c --- /dev/null +++ b/packages/google-cloud-container/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-container" + + +description = "Google Cloud Container API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/container/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, 
+ zip_safe=False, +) diff --git a/packages/google-cloud-container/testing/.gitignore b/packages/google-cloud-container/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-container/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-container/testing/constraints-3.10.txt b/packages/google-cloud-container/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-container/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-container/testing/constraints-3.11.txt b/packages/google-cloud-container/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-container/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-container/testing/constraints-3.12.txt b/packages/google-cloud-container/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-container/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-container/testing/constraints-3.7.txt b/packages/google-cloud-container/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-container/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-container/testing/constraints-3.8.txt b/packages/google-cloud-container/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-container/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-container/testing/constraints-3.9.txt b/packages/google-cloud-container/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-container/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
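+# (Editor's note, illustrative) Constraints files such as this one are
+# typically fed to pip through its -c flag, for example:
+#   pip install -e . -c testing/constraints-3.9.txt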
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-container/tests/__init__.py b/packages/google-cloud-container/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-container/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-container/tests/system/__init__.py b/packages/google-cloud-container/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-container/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-container/tests/system/smoke_test.py b/packages/google-cloud-container/tests/system/smoke_test.py new file mode 100644 index 000000000000..9dc5cd54cb60 --- /dev/null +++ b/packages/google-cloud-container/tests/system/smoke_test.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import container_v1 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +# REST transport is not yet supported in the client +@pytest.mark.parametrize("transport", ["grpc"]) +def test_list_clusters(project_id: str, transport: str): + client = container_v1.ClusterManagerClient(transport=transport) + + parent = client.common_location_path(project_id, location="us-central1") + client.list_clusters(parent=parent) + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. 
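+    # (Editor's note, illustrative) A hypothetical local run would look like:
+    #   PROJECT_ID=my-gcp-project pytest tests/system/smoke_test.py
+    # where "my-gcp-project" stands in for a real project with GKE enabled.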
+ assert True diff --git a/packages/google-cloud-container/tests/unit/__init__.py b/packages/google-cloud-container/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-container/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-container/tests/unit/gapic/__init__.py b/packages/google-cloud-container/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-container/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1/__init__.py b/packages/google-cloud-container/tests/unit/gapic/container_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py new file mode 100644 index 000000000000..011f56ececa1 --- /dev/null +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py @@ -0,0 +1,10837 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.container_v1.services.cluster_manager import ( + ClusterManagerAsyncClient, + ClusterManagerClient, + pagers, + transports, +) +from google.cloud.container_v1.types import cluster_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
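+# For example (editor's note): if client.DEFAULT_ENDPOINT were
+# "localhost:7469", the helper below yields "foo.googleapis.com"; any other
+# default endpoint is returned unchanged.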
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ClusterManagerClient._get_default_mtls_endpoint(None) is None + assert ( + ClusterManagerClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ClusterManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ClusterManagerClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ClusterManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ClusterManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ClusterManagerClient, "grpc"), + (ClusterManagerAsyncClient, "grpc_asyncio"), + ], +) +def test_cluster_manager_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("container.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ClusterManagerGrpcTransport, "grpc"), + (transports.ClusterManagerGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_cluster_manager_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ClusterManagerClient, "grpc"), + (ClusterManagerAsyncClient, "grpc_asyncio"), + ], +) +def test_cluster_manager_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("container.googleapis.com:443") + + +def 
test_cluster_manager_client_get_transport_class(): + transport = ClusterManagerClient.get_transport_class() + available_transports = [ + transports.ClusterManagerGrpcTransport, + ] + assert transport in available_transports + + transport = ClusterManagerClient.get_transport_class("grpc") + assert transport == transports.ClusterManagerGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc"), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + ClusterManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterManagerClient), +) +@mock.patch.object( + ClusterManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterManagerAsyncClient), +) +def test_cluster_manager_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ClusterManagerClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ClusterManagerClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
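+    # (Editor's note) Anything other than "never", "auto", or "always" is
+    # expected to raise MutualTLSChannelError, as asserted below.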
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc", "true"), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc", "false"), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + ClusterManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterManagerClient), +) +@mock.patch.object( + ClusterManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterManagerAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cluster_manager_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ClusterManagerClient, ClusterManagerAsyncClient] +) +@mock.patch.object( + ClusterManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterManagerClient), +) +@mock.patch.object( + ClusterManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterManagerAsyncClient), +) +def test_cluster_manager_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
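+    # (Editor's note) get_mtls_endpoint_and_cert_source returns an
+    # (api_endpoint, cert_source) pair; the blocks below pin down one
+    # combination of the two environment variables at a time.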
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc"), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cluster_manager_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ClusterManagerClient, + transports.ClusterManagerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cluster_manager_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cluster_manager_client_client_options_from_dict(): + with mock.patch( + "google.cloud.container_v1.services.cluster_manager.transports.ClusterManagerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ClusterManagerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ClusterManagerClient, + transports.ClusterManagerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cluster_manager_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "container.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="container.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListClustersRequest, + dict, + ], +) +def test_list_clusters(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListClustersResponse( + missing_zones=["missing_zones_value"], + ) + response = client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListClustersResponse) + assert response.missing_zones == ["missing_zones_value"] + + +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListClustersRequest() + + +@pytest.mark.asyncio +async def test_list_clusters_async( + transport: str = "grpc_asyncio", request_type=cluster_service.ListClustersRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. 
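+        # (Editor's note) The async client awaits the stub, so the mock must
+        # return an awaitable; the FakeUnaryUnaryCall below provides one.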
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListClustersResponse( + missing_zones=["missing_zones_value"], + ) + ) + response = await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListClustersResponse) + assert response.missing_zones == ["missing_zones_value"] + + +@pytest.mark.asyncio +async def test_list_clusters_async_from_dict(): + await test_list_clusters_async(request_type=dict) + + +def test_list_clusters_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = cluster_service.ListClustersResponse() + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_clusters_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListClustersResponse() + ) + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_clusters_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListClustersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_clusters( + project_id="project_id_value", + zone="zone_value", + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_clusters_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_clusters(
+            cluster_service.ListClustersRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.ListClustersResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_clusters(
+            project_id="project_id_value",
+            zone="zone_value",
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_clusters(
+            cluster_service.ListClustersRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            parent="parent_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cluster_service.GetClusterRequest,
+        dict,
+    ],
+)
+def test_get_cluster(request_type, transport: str = "grpc"):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
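+        # Populate every scalar field of the Cluster message so the assertions
+        # below can verify a full round trip through the client.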
+ call.return_value = cluster_service.Cluster( + name="name_value", + description="description_value", + initial_node_count=1911, + logging_service="logging_service_value", + monitoring_service="monitoring_service_value", + network="network_value", + cluster_ipv4_cidr="cluster_ipv4_cidr_value", + subnetwork="subnetwork_value", + locations=["locations_value"], + enable_kubernetes_alpha=True, + label_fingerprint="label_fingerprint_value", + self_link="self_link_value", + zone="zone_value", + endpoint="endpoint_value", + initial_cluster_version="initial_cluster_version_value", + current_master_version="current_master_version_value", + current_node_version="current_node_version_value", + create_time="create_time_value", + status=cluster_service.Cluster.Status.PROVISIONING, + status_message="status_message_value", + node_ipv4_cidr_size=1955, + services_ipv4_cidr="services_ipv4_cidr_value", + instance_group_urls=["instance_group_urls_value"], + current_node_count=1936, + expire_time="expire_time_value", + location="location_value", + enable_tpu=True, + tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", + id="id_value", + etag="etag_value", + ) + response = client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Cluster) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.initial_node_count == 1911 + assert response.logging_service == "logging_service_value" + assert response.monitoring_service == "monitoring_service_value" + assert response.network == "network_value" + assert response.cluster_ipv4_cidr == "cluster_ipv4_cidr_value" + assert response.subnetwork == "subnetwork_value" + assert response.locations == ["locations_value"] + assert response.enable_kubernetes_alpha is True + assert response.label_fingerprint == "label_fingerprint_value" + assert response.self_link == "self_link_value" + assert response.zone == "zone_value" + assert response.endpoint == "endpoint_value" + assert response.initial_cluster_version == "initial_cluster_version_value" + assert response.current_master_version == "current_master_version_value" + assert response.current_node_version == "current_node_version_value" + assert response.create_time == "create_time_value" + assert response.status == cluster_service.Cluster.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.node_ipv4_cidr_size == 1955 + assert response.services_ipv4_cidr == "services_ipv4_cidr_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.current_node_count == 1936 + assert response.expire_time == "expire_time_value" + assert response.location == "location_value" + assert response.enable_tpu is True + assert response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" + assert response.id == "id_value" + assert response.etag == "etag_value" + + +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
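+    # With no request object and no flattened fields, the client should still
+    # construct and send a default (empty) GetClusterRequest.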
+ with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetClusterRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetClusterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Cluster( + name="name_value", + description="description_value", + initial_node_count=1911, + logging_service="logging_service_value", + monitoring_service="monitoring_service_value", + network="network_value", + cluster_ipv4_cidr="cluster_ipv4_cidr_value", + subnetwork="subnetwork_value", + locations=["locations_value"], + enable_kubernetes_alpha=True, + label_fingerprint="label_fingerprint_value", + self_link="self_link_value", + zone="zone_value", + endpoint="endpoint_value", + initial_cluster_version="initial_cluster_version_value", + current_master_version="current_master_version_value", + current_node_version="current_node_version_value", + create_time="create_time_value", + status=cluster_service.Cluster.Status.PROVISIONING, + status_message="status_message_value", + node_ipv4_cidr_size=1955, + services_ipv4_cidr="services_ipv4_cidr_value", + instance_group_urls=["instance_group_urls_value"], + current_node_count=1936, + expire_time="expire_time_value", + location="location_value", + enable_tpu=True, + tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", + id="id_value", + etag="etag_value", + ) + ) + response = await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetClusterRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Cluster) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.initial_node_count == 1911 + assert response.logging_service == "logging_service_value" + assert response.monitoring_service == "monitoring_service_value" + assert response.network == "network_value" + assert response.cluster_ipv4_cidr == "cluster_ipv4_cidr_value" + assert response.subnetwork == "subnetwork_value" + assert response.locations == ["locations_value"] + assert response.enable_kubernetes_alpha is True + assert response.label_fingerprint == "label_fingerprint_value" + assert response.self_link == "self_link_value" + assert response.zone == "zone_value" + assert response.endpoint == "endpoint_value" + assert response.initial_cluster_version == "initial_cluster_version_value" + assert response.current_master_version == "current_master_version_value" + assert response.current_node_version == "current_node_version_value" + assert response.create_time == "create_time_value" + assert response.status == cluster_service.Cluster.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.node_ipv4_cidr_size == 1955 + assert response.services_ipv4_cidr == "services_ipv4_cidr_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.current_node_count == 1936 + assert response.expire_time == "expire_time_value" + assert response.location == "location_value" + assert response.enable_tpu is True + assert response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" + assert response.id == "id_value" + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_cluster_async_from_dict(): + await test_get_cluster_async(request_type=dict) + + +def test_get_cluster_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = cluster_service.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Cluster() + ) + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_cluster_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Cluster()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_cluster(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_cluster_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_cluster(
+            cluster_service.GetClusterRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_cluster_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Cluster()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_cluster(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_cluster_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
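+    # The client raises ValueError rather than guessing which of the two
+    # should take precedence.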
+ with pytest.raises(ValueError): + await client.get_cluster( + cluster_service.GetClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CreateClusterRequest, + dict, + ], +) +def test_create_cluster(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_create_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateClusterRequest() + + +@pytest.mark.asyncio +async def test_create_cluster_async( + transport: str = "grpc_asyncio", request_type=cluster_service.CreateClusterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
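+        # Mutating RPCs such as CreateCluster resolve to a
+        # cluster_service.Operation message describing the server-side work,
+        # so that is what the awaitable fake call wraps here.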
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_create_cluster_async_from_dict(): + await test_create_cluster_async(request_type=dict) + + +def test_create_cluster_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = cluster_service.Operation() + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_cluster_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
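+    # The routing parameters travel as an ("x-goog-request-params",
+    # "parent=parent_value") tuple inside the metadata passed to the stub.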
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_cluster_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_cluster(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster=cluster_service.Cluster(name="name_value"),
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = cluster_service.Cluster(name="name_value")
+        assert arg == mock_val
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_create_cluster_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_cluster(
+            cluster_service.CreateClusterRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster=cluster_service.Cluster(name="name_value"),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_cluster(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster=cluster_service.Cluster(name="name_value"),
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = cluster_service.Cluster(name="name_value")
+        assert arg == mock_val
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_cluster( + cluster_service.CreateClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster=cluster_service.Cluster(name="name_value"), + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateClusterRequest() + + +@pytest.mark.asyncio +async def test_update_cluster_async( + transport: str = "grpc_asyncio", request_type=cluster_service.UpdateClusterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_update_cluster_async_from_dict(): + await test_update_cluster_async(request_type=dict) + + +def test_update_cluster_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = cluster_service.Operation() + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cluster_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_cluster_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_cluster(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            update=cluster_service.ClusterUpdate(
+                desired_node_version="desired_node_version_value"
+            ),
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].update
+        mock_val = cluster_service.ClusterUpdate(
+            desired_node_version="desired_node_version_value"
+        )
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_update_cluster_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_cluster(
+            cluster_service.UpdateClusterRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            update=cluster_service.ClusterUpdate(
+                desired_node_version="desired_node_version_value"
+            ),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_cluster_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_cluster(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            update=cluster_service.ClusterUpdate(
+                desired_node_version="desired_node_version_value"
+            ),
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].update + mock_val = cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ) + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cluster( + cluster_service.UpdateClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + update=cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateNodePoolRequest, + dict, + ], +) +def test_update_node_pool(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_node_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + client.update_node_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateNodePoolRequest() + + +@pytest.mark.asyncio +async def test_update_node_pool_async( + transport: str = "grpc_asyncio", request_type=cluster_service.UpdateNodePoolRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_update_node_pool_async_from_dict(): + await test_update_node_pool_async(request_type=dict) + + +def test_update_node_pool_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + call.return_value = cluster_service.Operation() + client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_node_pool_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolAutoscalingRequest, + dict, + ], +) +def test_set_node_pool_autoscaling(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolAutoscalingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_autoscaling_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + client.set_node_pool_autoscaling() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolAutoscalingRequest() + + +@pytest.mark.asyncio +async def test_set_node_pool_autoscaling_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetNodePoolAutoscalingRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolAutoscalingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_node_pool_autoscaling_async_from_dict(): + await test_set_node_pool_autoscaling_async(request_type=dict) + + +def test_set_node_pool_autoscaling_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolAutoscalingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_node_pool_autoscaling_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolAutoscalingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLoggingServiceRequest, + dict, + ], +) +def test_set_logging_service(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLoggingServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_logging_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + client.set_logging_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLoggingServiceRequest() + + +@pytest.mark.asyncio +async def test_set_logging_service_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetLoggingServiceRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLoggingServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_logging_service_async_from_dict(): + await test_set_logging_service_async(request_type=dict) + + +def test_set_logging_service_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLoggingServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_logging_service_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLoggingServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_logging_service_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_logging_service( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].logging_service + mock_val = "logging_service_value" + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_set_logging_service_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_logging_service( + cluster_service.SetLoggingServiceRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_set_logging_service_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_logging_service(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            logging_service="logging_service_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].logging_service
+        mock_val = "logging_service_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_set_logging_service_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.set_logging_service(
+            cluster_service.SetLoggingServiceRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            logging_service="logging_service_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cluster_service.SetMonitoringServiceRequest,
+        dict,
+    ],
+)
+def test_set_monitoring_service(request_type, transport: str = "grpc"):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.set_monitoring_service), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation(
+            name="name_value",
+            zone="zone_value",
+            operation_type=cluster_service.Operation.Type.CREATE_CLUSTER,
+            status=cluster_service.Operation.Status.PENDING,
+            detail="detail_value",
+            status_message="status_message_value",
+            self_link="self_link_value",
+            target_link="target_link_value",
+            location="location_value",
+            start_time="start_time_value",
+            end_time="end_time_value",
+        )
+        response = client.set_monitoring_service(request)
+
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == cluster_service.SetMonitoringServiceRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_monitoring_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + client.set_monitoring_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMonitoringServiceRequest() + + +@pytest.mark.asyncio +async def test_set_monitoring_service_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetMonitoringServiceRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_monitoring_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMonitoringServiceRequest() + + # Establish that the response is the type that we expect. 
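+    # Note that this is cluster_service.Operation, the Container API's own
+    # long-running-operation descriptor, not google.longrunning.Operation;
+    # the scalar fields below are simply echoed back from the mocked value.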
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_monitoring_service_async_from_dict(): + await test_set_monitoring_service_async(request_type=dict) + + +def test_set_monitoring_service_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMonitoringServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_monitoring_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_monitoring_service_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMonitoringServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_monitoring_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_monitoring_service_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
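+        # The flattened keyword arguments are a convenience overload: the
+        # client is expected to copy each keyword onto the corresponding
+        # field of a SetMonitoringServiceRequest before invoking the stub,
+        # which is what the per-field assertions afterwards verify.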
+        client.set_monitoring_service(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            monitoring_service="monitoring_service_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].monitoring_service
+        mock_val = "monitoring_service_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_set_monitoring_service_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_monitoring_service(
+            cluster_service.SetMonitoringServiceRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            monitoring_service="monitoring_service_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_monitoring_service_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.set_monitoring_service), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_monitoring_service(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            monitoring_service="monitoring_service_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].monitoring_service
+        mock_val = "monitoring_service_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_set_monitoring_service_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
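+    # A request object and flattened keywords are mutually exclusive ways of
+    # specifying the call, so the client should raise ValueError rather than
+    # guess which one wins.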
+ with pytest.raises(ValueError): + await client.set_monitoring_service( + cluster_service.SetMonitoringServiceRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetAddonsConfigRequest, + dict, + ], +) +def test_set_addons_config(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetAddonsConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_addons_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + client.set_addons_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetAddonsConfigRequest() + + +@pytest.mark.asyncio +async def test_set_addons_config_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetAddonsConfigRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
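+    # Patching __call__ on type(client.transport.set_addons_config) swaps out
+    # the multicallable the transport holds for this RPC, so the test
+    # exercises the full client code path without any network traffic.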
+ with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetAddonsConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_addons_config_async_from_dict(): + await test_set_addons_config_async(request_type=dict) + + +def test_set_addons_config_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetAddonsConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_addons_config_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetAddonsConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_set_addons_config_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.set_addons_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_addons_config(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            addons_config=cluster_service.AddonsConfig(
+                http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True)
+            ),
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].addons_config
+        mock_val = cluster_service.AddonsConfig(
+            http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True)
+        )
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_set_addons_config_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_addons_config(
+            cluster_service.SetAddonsConfigRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            addons_config=cluster_service.AddonsConfig(
+                http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True)
+            ),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_addons_config_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.set_addons_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_addons_config(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            addons_config=cluster_service.AddonsConfig(
+                http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True)
+            ),
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
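+        # AddonsConfig is a message-typed field; proto-plus messages compare
+        # structurally, so the equality checks below match field contents,
+        # not object identity.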
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].addons_config + mock_val = cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ) + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_addons_config_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_addons_config( + cluster_service.SetAddonsConfigRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + addons_config=cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLocationsRequest, + dict, + ], +) +def test_set_locations(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLocationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_locations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
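+    # When neither a request nor flattened fields are given, the client is
+    # expected to synthesize a default (empty) SetLocationsRequest, which
+    # the assertions below confirm reached the stub.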
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + client.set_locations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLocationsRequest() + + +@pytest.mark.asyncio +async def test_set_locations_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetLocationsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLocationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_locations_async_from_dict(): + await test_set_locations_async(request_type=dict) + + +def test_set_locations_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLocationsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + call.return_value = cluster_service.Operation() + client.set_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_locations_field_headers_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cluster_service.SetLocationsRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_locations), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        await client.set_locations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_set_locations_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_locations(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            locations=["locations_value"],
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].locations
+        mock_val = ["locations_value"]
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_set_locations_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_locations(
+            cluster_service.SetLocationsRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            locations=["locations_value"],
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_locations_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
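+        # `locations` is a repeated string field, so the flattened overload
+        # takes a whole Python list and the later equality check compares the
+        # full list value.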
+ response = await client.set_locations( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=["locations_value"], + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].locations + mock_val = ["locations_value"] + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_locations_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_locations( + cluster_service.SetLocationsRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=["locations_value"], + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateMasterRequest, + dict, + ], +) +def test_update_master(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.update_master(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateMasterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_master_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + client.update_master() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateMasterRequest() + + +@pytest.mark.asyncio +async def test_update_master_async( + transport: str = "grpc_asyncio", request_type=cluster_service.UpdateMasterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.update_master(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateMasterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_update_master_async_from_dict(): + await test_update_master_async(request_type=dict) + + +def test_update_master_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateMasterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + call.return_value = cluster_service.Operation() + client.update_master(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_master_field_headers_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cluster_service.UpdateMasterRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_master), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        await client.update_master(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_master_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_master), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_master(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            master_version="master_version_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].master_version
+        mock_val = "master_version_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_update_master_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_master(
+            cluster_service.UpdateMasterRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            master_version="master_version_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_master_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_master), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.update_master( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].master_version + mock_val = "master_version_value" + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_master_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_master( + cluster_service.UpdateMasterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetMasterAuthRequest, + dict, + ], +) +def test_set_master_auth(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMasterAuthRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_master_auth_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + client.set_master_auth() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMasterAuthRequest() + + +@pytest.mark.asyncio +async def test_set_master_auth_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetMasterAuthRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMasterAuthRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_master_auth_async_from_dict(): + await test_set_master_auth_async(request_type=dict) + + +def test_set_master_auth_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMasterAuthRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + call.return_value = cluster_service.Operation() + client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_master_auth_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMasterAuthRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_delete_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_cluster_async( + transport: str = "grpc_asyncio", request_type=cluster_service.DeleteClusterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_delete_cluster_async_from_dict(): + await test_delete_cluster_async(request_type=dict) + + +def test_delete_cluster_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = cluster_service.Operation() + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_field_headers_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cluster_service.DeleteClusterRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        await client.delete_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_cluster_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_cluster(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_cluster_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_cluster(
+            cluster_service.DeleteClusterRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.delete_cluster( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_cluster_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_cluster( + cluster_service.DeleteClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListOperationsRequest, + dict, + ], +) +def test_list_operations(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListOperationsResponse( + missing_zones=["missing_zones_value"], + ) + response = client.list_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListOperationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListOperationsResponse) + assert response.missing_zones == ["missing_zones_value"] + + +def test_list_operations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + client.list_operations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListOperationsRequest() + + +@pytest.mark.asyncio +async def test_list_operations_async( + transport: str = "grpc_asyncio", request_type=cluster_service.ListOperationsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
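+    # (The async transport awaits the stub, so the mock has to return an
+    # awaitable; grpc_helpers_async.FakeUnaryUnaryCall wraps the canned
+    # response in an already-completed call object for exactly that purpose.)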
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListOperationsResponse( + missing_zones=["missing_zones_value"], + ) + ) + response = await client.list_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListOperationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListOperationsResponse) + assert response.missing_zones == ["missing_zones_value"] + + +@pytest.mark.asyncio +async def test_list_operations_async_from_dict(): + await test_list_operations_async(request_type=dict) + + +def test_list_operations_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListOperationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = cluster_service.ListOperationsResponse() + client.list_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListOperationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListOperationsResponse() + ) + await client.list_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_operations_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListOperationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_operations( + project_id="project_id_value", + zone="zone_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+
+
+def test_list_operations_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_operations(
+            cluster_service.ListOperationsRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_operations_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.ListOperationsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_operations(
+            project_id="project_id_value",
+            zone="zone_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_operations_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_operations(
+            cluster_service.ListOperationsRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cluster_service.GetOperationRequest,
+        dict,
+    ],
+)
+def test_get_operation(request_type, transport: str = "grpc"):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation(
+            name="name_value",
+            zone="zone_value",
+            operation_type=cluster_service.Operation.Type.CREATE_CLUSTER,
+            status=cluster_service.Operation.Status.PENDING,
+            detail="detail_value",
+            status_message="status_message_value",
+            self_link="self_link_value",
+            target_link="target_link_value",
+            location="location_value",
+            start_time="start_time_value",
+            end_time="end_time_value",
+        )
+        response = client.get_operation(request)
+
+        # Establish that the underlying gRPC stub method was called.
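+        # (Each mock.call unpacks into a (name, args, kwargs) triple;
+        # args[0] is the request proto the transport method received.)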
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetOperationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_get_operation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + client.get_operation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetOperationRequest() + + +@pytest.mark.asyncio +async def test_get_operation_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetOperationRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetOperationRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_get_operation_async_from_dict(): + await test_get_operation_async(request_type=dict) + + +def test_get_operation_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetOperationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = cluster_service.Operation() + client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetOperationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_operation_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_operation( + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].operation_id
+        mock_val = "operation_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_operation_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_operation(
+            cluster_service.GetOperationRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            operation_id="operation_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_operation_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_operation(
+            project_id="project_id_value",
+            zone="zone_value",
+            operation_id="operation_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].operation_id
+        mock_val = "operation_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_operation_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_operation(
+            cluster_service.GetOperationRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            operation_id="operation_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cluster_service.CancelOperationRequest,
+        dict,
+    ],
+)
+def test_cancel_operation(request_type, transport: str = "grpc"):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.cancel_operation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CancelOperationRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + client.cancel_operation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CancelOperationRequest() + + +@pytest.mark.asyncio +async def test_cancel_operation_async( + transport: str = "grpc_asyncio", request_type=cluster_service.CancelOperationRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CancelOperationRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async_from_dict(): + await test_cancel_operation_async(request_type=dict) + + +def test_cancel_operation_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CancelOperationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + client.cancel_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CancelOperationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
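+    # (CancelOperation presumably maps to google.protobuf.Empty on the wire,
+    # which the client surfaces as None, so the fake awaitable below wraps
+    # None rather than a response message.)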
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.cancel_operation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_cancel_operation_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.cancel_operation(
+            project_id="project_id_value",
+            zone="zone_value",
+            operation_id="operation_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].operation_id
+        mock_val = "operation_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_cancel_operation_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.cancel_operation(
+            cluster_service.CancelOperationRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            operation_id="operation_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.cancel_operation(
+            project_id="project_id_value",
+            zone="zone_value",
+            operation_id="operation_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].operation_id + mock_val = "operation_id_value" + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_operation_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_operation( + cluster_service.CancelOperationRequest(), + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetServerConfigRequest, + dict, + ], +) +def test_get_server_config(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ServerConfig( + default_cluster_version="default_cluster_version_value", + valid_node_versions=["valid_node_versions_value"], + default_image_type="default_image_type_value", + valid_image_types=["valid_image_types_value"], + valid_master_versions=["valid_master_versions_value"], + ) + response = client.get_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetServerConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ServerConfig) + assert response.default_cluster_version == "default_cluster_version_value" + assert response.valid_node_versions == ["valid_node_versions_value"] + assert response.default_image_type == "default_image_type_value" + assert response.valid_image_types == ["valid_image_types_value"] + assert response.valid_master_versions == ["valid_master_versions_value"] + + +def test_get_server_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
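+    # (Even with no arguments, the client is expected to construct and send
+    # a default, all-empty request proto; the assertion below checks that.)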
+ with mock.patch.object( + type(client.transport.get_server_config), "__call__" + ) as call: + client.get_server_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetServerConfigRequest() + + +@pytest.mark.asyncio +async def test_get_server_config_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetServerConfigRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ServerConfig( + default_cluster_version="default_cluster_version_value", + valid_node_versions=["valid_node_versions_value"], + default_image_type="default_image_type_value", + valid_image_types=["valid_image_types_value"], + valid_master_versions=["valid_master_versions_value"], + ) + ) + response = await client.get_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetServerConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ServerConfig) + assert response.default_cluster_version == "default_cluster_version_value" + assert response.valid_node_versions == ["valid_node_versions_value"] + assert response.default_image_type == "default_image_type_value" + assert response.valid_image_types == ["valid_image_types_value"] + assert response.valid_master_versions == ["valid_master_versions_value"] + + +@pytest.mark.asyncio +async def test_get_server_config_async_from_dict(): + await test_get_server_config_async(request_type=dict) + + +def test_get_server_config_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetServerConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_config), "__call__" + ) as call: + call.return_value = cluster_service.ServerConfig() + client.get_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_server_config_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetServerConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.get_server_config), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.ServerConfig()
+        )
+        await client.get_server_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_server_config_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_server_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.ServerConfig()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_server_config(
+            project_id="project_id_value",
+            zone="zone_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_server_config_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_server_config(
+            cluster_service.GetServerConfigRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_server_config_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_server_config), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.ServerConfig()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_server_config(
+            project_id="project_id_value",
+            zone="zone_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_server_config_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_server_config( + cluster_service.GetServerConfigRequest(), + project_id="project_id_value", + zone="zone_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetJSONWebKeysRequest, + dict, + ], +) +def test_get_json_web_keys(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.GetJSONWebKeysResponse() + response = client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetJSONWebKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.GetJSONWebKeysResponse) + + +def test_get_json_web_keys_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + client.get_json_web_keys() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetJSONWebKeysRequest() + + +@pytest.mark.asyncio +async def test_get_json_web_keys_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetJSONWebKeysRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.GetJSONWebKeysResponse() + ) + response = await client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetJSONWebKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.GetJSONWebKeysResponse) + + +@pytest.mark.asyncio +async def test_get_json_web_keys_async_from_dict(): + await test_get_json_web_keys_async(request_type=dict) + + +def test_get_json_web_keys_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
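+    # (GetJSONWebKeys routes on `parent` rather than `name`, so that is the
+    # field populated here and echoed in the metadata assertion below.)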
+ request = cluster_service.GetJSONWebKeysRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + call.return_value = cluster_service.GetJSONWebKeysResponse() + client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_json_web_keys_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetJSONWebKeysRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.GetJSONWebKeysResponse() + ) + await client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListNodePoolsRequest, + dict, + ], +) +def test_list_node_pools(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListNodePoolsResponse() + response = client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListNodePoolsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListNodePoolsResponse) + + +def test_list_node_pools_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + client.list_node_pools() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListNodePoolsRequest() + + +@pytest.mark.asyncio +async def test_list_node_pools_async( + transport: str = "grpc_asyncio", request_type=cluster_service.ListNodePoolsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListNodePoolsResponse() + ) + response = await client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListNodePoolsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListNodePoolsResponse) + + +@pytest.mark.asyncio +async def test_list_node_pools_async_from_dict(): + await test_list_node_pools_async(request_type=dict) + + +def test_list_node_pools_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListNodePoolsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + call.return_value = cluster_service.ListNodePoolsResponse() + client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_node_pools_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListNodePoolsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListNodePoolsResponse() + ) + await client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_node_pools_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.ListNodePoolsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_node_pools(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_node_pools_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_node_pools(
+            cluster_service.ListNodePoolsRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_node_pools_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.ListNodePoolsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_node_pools(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_node_pools_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.list_node_pools( + cluster_service.ListNodePoolsRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetNodePoolRequest, + dict, + ], +) +def test_get_node_pool(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.NodePool( + name="name_value", + initial_node_count=1911, + locations=["locations_value"], + self_link="self_link_value", + version="version_value", + instance_group_urls=["instance_group_urls_value"], + status=cluster_service.NodePool.Status.PROVISIONING, + status_message="status_message_value", + pod_ipv4_cidr_size=1856, + etag="etag_value", + ) + response = client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.NodePool) + assert response.name == "name_value" + assert response.initial_node_count == 1911 + assert response.locations == ["locations_value"] + assert response.self_link == "self_link_value" + assert response.version == "version_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.status == cluster_service.NodePool.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.pod_ipv4_cidr_size == 1856 + assert response.etag == "etag_value" + + +def test_get_node_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + client.get_node_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetNodePoolRequest() + + +@pytest.mark.asyncio +async def test_get_node_pool_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetNodePoolRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. 
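+        # (The NodePool below carries a representative value for every
+        # scalar field so each response attribute can be asserted after
+        # the await.)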
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.NodePool( + name="name_value", + initial_node_count=1911, + locations=["locations_value"], + self_link="self_link_value", + version="version_value", + instance_group_urls=["instance_group_urls_value"], + status=cluster_service.NodePool.Status.PROVISIONING, + status_message="status_message_value", + pod_ipv4_cidr_size=1856, + etag="etag_value", + ) + ) + response = await client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.NodePool) + assert response.name == "name_value" + assert response.initial_node_count == 1911 + assert response.locations == ["locations_value"] + assert response.self_link == "self_link_value" + assert response.version == "version_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.status == cluster_service.NodePool.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.pod_ipv4_cidr_size == 1856 + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_node_pool_async_from_dict(): + await test_get_node_pool_async(request_type=dict) + + +def test_get_node_pool_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + call.return_value = cluster_service.NodePool() + client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_node_pool_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.NodePool() + ) + await client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_node_pool_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.NodePool()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_node_pool(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool_id="node_pool_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].node_pool_id
+        mock_val = "node_pool_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_node_pool_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_node_pool(
+            cluster_service.GetNodePoolRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool_id="node_pool_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_node_pool_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.NodePool()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_node_pool(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool_id="node_pool_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].node_pool_id
+        mock_val = "node_pool_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_node_pool_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_node_pool( + cluster_service.GetNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CreateNodePoolRequest, + dict, + ], +) +def test_create_node_pool(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_create_node_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + client.create_node_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateNodePoolRequest() + + +@pytest.mark.asyncio +async def test_create_node_pool_async( + transport: str = "grpc_asyncio", request_type=cluster_service.CreateNodePoolRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
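+    # grpc_helpers_async.FakeUnaryUnaryCall (used below) wraps the canned
+    # response in an awaitable, so ``await client.create_node_pool(request)``
+    # resolves the same way a real grpc.aio unary-unary call would.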
+ with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_create_node_pool_async_from_dict(): + await test_create_node_pool_async(request_type=dict) + + +def test_create_node_pool_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CreateNodePoolRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + call.return_value = cluster_service.Operation() + client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_node_pool_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CreateNodePoolRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
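+    # The "x-goog-request-params" metadata entry is the routing header that
+    # GAPIC clients attach so the backend can route the request by the
+    # resource named in the ``parent`` field.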
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_node_pool_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_node_pool(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool=cluster_service.NodePool(name="name_value"),
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].node_pool
+        mock_val = cluster_service.NodePool(name="name_value")
+        assert arg == mock_val
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_create_node_pool_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_node_pool(
+            cluster_service.CreateNodePoolRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool=cluster_service.NodePool(name="name_value"),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_node_pool_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_node_pool(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool=cluster_service.NodePool(name="name_value"),
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool + mock_val = cluster_service.NodePool(name="name_value") + assert arg == mock_val + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_node_pool_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_node_pool( + cluster_service.CreateNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool=cluster_service.NodePool(name="name_value"), + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.DeleteNodePoolRequest, + dict, + ], +) +def test_delete_node_pool(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.delete_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_delete_node_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
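+    # Calling the method with neither a request object nor flattened fields
+    # should make the client fall back to a default-constructed
+    # DeleteNodePoolRequest, which the assertion below verifies.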
+ with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + client.delete_node_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteNodePoolRequest() + + +@pytest.mark.asyncio +async def test_delete_node_pool_async( + transport: str = "grpc_asyncio", request_type=cluster_service.DeleteNodePoolRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.delete_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_delete_node_pool_async_from_dict(): + await test_delete_node_pool_async(request_type=dict) + + +def test_delete_node_pool_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.DeleteNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + call.return_value = cluster_service.Operation() + client.delete_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_node_pool_field_headers_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cluster_service.DeleteNodePoolRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        await client.delete_node_pool(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_node_pool_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_node_pool(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool_id="node_pool_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].node_pool_id
+        mock_val = "node_pool_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_node_pool_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_node_pool(
+            cluster_service.DeleteNodePoolRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool_id="node_pool_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_node_pool_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.delete_node_pool( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_node_pool_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_node_pool( + cluster_service.DeleteNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CompleteNodePoolUpgradeRequest, + dict, + ], +) +def test_complete_node_pool_upgrade(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteNodePoolUpgradeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_complete_node_pool_upgrade_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + client.complete_node_pool_upgrade() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteNodePoolUpgradeRequest() + + +@pytest.mark.asyncio +async def test_complete_node_pool_upgrade_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.CompleteNodePoolUpgradeRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
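+    # In proto3 every field has a zero default, so an empty
+    # CompleteNodePoolUpgradeRequest is still a valid message on the wire.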
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteNodePoolUpgradeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_complete_node_pool_upgrade_async_from_dict(): + await test_complete_node_pool_upgrade_async(request_type=dict) + + +def test_complete_node_pool_upgrade_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CompleteNodePoolUpgradeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + call.return_value = None + client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_complete_node_pool_upgrade_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CompleteNodePoolUpgradeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.RollbackNodePoolUpgradeRequest, + dict, + ], +) +def test_rollback_node_pool_upgrade(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
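+        # The canned Operation below mimics the long-running operation
+        # descriptor the service returns for mutations: identifying fields
+        # plus a PENDING status.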
+ call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.RollbackNodePoolUpgradeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_rollback_node_pool_upgrade_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + client.rollback_node_pool_upgrade() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.RollbackNodePoolUpgradeRequest() + + +@pytest.mark.asyncio +async def test_rollback_node_pool_upgrade_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.RollbackNodePoolUpgradeRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.RollbackNodePoolUpgradeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_rollback_node_pool_upgrade_async_from_dict(): + await test_rollback_node_pool_upgrade_async(request_type=dict) + + +def test_rollback_node_pool_upgrade_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.RollbackNodePoolUpgradeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rollback_node_pool_upgrade_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.RollbackNodePoolUpgradeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_rollback_node_pool_upgrade_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.rollback_node_pool_upgrade(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool_id="node_pool_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].node_pool_id
+        mock_val = "node_pool_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_rollback_node_pool_upgrade_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.rollback_node_pool_upgrade(
+            cluster_service.RollbackNodePoolUpgradeRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool_id="node_pool_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_rollback_node_pool_upgrade_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.rollback_node_pool_upgrade), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.rollback_node_pool_upgrade(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            node_pool_id="node_pool_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].node_pool_id
+        mock_val = "node_pool_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_rollback_node_pool_upgrade_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.rollback_node_pool_upgrade( + cluster_service.RollbackNodePoolUpgradeRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolManagementRequest, + dict, + ], +) +def test_set_node_pool_management(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolManagementRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_management_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + client.set_node_pool_management() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolManagementRequest() + + +@pytest.mark.asyncio +async def test_set_node_pool_management_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetNodePoolManagementRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolManagementRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_node_pool_management_async_from_dict(): + await test_set_node_pool_management_async(request_type=dict) + + +def test_set_node_pool_management_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolManagementRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_node_pool_management_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolManagementRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. 
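+        # Each entry in ``mock_calls`` is a (name, args, kwargs) triple; the
+        # unpacking below keeps only the positional arguments of the first call.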
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLabelsRequest, + dict, + ], +) +def test_set_labels(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLabelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_labels_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + client.set_labels() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLabelsRequest() + + +@pytest.mark.asyncio +async def test_set_labels_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetLabelsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLabelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_labels_async_from_dict(): + await test_set_labels_async(request_type=dict) + + +def test_set_labels_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLabelsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + call.return_value = cluster_service.Operation() + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_labels_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLabelsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLegacyAbacRequest, + dict, + ], +) +def test_set_legacy_abac(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_legacy_abac(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLegacyAbacRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_legacy_abac_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + client.set_legacy_abac() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLegacyAbacRequest() + + +@pytest.mark.asyncio +async def test_set_legacy_abac_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetLegacyAbacRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_legacy_abac(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLegacyAbacRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_legacy_abac_async_from_dict(): + await test_set_legacy_abac_async(request_type=dict) + + +def test_set_legacy_abac_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLegacyAbacRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + call.return_value = cluster_service.Operation() + client.set_legacy_abac(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_legacy_abac_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLegacyAbacRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_legacy_abac(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_set_legacy_abac_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_legacy_abac(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            enabled=True,
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].enabled
+        mock_val = True
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_set_legacy_abac_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_legacy_abac(
+            cluster_service.SetLegacyAbacRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            enabled=True,
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_legacy_abac_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_legacy_abac(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            enabled=True,
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].enabled
+        mock_val = True
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_set_legacy_abac_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.set_legacy_abac( + cluster_service.SetLegacyAbacRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + enabled=True, + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.StartIPRotationRequest, + dict, + ], +) +def test_start_ip_rotation(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.start_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.StartIPRotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_start_ip_rotation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + client.start_ip_rotation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.StartIPRotationRequest() + + +@pytest.mark.asyncio +async def test_start_ip_rotation_async( + transport: str = "grpc_asyncio", request_type=cluster_service.StartIPRotationRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.start_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.StartIPRotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_start_ip_rotation_async_from_dict(): + await test_start_ip_rotation_async(request_type=dict) + + +def test_start_ip_rotation_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.StartIPRotationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.start_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_ip_rotation_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.StartIPRotationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.start_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_start_ip_rotation_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_ip_rotation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.start_ip_rotation(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_start_ip_rotation_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.start_ip_rotation(
+            cluster_service.StartIPRotationRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_start_ip_rotation_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_ip_rotation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.start_ip_rotation(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_start_ip_rotation_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.start_ip_rotation( + cluster_service.StartIPRotationRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CompleteIPRotationRequest, + dict, + ], +) +def test_complete_ip_rotation(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.complete_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteIPRotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_complete_ip_rotation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + client.complete_ip_rotation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteIPRotationRequest() + + +@pytest.mark.asyncio +async def test_complete_ip_rotation_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.CompleteIPRotationRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.complete_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteIPRotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_complete_ip_rotation_async_from_dict(): + await test_complete_ip_rotation_async(request_type=dict) + + +def test_complete_ip_rotation_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CompleteIPRotationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.complete_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_complete_ip_rotation_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CompleteIPRotationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.complete_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_complete_ip_rotation_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.complete_ip_rotation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.complete_ip_rotation(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_complete_ip_rotation_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.complete_ip_rotation(
+            cluster_service.CompleteIPRotationRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_complete_ip_rotation_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.complete_ip_rotation), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.complete_ip_rotation(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_complete_ip_rotation_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.complete_ip_rotation( + cluster_service.CompleteIPRotationRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolSizeRequest, + dict, + ], +) +def test_set_node_pool_size(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolSizeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_size_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + client.set_node_pool_size() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolSizeRequest() + + +@pytest.mark.asyncio +async def test_set_node_pool_size_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetNodePoolSizeRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolSizeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_node_pool_size_async_from_dict(): + await test_set_node_pool_size_async(request_type=dict) + + +def test_set_node_pool_size_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolSizeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_node_pool_size_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolSizeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNetworkPolicyRequest, + dict, + ], +) +def test_set_network_policy(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_network_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + client.set_network_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNetworkPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_network_policy_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetNetworkPolicyRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_network_policy_async_from_dict(): + await test_set_network_policy_async(request_type=dict) + + +def test_set_network_policy_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNetworkPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_network_policy_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNetworkPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_set_network_policy_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.set_network_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.Operation()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_network_policy(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            network_policy=cluster_service.NetworkPolicy(
+                provider=cluster_service.NetworkPolicy.Provider.CALICO
+            ),
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].network_policy
+        mock_val = cluster_service.NetworkPolicy(
+            provider=cluster_service.NetworkPolicy.Provider.CALICO
+        )
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_set_network_policy_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_network_policy(
+            cluster_service.SetNetworkPolicyRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            network_policy=cluster_service.NetworkPolicy(
+                provider=cluster_service.NetworkPolicy.Provider.CALICO
+            ),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_network_policy_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.set_network_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_network_policy(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            network_policy=cluster_service.NetworkPolicy(
+                provider=cluster_service.NetworkPolicy.Provider.CALICO
+            ),
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].network_policy + mock_val = cluster_service.NetworkPolicy( + provider=cluster_service.NetworkPolicy.Provider.CALICO + ) + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_network_policy_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_network_policy( + cluster_service.SetNetworkPolicyRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + network_policy=cluster_service.NetworkPolicy( + provider=cluster_service.NetworkPolicy.Provider.CALICO + ), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetMaintenancePolicyRequest, + dict, + ], +) +def test_set_maintenance_policy(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_maintenance_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMaintenancePolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_maintenance_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + client.set_maintenance_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMaintenancePolicyRequest() + + +@pytest.mark.asyncio +async def test_set_maintenance_policy_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetMaintenancePolicyRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_maintenance_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMaintenancePolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_maintenance_policy_async_from_dict(): + await test_set_maintenance_policy_async(request_type=dict) + + +def test_set_maintenance_policy_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMaintenancePolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_maintenance_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_maintenance_policy_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMaintenancePolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_maintenance_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_maintenance_policy_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_maintenance_policy( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + maintenance_policy=cluster_service.MaintenancePolicy( + window=cluster_service.MaintenanceWindow( + daily_maintenance_window=cluster_service.DailyMaintenanceWindow( + start_time="start_time_value" + ) + ) + ), + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].maintenance_policy + mock_val = cluster_service.MaintenancePolicy( + window=cluster_service.MaintenanceWindow( + daily_maintenance_window=cluster_service.DailyMaintenanceWindow( + start_time="start_time_value" + ) + ) + ) + assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_set_maintenance_policy_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.set_maintenance_policy(
+            cluster_service.SetMaintenancePolicyRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            maintenance_policy=cluster_service.MaintenancePolicy(
+                window=cluster_service.MaintenanceWindow(
+                    daily_maintenance_window=cluster_service.DailyMaintenanceWindow(
+                        start_time="start_time_value"
+                    )
+                )
+            ),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_maintenance_policy_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.set_maintenance_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_maintenance_policy(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            maintenance_policy=cluster_service.MaintenancePolicy(
+                window=cluster_service.MaintenanceWindow(
+                    daily_maintenance_window=cluster_service.DailyMaintenanceWindow(
+                        start_time="start_time_value"
+                    )
+                )
+            ),
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].maintenance_policy
+        mock_val = cluster_service.MaintenancePolicy(
+            window=cluster_service.MaintenanceWindow(
+                daily_maintenance_window=cluster_service.DailyMaintenanceWindow(
+                    start_time="start_time_value"
+                )
+            )
+        )
+        assert arg == mock_val
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_set_maintenance_policy_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.set_maintenance_policy(
+            cluster_service.SetMaintenancePolicyRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            maintenance_policy=cluster_service.MaintenancePolicy(
+                window=cluster_service.MaintenanceWindow(
+                    daily_maintenance_window=cluster_service.DailyMaintenanceWindow(
+                        start_time="start_time_value"
+                    )
+                )
+            ),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cluster_service.ListUsableSubnetworksRequest,
+        dict,
+    ],
+)
+def test_list_usable_subnetworks(request_type, transport: str = "grpc"):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListUsableSubnetworksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListUsableSubnetworksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableSubnetworksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_usable_subnetworks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + client.list_usable_subnetworks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListUsableSubnetworksRequest() + + +@pytest.mark.asyncio +async def test_list_usable_subnetworks_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.ListUsableSubnetworksRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListUsableSubnetworksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListUsableSubnetworksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableSubnetworksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_usable_subnetworks_async_from_dict(): + await test_list_usable_subnetworks_async(request_type=dict) + + +def test_list_usable_subnetworks_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListUsableSubnetworksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + call.return_value = cluster_service.ListUsableSubnetworksResponse() + client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_usable_subnetworks_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListUsableSubnetworksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListUsableSubnetworksResponse() + ) + await client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_usable_subnetworks_pager(transport_name: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + next_page_token="abc", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[], + next_page_token="def", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + ], + next_page_token="ghi", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_usable_subnetworks(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cluster_service.UsableSubnetwork) for i in results) + + +def test_list_usable_subnetworks_pages(transport_name: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + # Set the response to a series of pages. 
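+        # Assigning a sequence to `side_effect` makes each successive stub
+        # invocation return the next page; the trailing RuntimeError is a
+        # tripwire in case the pager asks for more pages than the fixture
+        # provides. A caller would walk the same pages like:
+        #
+        #     for page in client.list_usable_subnetworks(request={}).pages:
+        #         print(page.raw_page.next_page_token)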
+ call.side_effect = ( + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + next_page_token="abc", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[], + next_page_token="def", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + ], + next_page_token="ghi", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + ), + RuntimeError, + ) + pages = list(client.list_usable_subnetworks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_usable_subnetworks_async_pager(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + next_page_token="abc", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[], + next_page_token="def", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + ], + next_page_token="ghi", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_usable_subnetworks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cluster_service.UsableSubnetwork) for i in responses) + + +@pytest.mark.asyncio +async def test_list_usable_subnetworks_async_pages(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + next_page_token="abc", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[], + next_page_token="def", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + ], + next_page_token="ghi", + ), + cluster_service.ListUsableSubnetworksResponse( + subnetworks=[ + cluster_service.UsableSubnetwork(), + cluster_service.UsableSubnetwork(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_usable_subnetworks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CheckAutopilotCompatibilityRequest, + dict, + ], +) +def test_check_autopilot_compatibility(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.CheckAutopilotCompatibilityResponse( + summary="summary_value", + ) + response = client.check_autopilot_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CheckAutopilotCompatibilityRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.CheckAutopilotCompatibilityResponse) + assert response.summary == "summary_value" + + +def test_check_autopilot_compatibility_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + client.check_autopilot_compatibility() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CheckAutopilotCompatibilityRequest() + + +@pytest.mark.asyncio +async def test_check_autopilot_compatibility_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.CheckAutopilotCompatibilityRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
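+    # Proto3 fields all carry defaults, so an empty message is well-formed,
+    # e.g. `cluster_service.CheckAutopilotCompatibilityRequest().name == ""`.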
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.CheckAutopilotCompatibilityResponse( + summary="summary_value", + ) + ) + response = await client.check_autopilot_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CheckAutopilotCompatibilityRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.CheckAutopilotCompatibilityResponse) + assert response.summary == "summary_value" + + +@pytest.mark.asyncio +async def test_check_autopilot_compatibility_async_from_dict(): + await test_check_autopilot_compatibility_async(request_type=dict) + + +def test_check_autopilot_compatibility_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CheckAutopilotCompatibilityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + call.return_value = cluster_service.CheckAutopilotCompatibilityResponse() + client.check_autopilot_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_check_autopilot_compatibility_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CheckAutopilotCompatibilityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.CheckAutopilotCompatibilityResponse() + ) + await client.check_autopilot_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ClusterManagerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ClusterManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = ClusterManagerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ClusterManagerGrpcTransport, + ) + + +def test_cluster_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ClusterManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cluster_manager_base_transport(): + # Instantiate the base transport. 
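+    # The base class wires up auth and method wrappers but implements no RPCs,
+    # so every method is expected to raise NotImplementedError until a concrete
+    # transport (gRPC here) overrides it; the loop below verifies exactly that.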
+ with mock.patch( + "google.cloud.container_v1.services.cluster_manager.transports.ClusterManagerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ClusterManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_clusters", + "get_cluster", + "create_cluster", + "update_cluster", + "update_node_pool", + "set_node_pool_autoscaling", + "set_logging_service", + "set_monitoring_service", + "set_addons_config", + "set_locations", + "update_master", + "set_master_auth", + "delete_cluster", + "list_operations", + "get_operation", + "cancel_operation", + "get_server_config", + "get_json_web_keys", + "list_node_pools", + "get_node_pool", + "create_node_pool", + "delete_node_pool", + "complete_node_pool_upgrade", + "rollback_node_pool_upgrade", + "set_node_pool_management", + "set_labels", + "set_legacy_abac", + "start_ip_rotation", + "complete_ip_rotation", + "set_node_pool_size", + "set_network_policy", + "set_maintenance_policy", + "list_usable_subnetworks", + "check_autopilot_compatibility", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cluster_manager_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.container_v1.services.cluster_manager.transports.ClusterManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ClusterManagerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cluster_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.container_v1.services.cluster_manager.transports.ClusterManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ClusterManagerTransport() + adc.assert_called_once() + + +def test_cluster_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
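+    # Outside of tests the same code path resolves Application Default
+    # Credentials; a sketch (assuming ADC is configured in the environment):
+    #
+    #     creds, project = google.auth.default(
+    #         scopes=["https://www.googleapis.com/auth/cloud-platform"]
+    #     )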
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ClusterManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterManagerGrpcTransport, grpc_helpers), + (transports.ClusterManagerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cluster_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "container.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="container.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
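+    # grpc's channel-credential factory is what ultimately consumes the cert
+    # pair; a minimal sketch (cert_bytes/key_bytes stand in for real PEM data):
+    #
+    #     channel_creds = grpc.ssl_channel_credentials(
+    #         certificate_chain=cert_bytes, private_key=key_bytes
+    #     )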
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cluster_manager_host_no_port(transport_name): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="container.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ("container.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_cluster_manager_host_with_port(transport_name): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="container.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ("container.googleapis.com:8000") + + +def test_cluster_manager_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ClusterManagerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cluster_manager_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ClusterManagerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
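+# The supported spellings of the same configuration, as exercised elsewhere in
+# this file, are `client_cert_source_for_mtls` / `ssl_channel_credentials` on
+# the transport and `client_options.ClientOptions(client_cert_source=...)` on
+# the client.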
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_topic_path(): + project = "squid" + topic = "clam" + expected = "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + actual = ClusterManagerClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "whelk", + "topic": "octopus", + } + path = ClusterManagerClient.topic_path(**expected) + + # Check that the path construction is reversible. 
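+    # i.e. the helpers round-trip:
+    #
+    #     ClusterManagerClient.topic_path("whelk", "octopus")
+    #     # -> "projects/whelk/topics/octopus"
+    #     ClusterManagerClient.parse_topic_path("projects/whelk/topics/octopus")
+    #     # -> {"project": "whelk", "topic": "octopus"}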
+ actual = ClusterManagerClient.parse_topic_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ClusterManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ClusterManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterManagerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ClusterManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ClusterManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterManagerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ClusterManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ClusterManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterManagerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = ClusterManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ClusterManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterManagerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ClusterManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ClusterManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ClusterManagerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ClusterManagerTransport, "_prep_wrapped_messages" + ) as prep: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ClusterManagerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ClusterManagerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ClusterManagerClient, transports.ClusterManagerGrpcTransport), + (ClusterManagerAsyncClient, transports.ClusterManagerGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/__init__.py b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py new file mode 100644 index 000000000000..1e2fea4f2c63 --- /dev/null +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py @@ -0,0 +1,11183 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.container_v1beta1.services.cluster_manager import ( + ClusterManagerAsyncClient, + ClusterManagerClient, + pagers, + transports, +) +from google.cloud.container_v1beta1.types import cluster_service + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
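+# Concretely: the patch leaves "container.googleapis.com" untouched and only
+# substitutes "foo.googleapis.com" when the default points at localhost.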
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert ClusterManagerClient._get_default_mtls_endpoint(None) is None
+    assert (
+        ClusterManagerClient._get_default_mtls_endpoint(api_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        ClusterManagerClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        ClusterManagerClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        ClusterManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        ClusterManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+    )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (ClusterManagerClient, "grpc"),
+        (ClusterManagerAsyncClient, "grpc_asyncio"),
+    ],
+)
+def test_cluster_manager_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == ("container.googleapis.com:443")
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.ClusterManagerGrpcTransport, "grpc"),
+        (transports.ClusterManagerGrpcAsyncIOTransport, "grpc_asyncio"),
+    ],
+)
+def test_cluster_manager_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (ClusterManagerClient, "grpc"),
+        (ClusterManagerAsyncClient, "grpc_asyncio"),
+    ],
+)
+def test_cluster_manager_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == ("container.googleapis.com:443")
+
+
+def test_cluster_manager_client_get_transport_class():
+    transport = ClusterManagerClient.get_transport_class()
+    available_transports = [
+        transports.ClusterManagerGrpcTransport,
+    ]
+    assert transport in available_transports
+
+    transport = ClusterManagerClient.get_transport_class("grpc")
+    assert transport == transports.ClusterManagerGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc"),
+        (
+            ClusterManagerAsyncClient,
+            transports.ClusterManagerGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+    ],
+)
+@mock.patch.object(
+    ClusterManagerClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ClusterManagerClient),
+)
+@mock.patch.object(
+    ClusterManagerAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ClusterManagerAsyncClient),
+)
+def test_cluster_manager_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(ClusterManagerClient, "get_transport_class") as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(ClusterManagerClient, "get_transport_class") as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
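+    # The accepted values are "never", "auto" and "always"; anything else is
+    # rejected with MutualTLSChannelError, as the next block asserts.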
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided.
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided.
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc", "true"),
+        (
+            ClusterManagerAsyncClient,
+            transports.ClusterManagerGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc", "false"),
+        (
+            ClusterManagerAsyncClient,
+            transports.ClusterManagerGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    ClusterManagerClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ClusterManagerClient),
+)
+@mock.patch.object(
+    ClusterManagerAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ClusterManagerAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_cluster_manager_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior: the endpoint is switched to the
+    # default mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a
+    # client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
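+    # In short: the cert source is honored only when that variable is "true",
+    # and only then does the endpoint flip to client.DEFAULT_MTLS_ENDPOINT.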
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ClusterManagerClient, ClusterManagerAsyncClient] +) +@mock.patch.object( + ClusterManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterManagerClient), +) +@mock.patch.object( + ClusterManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterManagerAsyncClient), +) +def test_cluster_manager_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ClusterManagerClient, transports.ClusterManagerGrpcTransport, "grpc"), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_cluster_manager_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ClusterManagerClient, + transports.ClusterManagerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cluster_manager_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cluster_manager_client_client_options_from_dict(): + with mock.patch( + "google.cloud.container_v1beta1.services.cluster_manager.transports.ClusterManagerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ClusterManagerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ClusterManagerClient, + transports.ClusterManagerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ClusterManagerAsyncClient, + transports.ClusterManagerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cluster_manager_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "container.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="container.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListClustersRequest, + dict, + ], +) +def test_list_clusters(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListClustersResponse( + missing_zones=["missing_zones_value"], + ) + response = client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListClustersResponse) + assert response.missing_zones == ["missing_zones_value"] + + +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListClustersRequest() + + +@pytest.mark.asyncio +async def test_list_clusters_async( + transport: str = "grpc_asyncio", request_type=cluster_service.ListClustersRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. 
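+        # The async client awaits the stub, so the mocked method must return
+        # an awaitable; FakeUnaryUnaryCall wraps the response message in one.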
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListClustersResponse( + missing_zones=["missing_zones_value"], + ) + ) + response = await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListClustersResponse) + assert response.missing_zones == ["missing_zones_value"] + + +@pytest.mark.asyncio +async def test_list_clusters_async_from_dict(): + await test_list_clusters_async(request_type=dict) + + +def test_list_clusters_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = cluster_service.ListClustersResponse() + client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_clusters_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListClustersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListClustersResponse() + ) + await client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_clusters_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListClustersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_clusters( + project_id="project_id_value", + zone="zone_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
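+        # Each flattened keyword argument should have been copied onto the
+        # matching field of the request object handed to the stub.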
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + + +def test_list_clusters_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + cluster_service.ListClustersRequest(), + project_id="project_id_value", + zone="zone_value", + ) + + +@pytest.mark.asyncio +async def test_list_clusters_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListClustersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListClustersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_clusters( + project_id="project_id_value", + zone="zone_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_clusters_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_clusters( + cluster_service.ListClustersRequest(), + project_id="project_id_value", + zone="zone_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetClusterRequest, + dict, + ], +) +def test_get_cluster(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
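+        # Give the mocked response non-default values for its scalar fields so
+        # the assertions below can confirm each one survives the round trip.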
+ call.return_value = cluster_service.Cluster( + name="name_value", + description="description_value", + initial_node_count=1911, + logging_service="logging_service_value", + monitoring_service="monitoring_service_value", + network="network_value", + cluster_ipv4_cidr="cluster_ipv4_cidr_value", + subnetwork="subnetwork_value", + locations=["locations_value"], + enable_kubernetes_alpha=True, + label_fingerprint="label_fingerprint_value", + private_cluster=True, + master_ipv4_cidr_block="master_ipv4_cidr_block_value", + self_link="self_link_value", + zone="zone_value", + endpoint="endpoint_value", + initial_cluster_version="initial_cluster_version_value", + current_master_version="current_master_version_value", + current_node_version="current_node_version_value", + create_time="create_time_value", + status=cluster_service.Cluster.Status.PROVISIONING, + status_message="status_message_value", + node_ipv4_cidr_size=1955, + services_ipv4_cidr="services_ipv4_cidr_value", + instance_group_urls=["instance_group_urls_value"], + current_node_count=1936, + expire_time="expire_time_value", + location="location_value", + enable_tpu=True, + tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", + id="id_value", + etag="etag_value", + ) + response = client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Cluster) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.initial_node_count == 1911 + assert response.logging_service == "logging_service_value" + assert response.monitoring_service == "monitoring_service_value" + assert response.network == "network_value" + assert response.cluster_ipv4_cidr == "cluster_ipv4_cidr_value" + assert response.subnetwork == "subnetwork_value" + assert response.locations == ["locations_value"] + assert response.enable_kubernetes_alpha is True + assert response.label_fingerprint == "label_fingerprint_value" + assert response.private_cluster is True + assert response.master_ipv4_cidr_block == "master_ipv4_cidr_block_value" + assert response.self_link == "self_link_value" + assert response.zone == "zone_value" + assert response.endpoint == "endpoint_value" + assert response.initial_cluster_version == "initial_cluster_version_value" + assert response.current_master_version == "current_master_version_value" + assert response.current_node_version == "current_node_version_value" + assert response.create_time == "create_time_value" + assert response.status == cluster_service.Cluster.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.node_ipv4_cidr_size == 1955 + assert response.services_ipv4_cidr == "services_ipv4_cidr_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.current_node_count == 1936 + assert response.expire_time == "expire_time_value" + assert response.location == "location_value" + assert response.enable_tpu is True + assert response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" + assert response.id == "id_value" + assert response.etag == "etag_value" + + +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
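+    # With no arguments at all, the client should synthesize a default
+    # GetClusterRequest rather than fail.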
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetClusterRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetClusterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Cluster( + name="name_value", + description="description_value", + initial_node_count=1911, + logging_service="logging_service_value", + monitoring_service="monitoring_service_value", + network="network_value", + cluster_ipv4_cidr="cluster_ipv4_cidr_value", + subnetwork="subnetwork_value", + locations=["locations_value"], + enable_kubernetes_alpha=True, + label_fingerprint="label_fingerprint_value", + private_cluster=True, + master_ipv4_cidr_block="master_ipv4_cidr_block_value", + self_link="self_link_value", + zone="zone_value", + endpoint="endpoint_value", + initial_cluster_version="initial_cluster_version_value", + current_master_version="current_master_version_value", + current_node_version="current_node_version_value", + create_time="create_time_value", + status=cluster_service.Cluster.Status.PROVISIONING, + status_message="status_message_value", + node_ipv4_cidr_size=1955, + services_ipv4_cidr="services_ipv4_cidr_value", + instance_group_urls=["instance_group_urls_value"], + current_node_count=1936, + expire_time="expire_time_value", + location="location_value", + enable_tpu=True, + tpu_ipv4_cidr_block="tpu_ipv4_cidr_block_value", + id="id_value", + etag="etag_value", + ) + ) + response = await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetClusterRequest() + + # Establish that the response is the type that we expect. 
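+    # Awaiting the call unwraps FakeUnaryUnaryCall, so the async client is
+    # expected to hand back the same plain Cluster message as the sync client.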
+ assert isinstance(response, cluster_service.Cluster) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.initial_node_count == 1911 + assert response.logging_service == "logging_service_value" + assert response.monitoring_service == "monitoring_service_value" + assert response.network == "network_value" + assert response.cluster_ipv4_cidr == "cluster_ipv4_cidr_value" + assert response.subnetwork == "subnetwork_value" + assert response.locations == ["locations_value"] + assert response.enable_kubernetes_alpha is True + assert response.label_fingerprint == "label_fingerprint_value" + assert response.private_cluster is True + assert response.master_ipv4_cidr_block == "master_ipv4_cidr_block_value" + assert response.self_link == "self_link_value" + assert response.zone == "zone_value" + assert response.endpoint == "endpoint_value" + assert response.initial_cluster_version == "initial_cluster_version_value" + assert response.current_master_version == "current_master_version_value" + assert response.current_node_version == "current_node_version_value" + assert response.create_time == "create_time_value" + assert response.status == cluster_service.Cluster.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.node_ipv4_cidr_size == 1955 + assert response.services_ipv4_cidr == "services_ipv4_cidr_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.current_node_count == 1936 + assert response.expire_time == "expire_time_value" + assert response.location == "location_value" + assert response.enable_tpu is True + assert response.tpu_ipv4_cidr_block == "tpu_ipv4_cidr_block_value" + assert response.id == "id_value" + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_cluster_async_from_dict(): + await test_get_cluster_async(request_type=dict) + + +def test_get_cluster_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = cluster_service.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cluster_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Cluster() + ) + await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cluster_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Cluster() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cluster( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +def test_get_cluster_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster( + cluster_service.GetClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + +@pytest.mark.asyncio +async def test_get_cluster_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Cluster() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Cluster() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_cluster( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_cluster_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
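+    # Mixing the two calling conventions is ambiguous, so the client raises
+    # ValueError before any RPC is attempted.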
+ with pytest.raises(ValueError): + await client.get_cluster( + cluster_service.GetClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CreateClusterRequest, + dict, + ], +) +def test_create_cluster(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_create_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateClusterRequest() + + +@pytest.mark.asyncio +async def test_create_cluster_async( + transport: str = "grpc_asyncio", request_type=cluster_service.CreateClusterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
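+        # Cluster mutations return a long-running Operation describing the
+        # server-side work, not the cluster resource itself.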
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_create_cluster_async_from_dict(): + await test_create_cluster_async(request_type=dict) + + +def test_create_cluster_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = cluster_service.Operation() + client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_cluster_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CreateClusterRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
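+    # The routing parameters travel as gRPC metadata; the backend reads the
+    # x-goog-request-params header to route the request to the right resource.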
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_cluster_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_cluster( + project_id="project_id_value", + zone="zone_value", + cluster=cluster_service.Cluster(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = cluster_service.Cluster(name="name_value") + assert arg == mock_val + + +def test_create_cluster_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cluster( + cluster_service.CreateClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster=cluster_service.Cluster(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_cluster_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_cluster( + project_id="project_id_value", + zone="zone_value", + cluster=cluster_service.Cluster(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = cluster_service.Cluster(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_cluster_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_cluster( + cluster_service.CreateClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster=cluster_service.Cluster(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + client.update_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateClusterRequest() + + +@pytest.mark.asyncio +async def test_update_cluster_async( + transport: str = "grpc_asyncio", request_type=cluster_service.UpdateClusterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_update_cluster_async_from_dict(): + await test_update_cluster_async(request_type=dict) + + +def test_update_cluster_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = cluster_service.Operation() + client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cluster_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.update_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_cluster_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_cluster( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + update=cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].update + mock_val = cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ) + assert arg == mock_val + + +def test_update_cluster_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cluster( + cluster_service.UpdateClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + update=cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ), + ) + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_cluster( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + update=cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
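+        # Proto-plus messages compare by value, so checking args[0].update
+        # against a freshly built ClusterUpdate verifies the field contents,
+        # not object identity.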
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].update + mock_val = cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cluster( + cluster_service.UpdateClusterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + update=cluster_service.ClusterUpdate( + desired_node_version="desired_node_version_value" + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateNodePoolRequest, + dict, + ], +) +def test_update_node_pool(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_node_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
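+    # __call__ is looked up on the type, not the instance, which is why the
+    # patch targets type(client.transport.update_node_pool).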
+ with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + client.update_node_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateNodePoolRequest() + + +@pytest.mark.asyncio +async def test_update_node_pool_async( + transport: str = "grpc_asyncio", request_type=cluster_service.UpdateNodePoolRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_update_node_pool_async_from_dict(): + await test_update_node_pool_async(request_type=dict) + + +def test_update_node_pool_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + call.return_value = cluster_service.Operation() + client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_node_pool_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_node_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.update_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolAutoscalingRequest, + dict, + ], +) +def test_set_node_pool_autoscaling(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolAutoscalingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_autoscaling_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + client.set_node_pool_autoscaling() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolAutoscalingRequest() + + +@pytest.mark.asyncio +async def test_set_node_pool_autoscaling_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetNodePoolAutoscalingRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolAutoscalingRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_node_pool_autoscaling_async_from_dict(): + await test_set_node_pool_autoscaling_async(request_type=dict) + + +def test_set_node_pool_autoscaling_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolAutoscalingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_node_pool_autoscaling_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolAutoscalingRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_autoscaling), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_node_pool_autoscaling(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLoggingServiceRequest, + dict, + ], +) +def test_set_logging_service(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLoggingServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_logging_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + client.set_logging_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLoggingServiceRequest() + + +@pytest.mark.asyncio +async def test_set_logging_service_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetLoggingServiceRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLoggingServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_logging_service_async_from_dict(): + await test_set_logging_service_async(request_type=dict) + + +def test_set_logging_service_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLoggingServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_logging_service_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLoggingServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_logging_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_logging_service_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_logging_service( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].logging_service + mock_val = "logging_service_value" + assert arg == mock_val + + +def test_set_logging_service_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_logging_service( + cluster_service.SetLoggingServiceRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + ) + + +@pytest.mark.asyncio +async def test_set_logging_service_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_logging_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.set_logging_service( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].logging_service + mock_val = "logging_service_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_logging_service_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_logging_service( + cluster_service.SetLoggingServiceRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + logging_service="logging_service_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetMonitoringServiceRequest, + dict, + ], +) +def test_set_monitoring_service(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_monitoring_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMonitoringServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_monitoring_service_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + client.set_monitoring_service() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMonitoringServiceRequest() + + +@pytest.mark.asyncio +async def test_set_monitoring_service_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetMonitoringServiceRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_monitoring_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMonitoringServiceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_monitoring_service_async_from_dict(): + await test_set_monitoring_service_async(request_type=dict) + + +def test_set_monitoring_service_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMonitoringServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_monitoring_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_monitoring_service_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMonitoringServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_monitoring_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_monitoring_service_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_monitoring_service( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].monitoring_service + mock_val = "monitoring_service_value" + assert arg == mock_val + + +def test_set_monitoring_service_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_monitoring_service( + cluster_service.SetMonitoringServiceRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + ) + + +@pytest.mark.asyncio +async def test_set_monitoring_service_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_monitoring_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.set_monitoring_service( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].monitoring_service + mock_val = "monitoring_service_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_monitoring_service_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_monitoring_service( + cluster_service.SetMonitoringServiceRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + monitoring_service="monitoring_service_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetAddonsConfigRequest, + dict, + ], +) +def test_set_addons_config(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetAddonsConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_addons_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + client.set_addons_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetAddonsConfigRequest() + + +@pytest.mark.asyncio +async def test_set_addons_config_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetAddonsConfigRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetAddonsConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_addons_config_async_from_dict(): + await test_set_addons_config_async(request_type=dict) + + +def test_set_addons_config_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetAddonsConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_addons_config_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetAddonsConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_addons_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_addons_config_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_addons_config( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + addons_config=cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].addons_config + mock_val = cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ) + assert arg == mock_val + + +def test_set_addons_config_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_addons_config( + cluster_service.SetAddonsConfigRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + addons_config=cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ), + ) + + +@pytest.mark.asyncio +async def test_set_addons_config_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_addons_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
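+        # (The plain Operation assigned first is superseded by the
+        # awaitable FakeUnaryUnaryCall assigned immediately after.)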
+ call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_addons_config( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + addons_config=cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].addons_config + mock_val = cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_addons_config_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_addons_config( + cluster_service.SetAddonsConfigRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + addons_config=cluster_service.AddonsConfig( + http_load_balancing=cluster_service.HttpLoadBalancing(disabled=True) + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLocationsRequest, + dict, + ], +) +def test_set_locations(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLocationsRequest() + + # Establish that the response is the type that we expect. 
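+    # Mutation RPCs on ClusterManager resolve to a long-running Operation;
+    # the asserts below confirm each populated field round-trips intact.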
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_locations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + client.set_locations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLocationsRequest() + + +@pytest.mark.asyncio +async def test_set_locations_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetLocationsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLocationsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_locations_async_from_dict(): + await test_set_locations_async(request_type=dict) + + +def test_set_locations_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLocationsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + call.return_value = cluster_service.Operation() + client.set_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_locations_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLocationsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_locations_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_locations( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=["locations_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
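+        # Each flattened field is compared pairwise (arg vs. mock_val)
+        # against the request object the client assembled.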
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].locations + mock_val = ["locations_value"] + assert arg == mock_val + + +def test_set_locations_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_locations( + cluster_service.SetLocationsRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=["locations_value"], + ) + + +@pytest.mark.asyncio +async def test_set_locations_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_locations( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=["locations_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].locations + mock_val = ["locations_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_locations_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_locations( + cluster_service.SetLocationsRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + locations=["locations_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.UpdateMasterRequest, + dict, + ], +) +def test_update_master(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + # Designate an appropriate return value for the call. 
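+        # Populating every scalar field lets the assertions further down
+        # verify that values pass through the mocked transport unchanged.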
+ call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.update_master(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateMasterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_update_master_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + client.update_master() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateMasterRequest() + + +@pytest.mark.asyncio +async def test_update_master_async( + transport: str = "grpc_asyncio", request_type=cluster_service.UpdateMasterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.update_master(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.UpdateMasterRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_update_master_async_from_dict(): + await test_update_master_async(request_type=dict) + + +def test_update_master_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateMasterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + call.return_value = cluster_service.Operation() + client.update_master(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_master_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.UpdateMasterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.update_master(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_master_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_master( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].master_version + mock_val = "master_version_value" + assert arg == mock_val + + +def test_update_master_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_master( + cluster_service.UpdateMasterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + ) + + +@pytest.mark.asyncio +async def test_update_master_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_master), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_master( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].master_version + mock_val = "master_version_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_master_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_master( + cluster_service.UpdateMasterRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + master_version="master_version_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetMasterAuthRequest, + dict, + ], +) +def test_set_master_auth(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMasterAuthRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_master_auth_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + client.set_master_auth() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMasterAuthRequest() + + +@pytest.mark.asyncio +async def test_set_master_auth_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetMasterAuthRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMasterAuthRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_master_auth_async_from_dict(): + await test_set_master_auth_async(request_type=dict) + + +def test_set_master_auth_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMasterAuthRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + call.return_value = cluster_service.Operation() + client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_master_auth_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMasterAuthRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_master_auth), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_master_auth(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_delete_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + client.delete_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteClusterRequest() + + +@pytest.mark.asyncio +async def test_delete_cluster_async( + transport: str = "grpc_asyncio", request_type=cluster_service.DeleteClusterRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteClusterRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_delete_cluster_async_from_dict(): + await test_delete_cluster_async(request_type=dict) + + +def test_delete_cluster_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = cluster_service.Operation() + client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cluster_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.DeleteClusterRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.delete_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_cluster_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_cluster( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
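+ # Each flattened keyword argument should have been copied into the
+ # corresponding field of the request message.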
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+
+
+def test_delete_cluster_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_cluster(
+ cluster_service.DeleteClusterRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_cluster(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_flattened_error_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_cluster(
+ cluster_service.DeleteClusterRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ cluster_service.ListOperationsRequest,
+ dict,
+ ],
+)
+def test_list_operations(request_type, transport: str = "grpc"):
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.ListOperationsResponse(
+ missing_zones=["missing_zones_value"],
+ )
+ response = client.list_operations(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cluster_service.ListOperationsRequest()
+
+ # Establish that the response is the type that we expect.
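+ # missing_zones flags zones whose operations could not be retrieved;
+ # the mocked value should round-trip unchanged.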
+ assert isinstance(response, cluster_service.ListOperationsResponse) + assert response.missing_zones == ["missing_zones_value"] + + +def test_list_operations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + client.list_operations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListOperationsRequest() + + +@pytest.mark.asyncio +async def test_list_operations_async( + transport: str = "grpc_asyncio", request_type=cluster_service.ListOperationsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListOperationsResponse( + missing_zones=["missing_zones_value"], + ) + ) + response = await client.list_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListOperationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListOperationsResponse) + assert response.missing_zones == ["missing_zones_value"] + + +@pytest.mark.asyncio +async def test_list_operations_async_from_dict(): + await test_list_operations_async(request_type=dict) + + +def test_list_operations_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListOperationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = cluster_service.ListOperationsResponse() + client.list_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListOperationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
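+ # Patching the transport method's __call__ intercepts the request
+ # in-process, so the test never generates network traffic.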
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.ListOperationsResponse()
+ )
+ await client.list_operations(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_list_operations_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.ListOperationsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_operations(
+ project_id="project_id_value",
+ zone="zone_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+
+
+def test_list_operations_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_operations(
+ cluster_service.ListOperationsRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_operations_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.ListOperationsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_operations(
+ project_id="project_id_value",
+ zone="zone_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_operations_flattened_error_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
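+ # Mixing the two calling conventions is ambiguous, so the client
+ # raises ValueError before any RPC is attempted.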
+ with pytest.raises(ValueError): + await client.list_operations( + cluster_service.ListOperationsRequest(), + project_id="project_id_value", + zone="zone_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetOperationRequest, + dict, + ], +) +def test_get_operation(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetOperationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_get_operation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + client.get_operation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetOperationRequest() + + +@pytest.mark.asyncio +async def test_get_operation_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetOperationRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
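+ # FakeUnaryUnaryCall wraps the message in an awaitable, mimicking the
+ # call object a real gRPC asyncio channel would return.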
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetOperationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_get_operation_async_from_dict(): + await test_get_operation_async(request_type=dict) + + +def test_get_operation_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetOperationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = cluster_service.Operation() + client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetOperationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
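+ # Routing information travels as an ("x-goog-request-params",
+ # "<field>=<value>") pair in the call metadata.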
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_get_operation_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.Operation()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_operation(
+ project_id="project_id_value",
+ zone="zone_value",
+ operation_id="operation_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].operation_id
+ mock_val = "operation_id_value"
+ assert arg == mock_val
+
+
+def test_get_operation_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_operation(
+ cluster_service.GetOperationRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ operation_id="operation_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_operation_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_operation(
+ project_id="project_id_value",
+ zone="zone_value",
+ operation_id="operation_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].operation_id
+ mock_val = "operation_id_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_operation_flattened_error_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.get_operation( + cluster_service.GetOperationRequest(), + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CancelOperationRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + client.cancel_operation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CancelOperationRequest() + + +@pytest.mark.asyncio +async def test_cancel_operation_async( + transport: str = "grpc_asyncio", request_type=cluster_service.CancelOperationRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CancelOperationRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async_from_dict(): + await test_cancel_operation_async(request_type=dict) + + +def test_cancel_operation_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CancelOperationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
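+ # CancelOperation maps to google.protobuf.Empty, so the mocked call
+ # simply returns None.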
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+ call.return_value = None
+ client.cancel_operation(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation_field_headers_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cluster_service.CancelOperationRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.cancel_operation(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_cancel_operation_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.cancel_operation(
+ project_id="project_id_value",
+ zone="zone_value",
+ operation_id="operation_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].operation_id
+ mock_val = "operation_id_value"
+ assert arg == mock_val
+
+
+def test_cancel_operation_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.cancel_operation(
+ cluster_service.CancelOperationRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ operation_id="operation_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_cancel_operation_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.cancel_operation( + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].operation_id + mock_val = "operation_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_operation_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_operation( + cluster_service.CancelOperationRequest(), + project_id="project_id_value", + zone="zone_value", + operation_id="operation_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetServerConfigRequest, + dict, + ], +) +def test_get_server_config(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ServerConfig( + default_cluster_version="default_cluster_version_value", + valid_node_versions=["valid_node_versions_value"], + default_image_type="default_image_type_value", + valid_image_types=["valid_image_types_value"], + valid_master_versions=["valid_master_versions_value"], + ) + response = client.get_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetServerConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ServerConfig) + assert response.default_cluster_version == "default_cluster_version_value" + assert response.valid_node_versions == ["valid_node_versions_value"] + assert response.default_image_type == "default_image_type_value" + assert response.valid_image_types == ["valid_image_types_value"] + assert response.valid_master_versions == ["valid_master_versions_value"] + + +def test_get_server_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_server_config), "__call__" + ) as call: + client.get_server_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetServerConfigRequest() + + +@pytest.mark.asyncio +async def test_get_server_config_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetServerConfigRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ServerConfig( + default_cluster_version="default_cluster_version_value", + valid_node_versions=["valid_node_versions_value"], + default_image_type="default_image_type_value", + valid_image_types=["valid_image_types_value"], + valid_master_versions=["valid_master_versions_value"], + ) + ) + response = await client.get_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetServerConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ServerConfig) + assert response.default_cluster_version == "default_cluster_version_value" + assert response.valid_node_versions == ["valid_node_versions_value"] + assert response.default_image_type == "default_image_type_value" + assert response.valid_image_types == ["valid_image_types_value"] + assert response.valid_master_versions == ["valid_master_versions_value"] + + +@pytest.mark.asyncio +async def test_get_server_config_async_from_dict(): + await test_get_server_config_async(request_type=dict) + + +def test_get_server_config_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetServerConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_server_config), "__call__" + ) as call: + call.return_value = cluster_service.ServerConfig() + client.get_server_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_server_config_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetServerConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.get_server_config), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.ServerConfig()
+ )
+ await client.get_server_config(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_get_server_config_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_server_config), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.ServerConfig()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_server_config(
+ project_id="project_id_value",
+ zone="zone_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+
+
+def test_get_server_config_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_server_config(
+ cluster_service.GetServerConfigRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_server_config_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_server_config), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.ServerConfig()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_server_config(
+ project_id="project_id_value",
+ zone="zone_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_server_config_flattened_error_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.get_server_config( + cluster_service.GetServerConfigRequest(), + project_id="project_id_value", + zone="zone_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetJSONWebKeysRequest, + dict, + ], +) +def test_get_json_web_keys(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.GetJSONWebKeysResponse() + response = client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetJSONWebKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.GetJSONWebKeysResponse) + + +def test_get_json_web_keys_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + client.get_json_web_keys() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetJSONWebKeysRequest() + + +@pytest.mark.asyncio +async def test_get_json_web_keys_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetJSONWebKeysRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.GetJSONWebKeysResponse() + ) + response = await client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetJSONWebKeysRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.GetJSONWebKeysResponse) + + +@pytest.mark.asyncio +async def test_get_json_web_keys_async_from_dict(): + await test_get_json_web_keys_async(request_type=dict) + + +def test_get_json_web_keys_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
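+ # GetJSONWebKeys is routed by `parent` rather than `name`, so that is
+ # the field populated for the header check.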
+ request = cluster_service.GetJSONWebKeysRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + call.return_value = cluster_service.GetJSONWebKeysResponse() + client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_json_web_keys_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetJSONWebKeysRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_json_web_keys), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.GetJSONWebKeysResponse() + ) + await client.get_json_web_keys(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListNodePoolsRequest, + dict, + ], +) +def test_list_node_pools(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListNodePoolsResponse() + response = client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListNodePoolsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListNodePoolsResponse) + + +def test_list_node_pools_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + client.list_node_pools() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListNodePoolsRequest() + + +@pytest.mark.asyncio +async def test_list_node_pools_async( + transport: str = "grpc_asyncio", request_type=cluster_service.ListNodePoolsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListNodePoolsResponse() + ) + response = await client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListNodePoolsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListNodePoolsResponse) + + +@pytest.mark.asyncio +async def test_list_node_pools_async_from_dict(): + await test_list_node_pools_async(request_type=dict) + + +def test_list_node_pools_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListNodePoolsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + call.return_value = cluster_service.ListNodePoolsResponse() + client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_node_pools_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListNodePoolsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListNodePoolsResponse() + ) + await client.list_node_pools(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_list_node_pools_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.ListNodePoolsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_node_pools(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+
+
+def test_list_node_pools_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_node_pools(
+ cluster_service.ListNodePoolsRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_node_pools_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_node_pools), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.ListNodePoolsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_node_pools(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_node_pools_flattened_error_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.list_node_pools( + cluster_service.ListNodePoolsRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.GetNodePoolRequest, + dict, + ], +) +def test_get_node_pool(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.NodePool( + name="name_value", + initial_node_count=1911, + locations=["locations_value"], + self_link="self_link_value", + version="version_value", + instance_group_urls=["instance_group_urls_value"], + status=cluster_service.NodePool.Status.PROVISIONING, + status_message="status_message_value", + pod_ipv4_cidr_size=1856, + etag="etag_value", + ) + response = client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.NodePool) + assert response.name == "name_value" + assert response.initial_node_count == 1911 + assert response.locations == ["locations_value"] + assert response.self_link == "self_link_value" + assert response.version == "version_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.status == cluster_service.NodePool.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.pod_ipv4_cidr_size == 1856 + assert response.etag == "etag_value" + + +def test_get_node_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + client.get_node_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetNodePoolRequest() + + +@pytest.mark.asyncio +async def test_get_node_pool_async( + transport: str = "grpc_asyncio", request_type=cluster_service.GetNodePoolRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. 
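+ # Populate a representative NodePool so every field assertion below
+ # checks a non-default value.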
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.NodePool( + name="name_value", + initial_node_count=1911, + locations=["locations_value"], + self_link="self_link_value", + version="version_value", + instance_group_urls=["instance_group_urls_value"], + status=cluster_service.NodePool.Status.PROVISIONING, + status_message="status_message_value", + pod_ipv4_cidr_size=1856, + etag="etag_value", + ) + ) + response = await client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.GetNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.NodePool) + assert response.name == "name_value" + assert response.initial_node_count == 1911 + assert response.locations == ["locations_value"] + assert response.self_link == "self_link_value" + assert response.version == "version_value" + assert response.instance_group_urls == ["instance_group_urls_value"] + assert response.status == cluster_service.NodePool.Status.PROVISIONING + assert response.status_message == "status_message_value" + assert response.pod_ipv4_cidr_size == 1856 + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_node_pool_async_from_dict(): + await test_get_node_pool_async(request_type=dict) + + +def test_get_node_pool_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + call.return_value = cluster_service.NodePool() + client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_node_pool_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.GetNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.NodePool() + ) + await client.get_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_node_pool_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
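+    # Patching __call__ on the type of the bound stub intercepts the actual
+    # gRPC invocation while leaving the client-side request plumbing
+    # (serialization, metadata, routing headers) in place.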
+ with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.NodePool() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_node_pool( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + + +def test_get_node_pool_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_node_pool( + cluster_service.GetNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + +@pytest.mark.asyncio +async def test_get_node_pool_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.NodePool() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.NodePool() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_node_pool( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_node_pool_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_node_pool( + cluster_service.GetNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CreateNodePoolRequest, + dict, + ], +) +def test_create_node_pool(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_create_node_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + client.create_node_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateNodePoolRequest() + + +@pytest.mark.asyncio +async def test_create_node_pool_async( + transport: str = "grpc_asyncio", request_type=cluster_service.CreateNodePoolRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CreateNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_create_node_pool_async_from_dict(): + await test_create_node_pool_async(request_type=dict) + + +def test_create_node_pool_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CreateNodePoolRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + call.return_value = cluster_service.Operation() + client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_node_pool_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CreateNodePoolRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.create_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
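+    # GAPIC clients propagate routing information to the backend via the
+    # "x-goog-request-params" metadata entry, built from the request fields
+    # that appear in the HTTP URI (here, `parent`).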
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_node_pool_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_node_pool( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool=cluster_service.NodePool(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool + mock_val = cluster_service.NodePool(name="name_value") + assert arg == mock_val + + +def test_create_node_pool_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_node_pool( + cluster_service.CreateNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool=cluster_service.NodePool(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_node_pool_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_node_pool( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool=cluster_service.NodePool(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool + mock_val = cluster_service.NodePool(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_node_pool_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_node_pool( + cluster_service.CreateNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool=cluster_service.NodePool(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.DeleteNodePoolRequest, + dict, + ], +) +def test_delete_node_pool(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.delete_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_delete_node_pool_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + client.delete_node_pool() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteNodePoolRequest() + + +@pytest.mark.asyncio +async def test_delete_node_pool_async( + transport: str = "grpc_asyncio", request_type=cluster_service.DeleteNodePoolRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.delete_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.DeleteNodePoolRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_delete_node_pool_async_from_dict(): + await test_delete_node_pool_async(request_type=dict) + + +def test_delete_node_pool_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.DeleteNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + call.return_value = cluster_service.Operation() + client.delete_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_node_pool_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.DeleteNodePoolRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.delete_node_pool(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_node_pool_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_node_pool( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + + +def test_delete_node_pool_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_node_pool( + cluster_service.DeleteNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + +@pytest.mark.asyncio +async def test_delete_node_pool_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_node_pool), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_node_pool( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_node_pool_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_node_pool( + cluster_service.DeleteNodePoolRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CompleteNodePoolUpgradeRequest, + dict, + ], +) +def test_complete_node_pool_upgrade(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteNodePoolUpgradeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_complete_node_pool_upgrade_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + client.complete_node_pool_upgrade() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteNodePoolUpgradeRequest() + + +@pytest.mark.asyncio +async def test_complete_node_pool_upgrade_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.CompleteNodePoolUpgradeRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteNodePoolUpgradeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_complete_node_pool_upgrade_async_from_dict(): + await test_complete_node_pool_upgrade_async(request_type=dict) + + +def test_complete_node_pool_upgrade_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
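+    # Proto3 treats an empty string as indistinguishable from unset, so the
+    # test uses a non-empty value to make the header's contents observable.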
+ request = cluster_service.CompleteNodePoolUpgradeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + call.return_value = None + client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_complete_node_pool_upgrade_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CompleteNodePoolUpgradeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_node_pool_upgrade), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.complete_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.RollbackNodePoolUpgradeRequest, + dict, + ], +) +def test_rollback_node_pool_upgrade(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.RollbackNodePoolUpgradeRequest() + + # Establish that the response is the type that we expect. 
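+    # cluster_service.Operation is the container API's own operation message
+    # (polled via get_operation), not google.longrunning; on this surface
+    # start_time and end_time are plain strings, as the assertions show.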
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_rollback_node_pool_upgrade_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + client.rollback_node_pool_upgrade() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.RollbackNodePoolUpgradeRequest() + + +@pytest.mark.asyncio +async def test_rollback_node_pool_upgrade_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.RollbackNodePoolUpgradeRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.RollbackNodePoolUpgradeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_rollback_node_pool_upgrade_async_from_dict(): + await test_rollback_node_pool_upgrade_async(request_type=dict) + + +def test_rollback_node_pool_upgrade_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.RollbackNodePoolUpgradeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rollback_node_pool_upgrade_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.RollbackNodePoolUpgradeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.rollback_node_pool_upgrade(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_rollback_node_pool_upgrade_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
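+        # project_id/zone/cluster_id/node_pool_id are the legacy GKE routing
+        # fields; callers on the current surface would typically pass `name`
+        # instead, but the flattened signature still accepts the legacy set.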
+ client.rollback_node_pool_upgrade( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + + +def test_rollback_node_pool_upgrade_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback_node_pool_upgrade( + cluster_service.RollbackNodePoolUpgradeRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + +@pytest.mark.asyncio +async def test_rollback_node_pool_upgrade_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_node_pool_upgrade), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rollback_node_pool_upgrade( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_rollback_node_pool_upgrade_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rollback_node_pool_upgrade( + cluster_service.RollbackNodePoolUpgradeRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolManagementRequest, + dict, + ], +) +def test_set_node_pool_management(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
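+    # Every scalar field has a well-defined default in proto3, so an empty
+    # request still serializes cleanly and the mocked stub never inspects it.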
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolManagementRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_management_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + client.set_node_pool_management() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolManagementRequest() + + +@pytest.mark.asyncio +async def test_set_node_pool_management_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetNodePoolManagementRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
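+        # The same Operation payload as the sync test, wrapped so it can be
+        # awaited; only the transport plumbing differs between the two paths.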
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolManagementRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_node_pool_management_async_from_dict(): + await test_set_node_pool_management_async(request_type=dict) + + +def test_set_node_pool_management_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolManagementRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_node_pool_management_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolManagementRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_node_pool_management(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_node_pool_management_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_node_pool_management( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + management=cluster_service.NodeManagement(auto_upgrade=True), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + arg = args[0].management + mock_val = cluster_service.NodeManagement(auto_upgrade=True) + assert arg == mock_val + + +def test_set_node_pool_management_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_node_pool_management( + cluster_service.SetNodePoolManagementRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + management=cluster_service.NodeManagement(auto_upgrade=True), + ) + + +@pytest.mark.asyncio +async def test_set_node_pool_management_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_management), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_node_pool_management( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + management=cluster_service.NodeManagement(auto_upgrade=True), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
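+        # The flattened kwargs are coalesced into a single
+        # SetNodePoolManagementRequest, so each field is compared
+        # attribute-by-attribute on the captured request object.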
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].node_pool_id + mock_val = "node_pool_id_value" + assert arg == mock_val + arg = args[0].management + mock_val = cluster_service.NodeManagement(auto_upgrade=True) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_node_pool_management_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_node_pool_management( + cluster_service.SetNodePoolManagementRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + node_pool_id="node_pool_id_value", + management=cluster_service.NodeManagement(auto_upgrade=True), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLabelsRequest, + dict, + ], +) +def test_set_labels(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLabelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_labels_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
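+    # With no request and no flattened fields, the client should fall back to
+    # a default-constructed SetLabelsRequest(), which the assertion below
+    # checks against the captured positional argument.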
+ with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + client.set_labels() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLabelsRequest() + + +@pytest.mark.asyncio +async def test_set_labels_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetLabelsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLabelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_labels_async_from_dict(): + await test_set_labels_async(request_type=dict) + + +def test_set_labels_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLabelsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + call.return_value = cluster_service.Operation() + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_labels_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value.
+ request = cluster_service.SetLabelsRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_labels), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ await client.set_labels(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_set_labels_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_labels), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.Operation()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.set_labels(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ resource_labels={"key_value": "value_value"},
+ label_fingerprint="label_fingerprint_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+ arg = args[0].resource_labels
+ mock_val = {"key_value": "value_value"}
+ assert arg == mock_val
+ arg = args[0].label_fingerprint
+ mock_val = "label_fingerprint_value"
+ assert arg == mock_val
+
+
+def test_set_labels_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.set_labels(
+ cluster_service.SetLabelsRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ resource_labels={"key_value": "value_value"},
+ label_fingerprint="label_fingerprint_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_set_labels_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_labels), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.set_labels(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ resource_labels={"key_value": "value_value"},
+ label_fingerprint="label_fingerprint_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].resource_labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].label_fingerprint + mock_val = "label_fingerprint_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_labels_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_labels( + cluster_service.SetLabelsRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + resource_labels={"key_value": "value_value"}, + label_fingerprint="label_fingerprint_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetLegacyAbacRequest, + dict, + ], +) +def test_set_legacy_abac(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_legacy_abac(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLegacyAbacRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_legacy_abac_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
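+ # The request-equality assertion below works because proto-plus messages
+ # compare by field value rather than identity. A short sketch of that
+ # property (values are illustrative, not part of this suite):
+ #
+ #   a = cluster_service.SetLegacyAbacRequest(name="n")
+ #   b = cluster_service.SetLegacyAbacRequest(name="n")
+ #   assert a == b        # value equality
+ #   assert a is not b    # distinct objects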
+ with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + client.set_legacy_abac() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLegacyAbacRequest() + + +@pytest.mark.asyncio +async def test_set_legacy_abac_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetLegacyAbacRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_legacy_abac(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetLegacyAbacRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_legacy_abac_async_from_dict(): + await test_set_legacy_abac_async(request_type=dict) + + +def test_set_legacy_abac_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetLegacyAbacRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call: + call.return_value = cluster_service.Operation() + client.set_legacy_abac(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
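+ # kw["metadata"], unpacked below, is the sequence of (key, value) tuples
+ # attached to the call; "x-goog-request-params" is the routing header the
+ # backend uses to dispatch by resource name. A sketch of inspecting it
+ # directly (dict() collapses duplicate keys, which is fine for this one):
+ #
+ #   params = dict(kw["metadata"])["x-goog-request-params"]
+ #   assert params == "name=name_value"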
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_legacy_abac_field_headers_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cluster_service.SetLegacyAbacRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ await client.set_legacy_abac(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_set_legacy_abac_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.Operation()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.set_legacy_abac(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ enabled=True,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+ arg = args[0].enabled
+ mock_val = True
+ assert arg == mock_val
+
+
+def test_set_legacy_abac_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.set_legacy_abac(
+ cluster_service.SetLegacyAbacRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ enabled=True,
+ )
+
+
+@pytest.mark.asyncio
+async def test_set_legacy_abac_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_legacy_abac), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.set_legacy_abac(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ enabled=True,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].enabled + mock_val = True + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_legacy_abac_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_legacy_abac( + cluster_service.SetLegacyAbacRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + enabled=True, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.StartIPRotationRequest, + dict, + ], +) +def test_start_ip_rotation(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.start_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.StartIPRotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_start_ip_rotation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
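+ # Each entry of call.mock_calls unpacks to (name, args, kwargs), which is
+ # why these tests write `_, args, _ = call.mock_calls[0]`. The same
+ # introspection with unittest.mock alone (illustrative values):
+ #
+ #   m = mock.Mock()
+ #   m("req", timeout=5)
+ #   name, args, kwargs = m.mock_calls[0]
+ #   assert args == ("req",) and kwargs == {"timeout": 5}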
+ with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + client.start_ip_rotation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.StartIPRotationRequest() + + +@pytest.mark.asyncio +async def test_start_ip_rotation_async( + transport: str = "grpc_asyncio", request_type=cluster_service.StartIPRotationRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.start_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.StartIPRotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_start_ip_rotation_async_from_dict(): + await test_start_ip_rotation_async(request_type=dict) + + +def test_start_ip_rotation_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.StartIPRotationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.start_ip_rotation), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.start_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_start_ip_rotation_field_headers_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cluster_service.StartIPRotationRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.start_ip_rotation), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ await client.start_ip_rotation(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_start_ip_rotation_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.start_ip_rotation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.Operation()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.start_ip_rotation(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+
+
+def test_start_ip_rotation_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.start_ip_rotation(
+ cluster_service.StartIPRotationRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_start_ip_rotation_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.start_ip_rotation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.start_ip_rotation(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_start_ip_rotation_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.start_ip_rotation( + cluster_service.StartIPRotationRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.CompleteIPRotationRequest, + dict, + ], +) +def test_complete_ip_rotation(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.complete_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteIPRotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_complete_ip_rotation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + client.complete_ip_rotation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteIPRotationRequest() + + +@pytest.mark.asyncio +async def test_complete_ip_rotation_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.CompleteIPRotationRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.complete_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CompleteIPRotationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_complete_ip_rotation_async_from_dict(): + await test_complete_ip_rotation_async(request_type=dict) + + +def test_complete_ip_rotation_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CompleteIPRotationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.complete_ip_rotation), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.complete_ip_rotation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_complete_ip_rotation_field_headers_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cluster_service.CompleteIPRotationRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.complete_ip_rotation), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ await client.complete_ip_rotation(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_complete_ip_rotation_flattened():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.complete_ip_rotation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.Operation()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.complete_ip_rotation(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+
+
+def test_complete_ip_rotation_flattened_error():
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.complete_ip_rotation(
+ cluster_service.CompleteIPRotationRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_complete_ip_rotation_flattened_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.complete_ip_rotation), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.complete_ip_rotation(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_complete_ip_rotation_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.complete_ip_rotation( + cluster_service.CompleteIPRotationRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNodePoolSizeRequest, + dict, + ], +) +def test_set_node_pool_size(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolSizeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_node_pool_size_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + client.set_node_pool_size() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolSizeRequest() + + +@pytest.mark.asyncio +async def test_set_node_pool_size_async( + transport: str = "grpc_asyncio", request_type=cluster_service.SetNodePoolSizeRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNodePoolSizeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_node_pool_size_async_from_dict(): + await test_set_node_pool_size_async(request_type=dict) + + +def test_set_node_pool_size_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolSizeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_node_pool_size_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNodePoolSizeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_node_pool_size), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_node_pool_size(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.SetNetworkPolicyRequest, + dict, + ], +) +def test_set_network_policy(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + response = client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_network_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + client.set_network_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNetworkPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_network_policy_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetNetworkPolicyRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetNetworkPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_network_policy_async_from_dict(): + await test_set_network_policy_async(request_type=dict) + + +def test_set_network_policy_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNetworkPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_network_policy_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetNetworkPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_network_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_network_policy_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_network_policy( + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + network_policy=cluster_service.NetworkPolicy( + provider=cluster_service.NetworkPolicy.Provider.CALICO + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].network_policy + mock_val = cluster_service.NetworkPolicy( + provider=cluster_service.NetworkPolicy.Provider.CALICO + ) + assert arg == mock_val + + +def test_set_network_policy_flattened_error(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_network_policy( + cluster_service.SetNetworkPolicyRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + network_policy=cluster_service.NetworkPolicy( + provider=cluster_service.NetworkPolicy.Provider.CALICO + ), + ) + + +@pytest.mark.asyncio +async def test_set_network_policy_flattened_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_network_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
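+ # The async surface awaits the stub's return value, so a bare Operation
+ # is not enough; FakeUnaryUnaryCall wraps it in an awaitable that
+ # resolves to the wrapped message. A sketch of the behaviour relied on
+ # here (illustrative, inside a coroutine):
+ #
+ #   fake = grpc_helpers_async.FakeUnaryUnaryCall(cluster_service.Operation(name="op"))
+ #   op = await fake
+ #   assert op.name == "op"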
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cluster_service.Operation()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.set_network_policy(
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ network_policy=cluster_service.NetworkPolicy(
+ provider=cluster_service.NetworkPolicy.Provider.CALICO
+ ),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].zone
+ mock_val = "zone_value"
+ assert arg == mock_val
+ arg = args[0].cluster_id
+ mock_val = "cluster_id_value"
+ assert arg == mock_val
+ arg = args[0].network_policy
+ mock_val = cluster_service.NetworkPolicy(
+ provider=cluster_service.NetworkPolicy.Provider.CALICO
+ )
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_set_network_policy_flattened_error_async():
+ client = ClusterManagerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.set_network_policy(
+ cluster_service.SetNetworkPolicyRequest(),
+ project_id="project_id_value",
+ zone="zone_value",
+ cluster_id="cluster_id_value",
+ network_policy=cluster_service.NetworkPolicy(
+ provider=cluster_service.NetworkPolicy.Provider.CALICO
+ ),
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ cluster_service.SetMaintenancePolicyRequest,
+ dict,
+ ],
+)
+def test_set_maintenance_policy(request_type, transport: str = "grpc"):
+ client = ClusterManagerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.set_maintenance_policy), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cluster_service.Operation(
+ name="name_value",
+ zone="zone_value",
+ operation_type=cluster_service.Operation.Type.CREATE_CLUSTER,
+ status=cluster_service.Operation.Status.PENDING,
+ detail="detail_value",
+ status_message="status_message_value",
+ self_link="self_link_value",
+ target_link="target_link_value",
+ location="location_value",
+ start_time="start_time_value",
+ end_time="end_time_value",
+ )
+ response = client.set_maintenance_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cluster_service.SetMaintenancePolicyRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +def test_set_maintenance_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + client.set_maintenance_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMaintenancePolicyRequest() + + +@pytest.mark.asyncio +async def test_set_maintenance_policy_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.SetMaintenancePolicyRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation( + name="name_value", + zone="zone_value", + operation_type=cluster_service.Operation.Type.CREATE_CLUSTER, + status=cluster_service.Operation.Status.PENDING, + detail="detail_value", + status_message="status_message_value", + self_link="self_link_value", + target_link="target_link_value", + location="location_value", + start_time="start_time_value", + end_time="end_time_value", + ) + ) + response = await client.set_maintenance_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.SetMaintenancePolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cluster_service.Operation) + assert response.name == "name_value" + assert response.zone == "zone_value" + assert response.operation_type == cluster_service.Operation.Type.CREATE_CLUSTER + assert response.status == cluster_service.Operation.Status.PENDING + assert response.detail == "detail_value" + assert response.status_message == "status_message_value" + assert response.self_link == "self_link_value" + assert response.target_link == "target_link_value" + assert response.location == "location_value" + assert response.start_time == "start_time_value" + assert response.end_time == "end_time_value" + + +@pytest.mark.asyncio +async def test_set_maintenance_policy_async_from_dict(): + await test_set_maintenance_policy_async(request_type=dict) + + +def test_set_maintenance_policy_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMaintenancePolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + call.return_value = cluster_service.Operation() + client.set_maintenance_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_maintenance_policy_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.SetMaintenancePolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.Operation() + ) + await client.set_maintenance_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_set_maintenance_policy_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_maintenance_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.Operation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
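+ # maintenance_policy below is a nested message built explicitly; proto-plus
+ # would also coerce an equivalent plain dict (an assumption about standard
+ # proto-plus behaviour, not exercised by this suite):
+ #
+ #   cluster_service.MaintenancePolicy(
+ #       window={"daily_maintenance_window": {"start_time": "start_time_value"}}
+ #   )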
+        client.set_maintenance_policy(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            maintenance_policy=cluster_service.MaintenancePolicy(
+                window=cluster_service.MaintenanceWindow(
+                    daily_maintenance_window=cluster_service.DailyMaintenanceWindow(
+                        start_time="start_time_value"
+                    )
+                )
+            ),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].zone
+        mock_val = "zone_value"
+        assert arg == mock_val
+        arg = args[0].cluster_id
+        mock_val = "cluster_id_value"
+        assert arg == mock_val
+        arg = args[0].maintenance_policy
+        mock_val = cluster_service.MaintenancePolicy(
+            window=cluster_service.MaintenanceWindow(
+                daily_maintenance_window=cluster_service.DailyMaintenanceWindow(
+                    start_time="start_time_value"
+                )
+            )
+        )
+        assert arg == mock_val
+
+
+def test_set_maintenance_policy_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_maintenance_policy(
+            cluster_service.SetMaintenancePolicyRequest(),
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            maintenance_policy=cluster_service.MaintenancePolicy(
+                window=cluster_service.MaintenanceWindow(
+                    daily_maintenance_window=cluster_service.DailyMaintenanceWindow(
+                        start_time="start_time_value"
+                    )
+                )
+            ),
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_maintenance_policy_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.set_maintenance_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.Operation()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_maintenance_policy(
+            project_id="project_id_value",
+            zone="zone_value",
+            cluster_id="cluster_id_value",
+            maintenance_policy=cluster_service.MaintenancePolicy(
+                window=cluster_service.MaintenanceWindow(
+                    daily_maintenance_window=cluster_service.DailyMaintenanceWindow(
+                        start_time="start_time_value"
+                    )
+                )
+            ),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].zone + mock_val = "zone_value" + assert arg == mock_val + arg = args[0].cluster_id + mock_val = "cluster_id_value" + assert arg == mock_val + arg = args[0].maintenance_policy + mock_val = cluster_service.MaintenancePolicy( + window=cluster_service.MaintenanceWindow( + daily_maintenance_window=cluster_service.DailyMaintenanceWindow( + start_time="start_time_value" + ) + ) + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_maintenance_policy_flattened_error_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_maintenance_policy( + cluster_service.SetMaintenancePolicyRequest(), + project_id="project_id_value", + zone="zone_value", + cluster_id="cluster_id_value", + maintenance_policy=cluster_service.MaintenancePolicy( + window=cluster_service.MaintenanceWindow( + daily_maintenance_window=cluster_service.DailyMaintenanceWindow( + start_time="start_time_value" + ) + ) + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListUsableSubnetworksRequest, + dict, + ], +) +def test_list_usable_subnetworks(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListUsableSubnetworksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListUsableSubnetworksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableSubnetworksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_usable_subnetworks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
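+    # Note: __call__ is patched on the *type* of the transport's stub
+    # because Python resolves dunder methods on the class, not the
+    # instance, so an instance-level patch would never be invoked.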
+ with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + client.list_usable_subnetworks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListUsableSubnetworksRequest() + + +@pytest.mark.asyncio +async def test_list_usable_subnetworks_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.ListUsableSubnetworksRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListUsableSubnetworksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListUsableSubnetworksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsableSubnetworksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_usable_subnetworks_async_from_dict(): + await test_list_usable_subnetworks_async(request_type=dict) + + +def test_list_usable_subnetworks_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListUsableSubnetworksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + call.return_value = cluster_service.ListUsableSubnetworksResponse() + client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_usable_subnetworks_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListUsableSubnetworksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_usable_subnetworks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListUsableSubnetworksResponse() + ) + await client.list_usable_subnetworks(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_usable_subnetworks_flattened():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_subnetworks), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.ListUsableSubnetworksResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_usable_subnetworks(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_usable_subnetworks_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_usable_subnetworks(
+            cluster_service.ListUsableSubnetworksRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_usable_subnetworks_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_subnetworks), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.ListUsableSubnetworksResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_usable_subnetworks(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_usable_subnetworks_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_usable_subnetworks(
+            cluster_service.ListUsableSubnetworksRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_usable_subnetworks_pager(transport_name: str = "grpc"):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_subnetworks), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
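+        # mock's side_effect is consumed one item per call: three pages
+        # chained by next_page_token, a final page with no token, and a
+        # trailing RuntimeError as a sentinel in case the pager over-fetches.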
+        call.side_effect = (
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                ],
+                next_page_token="abc",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[],
+                next_page_token="def",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                ],
+                next_page_token="ghi",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_usable_subnetworks(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, cluster_service.UsableSubnetwork) for i in results)
+
+
+def test_list_usable_subnetworks_pages(transport_name: str = "grpc"):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_subnetworks), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                ],
+                next_page_token="abc",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[],
+                next_page_token="def",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                ],
+                next_page_token="ghi",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_usable_subnetworks(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_usable_subnetworks_async_pager():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_subnetworks),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
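+        # Same four-page sequence as the sync pager tests above; the
+        # AsyncMock returns each response in order as an awaitable, with
+        # the trailing RuntimeError guarding against an extra page request.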
+        call.side_effect = (
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                ],
+                next_page_token="abc",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[],
+                next_page_token="def",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                ],
+                next_page_token="ghi",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_usable_subnetworks(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, cluster_service.UsableSubnetwork) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_usable_subnetworks_async_pages():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_usable_subnetworks),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                ],
+                next_page_token="abc",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[],
+                next_page_token="def",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                ],
+                next_page_token="ghi",
+            ),
+            cluster_service.ListUsableSubnetworksResponse(
+                subnetworks=[
+                    cluster_service.UsableSubnetwork(),
+                    cluster_service.UsableSubnetwork(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_usable_subnetworks(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cluster_service.CheckAutopilotCompatibilityRequest,
+        dict,
+    ],
+)
+def test_check_autopilot_compatibility(request_type, transport: str = "grpc"):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.check_autopilot_compatibility), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cluster_service.CheckAutopilotCompatibilityResponse(
+            summary="summary_value",
+        )
+        response = client.check_autopilot_compatibility(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CheckAutopilotCompatibilityRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.CheckAutopilotCompatibilityResponse) + assert response.summary == "summary_value" + + +def test_check_autopilot_compatibility_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + client.check_autopilot_compatibility() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CheckAutopilotCompatibilityRequest() + + +@pytest.mark.asyncio +async def test_check_autopilot_compatibility_async( + transport: str = "grpc_asyncio", + request_type=cluster_service.CheckAutopilotCompatibilityRequest, +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.CheckAutopilotCompatibilityResponse( + summary="summary_value", + ) + ) + response = await client.check_autopilot_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.CheckAutopilotCompatibilityRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.CheckAutopilotCompatibilityResponse) + assert response.summary == "summary_value" + + +@pytest.mark.asyncio +async def test_check_autopilot_compatibility_async_from_dict(): + await test_check_autopilot_compatibility_async(request_type=dict) + + +def test_check_autopilot_compatibility_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CheckAutopilotCompatibilityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + call.return_value = cluster_service.CheckAutopilotCompatibilityResponse() + client.check_autopilot_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
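+    # mock_calls entries unpack as (name, args, kwargs); the routing
+    # header travels in the "metadata" kwarg of the stub invocation.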
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_check_autopilot_compatibility_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.CheckAutopilotCompatibilityRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_autopilot_compatibility), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.CheckAutopilotCompatibilityResponse() + ) + await client.check_autopilot_compatibility(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cluster_service.ListLocationsRequest, + dict, + ], +) +def test_list_locations(request_type, transport: str = "grpc"): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListLocationsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListLocationsRequest() + + # Establish that the response is the type that we expect. + assert response.raw_page is response + assert isinstance(response, cluster_service.ListLocationsResponse) + assert response.next_page_token == "next_page_token_value" + + +def test_list_locations_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + client.list_locations() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListLocationsRequest() + + +@pytest.mark.asyncio +async def test_list_locations_async( + transport: str = "grpc_asyncio", request_type=cluster_service.ListLocationsRequest +): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListLocationsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cluster_service.ListLocationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cluster_service.ListLocationsResponse) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_locations_async_from_dict(): + await test_list_locations_async(request_type=dict) + + +def test_list_locations_field_headers(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListLocationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = cluster_service.ListLocationsResponse() + client.list_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = ClusterManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cluster_service.ListLocationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cluster_service.ListLocationsResponse() + ) + await client.list_locations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_locations_flattened(): + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cluster_service.ListLocationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_locations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_locations_flattened_error():
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_locations(
+            cluster_service.ListLocationsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_locations_flattened_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_locations), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cluster_service.ListLocationsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_locations(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_locations_flattened_error_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_locations(
+            cluster_service.ListLocationsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.ClusterManagerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = ClusterManagerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.ClusterManagerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = ClusterManagerClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.ClusterManagerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = ClusterManagerClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+    options = mock.Mock()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = ClusterManagerClient(
+            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+ transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterManagerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ClusterManagerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ClusterManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ClusterManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = ClusterManagerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ClusterManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ClusterManagerGrpcTransport, + ) + + +def test_cluster_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ClusterManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cluster_manager_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.container_v1beta1.services.cluster_manager.transports.ClusterManagerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ClusterManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_clusters", + "get_cluster", + "create_cluster", + "update_cluster", + "update_node_pool", + "set_node_pool_autoscaling", + "set_logging_service", + "set_monitoring_service", + "set_addons_config", + "set_locations", + "update_master", + "set_master_auth", + "delete_cluster", + "list_operations", + "get_operation", + "cancel_operation", + "get_server_config", + "get_json_web_keys", + "list_node_pools", + "get_node_pool", + "create_node_pool", + "delete_node_pool", + "complete_node_pool_upgrade", + "rollback_node_pool_upgrade", + "set_node_pool_management", + "set_labels", + "set_legacy_abac", + "start_ip_rotation", + "complete_ip_rotation", + "set_node_pool_size", + "set_network_policy", + "set_maintenance_policy", + "list_usable_subnetworks", + "check_autopilot_compatibility", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cluster_manager_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.container_v1beta1.services.cluster_manager.transports.ClusterManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ClusterManagerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cluster_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.container_v1beta1.services.cluster_manager.transports.ClusterManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ClusterManagerTransport() + adc.assert_called_once() + + +def test_cluster_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ClusterManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
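+    # google.auth.default() returns a (credentials, project_id) tuple,
+    # which is why the mocked return value below is a 2-tuple.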
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterManagerGrpcTransport, grpc_helpers), + (transports.ClusterManagerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cluster_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "container.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="container.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
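+    # client_cert_source_callback is the module-level test helper that
+    # returns a (certificate bytes, key bytes) pair; the assertion below
+    # checks that grpc.ssl_channel_credentials receives exactly that pair.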
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_cluster_manager_host_no_port(transport_name):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="container.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("container.googleapis.com:443")
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_cluster_manager_host_with_port(transport_name):
+    client = ClusterManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="container.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("container.googleapis.com:8000")
+
+
+def test_cluster_manager_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ClusterManagerGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_cluster_manager_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ClusterManagerGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterManagerGrpcTransport, + transports.ClusterManagerGrpcAsyncIOTransport, + ], +) +def test_cluster_manager_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_topic_path(): + project = "squid" + topic = "clam" + expected = "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + actual = ClusterManagerClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "whelk", + "topic": "octopus", + } + path = ClusterManagerClient.topic_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ClusterManagerClient.parse_topic_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ClusterManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ClusterManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterManagerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ClusterManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ClusterManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterManagerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ClusterManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ClusterManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterManagerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = ClusterManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ClusterManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterManagerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ClusterManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ClusterManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
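+    # The parse_* helpers invert their *_path builders by regex-matching
+    # the formatted string and returning the captured segments as a dict.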
+    actual = ClusterManagerClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.ClusterManagerTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = ClusterManagerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.ClusterManagerTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = ClusterManagerClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+    client = ClusterManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    with mock.patch.object(
+        type(getattr(client.transport, "grpc_channel")), "close"
+    ) as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close():
+    transports = {
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = ClusterManagerClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "grpc",
+    ]
+    for transport in transports:
+        client = ClusterManagerClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (ClusterManagerClient, transports.ClusterManagerGrpcTransport),
+        (ClusterManagerAsyncClient, transports.ClusterManagerGrpcAsyncIOTransport),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/packages/google-cloud-contentwarehouse/CONTRIBUTING.rst b/packages/google-cloud-contentwarehouse/CONTRIBUTING.rst
index ddde08b340d6..672a41e3439a 100644
--- a/packages/google-cloud-contentwarehouse/CONTRIBUTING.rst
+++ b/packages/google-cloud-contentwarehouse/CONTRIBUTING.rst
@@ -143,12 +143,12 @@ Running System Tests
    $ nox -s system
 
    # Run a single system test
-   $ nox -s system- -- -k <name of test>
+   $ nox -s system-3.11 -- -k <name of test>
 
 .. note::
 
-   System tests are only configured to run under Python.
+   System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py index 1a229f7edfcc..360a0d13ebdd 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.7.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py index 1a229f7edfcc..360a0d13ebdd 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.7.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py index c1cd265e68d3..f4157f65827f 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/types/document_service.py @@ -220,6 +220,9 @@ class MatchingDocument(proto.Message): Experimental. Additional result info if the question-answering feature is enabled. + matched_token_page_indices (MutableSequence[int]): + Return the 1-based page indices where those + pages have one or more matched tokens. """ document: gcc_document.Document = proto.Field( @@ -236,6 +239,10 @@ class MatchingDocument(proto.Message): number=3, message="QAResult", ) + matched_token_page_indices: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=4, + ) @property def raw_page(self): diff --git a/packages/google-cloud-contentwarehouse/noxfile.py b/packages/google-cloud-contentwarehouse/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-contentwarehouse/noxfile.py +++ b/packages/google-cloud-contentwarehouse/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. 
-    if os.path.exists(system_test_path):
-        session.run(
-            "py.test",
-            "--verbose",
-            f"--junitxml=system_{session.python}_sponge_log.xml",
-            system_test_path,
-            *session.posargs,
-        )
-    if os.path.exists(system_test_folder_path):
-        session.run(
-            "py.test",
-            "--verbose",
-            f"--junitxml=system_{session.python}_sponge_log.xml",
-            system_test_folder_path,
-            *session.posargs,
-        )
diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json
index 4d8ced4c0eb8..4e798f93791e 100644
--- a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json
+++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json
@@ -8,7 +8,7 @@
         ],
         "language": "PYTHON",
         "name": "google-cloud-contentwarehouse",
-        "version": "0.7.1"
+        "version": "0.1.0"
     },
     "snippets": [
         {
diff --git a/packages/google-cloud-dataproc-metastore/CONTRIBUTING.rst b/packages/google-cloud-dataproc-metastore/CONTRIBUTING.rst
index 5ef2cfd86ee7..4128ae9d6f8f 100644
--- a/packages/google-cloud-dataproc-metastore/CONTRIBUTING.rst
+++ b/packages/google-cloud-dataproc-metastore/CONTRIBUTING.rst
@@ -143,12 +143,12 @@ Running System Tests
    $ nox -s system
 
    # Run a single system test
-   $ nox -s system- -- -k <name of test>
+   $ nox -s system-3.11 -- -k <name of test>
 
 .. note::
 
-   System tests are only configured to run under Python.
+   System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
 
    For expediency, we do not run them in older versions of Python 3.
 
    This alone will not run the tests. You'll need to change some local
diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py
index ac3bc60c3fb3..360a0d13ebdd 100644
--- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py
+++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "1.12.1"  # {x-release-please-version}
+__version__ = "0.0.0"  # {x-release-please-version}
diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py
index ac3bc60c3fb3..360a0d13ebdd 100644
--- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py
+++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
# -__version__ = "1.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore.py index 63b5978d0319..372dbcb7b6fb 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/types/metastore.py @@ -579,6 +579,8 @@ class AuxiliaryVersionConfig(proto.Message): class NetworkConfig(proto.Message): r"""Network configuration for the Dataproc Metastore service. + Next available ID: 4 + Attributes: consumers (MutableSequence[google.cloud.metastore_v1.types.NetworkConfig.Consumer]): Immutable. The consumer-side network @@ -589,6 +591,7 @@ class NetworkConfig(proto.Message): class Consumer(proto.Message): r"""Contains information of the customer's network configurations. + Next available ID: 5 .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -609,6 +612,9 @@ class Consumer(proto.Message): endpoint_uri (str): Output only. The URI of the endpoint used to access the metastore service. + endpoint_location (str): + Output only. The location of the endpoint URI. Format: + ``projects/{project}/locations/{location}``. """ subnetwork: str = proto.Field( @@ -620,6 +626,10 @@ class Consumer(proto.Message): proto.STRING, number=3, ) + endpoint_location: str = proto.Field( + proto.STRING, + number=4, + ) consumers: MutableSequence[Consumer] = proto.RepeatedField( proto.MESSAGE, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py index ac3bc60c3fb3..360a0d13ebdd 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore.py index 5c24b733a5eb..c0ad8b97cbed 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/types/metastore.py @@ -672,6 +672,8 @@ class AuxiliaryVersionConfig(proto.Message): class NetworkConfig(proto.Message): r"""Network configuration for the Dataproc Metastore service. + Next available ID: 4 + Attributes: consumers (MutableSequence[google.cloud.metastore_v1alpha.types.NetworkConfig.Consumer]): Immutable. The consumer-side network @@ -686,6 +688,7 @@ class NetworkConfig(proto.Message): class Consumer(proto.Message): r"""Contains information of the customer's network configurations. + Next available ID: 5 .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -706,6 +709,9 @@ class Consumer(proto.Message): endpoint_uri (str): Output only. The URI of the endpoint used to access the metastore service. + endpoint_location (str): + Output only. 
The location of the endpoint URI. Format: + ``projects/{project}/locations/{location}``. """ subnetwork: str = proto.Field( @@ -717,6 +723,10 @@ class Consumer(proto.Message): proto.STRING, number=3, ) + endpoint_location: str = proto.Field( + proto.STRING, + number=4, + ) consumers: MutableSequence[Consumer] = proto.RepeatedField( proto.MESSAGE, diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py index ac3bc60c3fb3..360a0d13ebdd 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore.py index 23269a0f5359..ad32f0977dee 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/types/metastore.py @@ -672,6 +672,8 @@ class AuxiliaryVersionConfig(proto.Message): class NetworkConfig(proto.Message): r"""Network configuration for the Dataproc Metastore service. + Next available ID: 4 + Attributes: consumers (MutableSequence[google.cloud.metastore_v1beta.types.NetworkConfig.Consumer]): Immutable. The consumer-side network @@ -686,6 +688,7 @@ class NetworkConfig(proto.Message): class Consumer(proto.Message): r"""Contains information of the customer's network configurations. + Next available ID: 5 .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -706,6 +709,9 @@ class Consumer(proto.Message): endpoint_uri (str): Output only. The URI of the endpoint used to access the metastore service. + endpoint_location (str): + Output only. The location of the endpoint URI. Format: + ``projects/{project}/locations/{location}``. """ subnetwork: str = proto.Field( @@ -717,6 +723,10 @@ class Consumer(proto.Message): proto.STRING, number=3, ) + endpoint_location: str = proto.Field( + proto.STRING, + number=4, + ) consumers: MutableSequence[Consumer] = proto.RepeatedField( proto.MESSAGE, diff --git a/packages/google-cloud-dataproc-metastore/noxfile.py b/packages/google-cloud-dataproc-metastore/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-dataproc-metastore/noxfile.py +++ b/packages/google-cloud-dataproc-metastore/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. 
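Note on the metastore hunks: each API version's `NetworkConfig.Consumer` gains an output-only `endpoint_location` string at field number 4 (consistent with the "Next available ID: 5" comment). A minimal sketch of reading the new field, assuming the v1 `DataprocMetastoreClient.get_service` surface; the resource name is a placeholder:

```python
from google.cloud import metastore_v1

client = metastore_v1.DataprocMetastoreClient()
service = client.get_service(
    name="projects/PROJECT/locations/LOCATION/services/SERVICE"  # placeholder
)

for consumer in service.network_config.consumers:
    # endpoint_location is output only, formatted as
    # projects/{project}/locations/{location}.
    print(consumer.endpoint_uri, consumer.endpoint_location)
```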
- if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json index 631c343ce1b8..61145e9be3f7 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.12.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json index 6316721caf5a..96abac734738 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.12.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json index b2696ee9c288..4a03aa222378 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.12.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py index 93a076e82a91..ad8df8fffacb 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -6109,7 +6109,11 @@ def test_create_service_rest(request_type): "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ] }, "database_type": 1, @@ -6372,7 +6376,11 @@ def test_create_service_rest_bad_request( "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + 
"endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ] }, "database_type": 1, @@ -6532,7 +6540,11 @@ def test_update_service_rest(request_type): "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ] }, "database_type": 1, @@ -6778,7 +6790,11 @@ def test_update_service_rest_bad_request( "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ] }, "database_type": 1, @@ -9611,6 +9627,7 @@ def test_create_backup_rest(request_type): { "subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", } ] }, @@ -9886,6 +9903,7 @@ def test_create_backup_rest_bad_request( { "subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", } ] }, diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py index ff14c6c986f5..e45b55729c23 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py @@ -6275,7 +6275,11 @@ def test_create_service_rest(request_type): "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ], "custom_routes_enabled": True, }, @@ -6543,7 +6547,11 @@ def test_create_service_rest_bad_request( "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ], "custom_routes_enabled": True, }, @@ -6709,7 +6717,11 @@ def test_update_service_rest(request_type): "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ], "custom_routes_enabled": True, }, @@ -6960,7 +6972,11 @@ def test_update_service_rest_bad_request( "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ], "custom_routes_enabled": True, }, @@ -9799,6 +9815,7 @@ def test_create_backup_rest(request_type): { "subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", } ], 
"custom_routes_enabled": True, @@ -10079,6 +10096,7 @@ def test_create_backup_rest_bad_request( { "subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", } ], "custom_routes_enabled": True, diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py index 4370f8a558d6..aafa686e833f 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py @@ -6275,7 +6275,11 @@ def test_create_service_rest(request_type): "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ], "custom_routes_enabled": True, }, @@ -6543,7 +6547,11 @@ def test_create_service_rest_bad_request( "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ], "custom_routes_enabled": True, }, @@ -6709,7 +6717,11 @@ def test_update_service_rest(request_type): "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ], "custom_routes_enabled": True, }, @@ -6960,7 +6972,11 @@ def test_update_service_rest_bad_request( "encryption_config": {"kms_key": "kms_key_value"}, "network_config": { "consumers": [ - {"subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value"} + { + "subnetwork": "subnetwork_value", + "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", + } ], "custom_routes_enabled": True, }, @@ -9799,6 +9815,7 @@ def test_create_backup_rest(request_type): { "subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", } ], "custom_routes_enabled": True, @@ -10079,6 +10096,7 @@ def test_create_backup_rest_bad_request( { "subnetwork": "subnetwork_value", "endpoint_uri": "endpoint_uri_value", + "endpoint_location": "endpoint_location_value", } ], "custom_routes_enabled": True, diff --git a/packages/google-cloud-dataproc/.OwlBot.yaml b/packages/google-cloud-dataproc/.OwlBot.yaml new file mode 100644 index 000000000000..ad60ba904c27 --- /dev/null +++ b/packages/google-cloud-dataproc/.OwlBot.yaml @@ -0,0 +1,27 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-preserve-regex: + - /owl-bot-staging/google-cloud-dataproc/v1beta2 + +deep-copy-regex: + - source: /google/cloud/dataproc/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-dataproc/$1 + +begin-after-commit-hash: 107ed1217b5e87048263f52cd3911d5f851aca7e + diff --git a/packages/google-cloud-dataproc/.coveragerc b/packages/google-cloud-dataproc/.coveragerc new file mode 100644 index 000000000000..d5ee82ac1f3e --- /dev/null +++ b/packages/google-cloud-dataproc/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dataproc/__init__.py + google/cloud/dataproc/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-dataproc/.flake8 b/packages/google-cloud-dataproc/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-dataproc/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-dataproc/.gitignore b/packages/google-cloud-dataproc/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-dataproc/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-dataproc/.repo-metadata.json b/packages/google-cloud-dataproc/.repo-metadata.json new file mode 100644 index 000000000000..446ff70ba6a7 --- /dev/null +++ b/packages/google-cloud-dataproc/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "dataproc", + "name_pretty": "Google Cloud Dataproc", + "product_documentation": "https://cloud.google.com/dataproc", + "client_documentation": "https://cloud.google.com/python/docs/reference/dataproc/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559745", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-dataproc", + "api_id": "dataproc.googleapis.com", + "requires_billing": true, + "codeowner_team": "@googleapis/api-dataproc", + "default_version": "v1", + "api_shortname": "dataproc", + "api_description": "is a faster, easier, more cost-effective way to run Apache Spark and Apache Hadoop." +} diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md new file mode 100644 index 000000000000..b441e22cedcf --- /dev/null +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -0,0 +1,572 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-dataproc/#history + +## [5.6.0](https://github.com/googleapis/python-dataproc/compare/v5.5.1...v5.6.0) (2023-09-15) + + +### Features + +* Add optional parameters (tarball_gcs_dir, diagnosis_interval, jobs, yarn_application_ids) in DiagnoseClusterRequest ([#560](https://github.com/googleapis/python-dataproc/issues/560)) ([59b00aa](https://github.com/googleapis/python-dataproc/commit/59b00aa5559cec35578fe086fab5df726a3b526a)) + +## [5.5.1](https://github.com/googleapis/python-dataproc/compare/v5.5.0...v5.5.1) (2023-09-13) + + +### Documentation + +* Minor formatting ([c3c65bc](https://github.com/googleapis/python-dataproc/commit/c3c65bcb86bac3984a4d1eb21201a6ba1c64cea1)) + +## [5.5.0](https://github.com/googleapis/python-dataproc/compare/v5.4.3...v5.5.0) (2023-08-23) + + +### Features + +* Support min_num_instances for primary worker and InstanceFlexibilityPolicy for secondary worker ([#555](https://github.com/googleapis/python-dataproc/issues/555)) ([8ab7c71](https://github.com/googleapis/python-dataproc/commit/8ab7c717c914ab806e02c2ae5c0988f755cf74a4)) + +## [5.4.3](https://github.com/googleapis/python-dataproc/compare/v5.4.2...v5.4.3) (2023-08-02) + + +### Documentation + +* Minor formatting ([#551](https://github.com/googleapis/python-dataproc/issues/551)) ([c480e55](https://github.com/googleapis/python-dataproc/commit/c480e55dab2ccd8a4af828fc77ef3dac86528009)) + +## [5.4.2](https://github.com/googleapis/python-dataproc/compare/v5.4.1...v5.4.2) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#539](https://github.com/googleapis/python-dataproc/issues/539)) ([7c081a6](https://github.com/googleapis/python-dataproc/commit/7c081a682a6d981ac3eed932f7c8e1e67f75af69)) + +## [5.4.1](https://github.com/googleapis/python-dataproc/compare/v5.4.0...v5.4.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#528](https://github.com/googleapis/python-dataproc/issues/528)) 
([c7806f5](https://github.com/googleapis/python-dataproc/commit/c7806f572156a9dfd3ce6f7eb4d048f090e85fe5)) + +## [5.4.0](https://github.com/googleapis/python-dataproc/compare/v5.3.0...v5.4.0) (2023-02-17) + + +### Features + +* Add support for new Dataproc features ([67bc8a2](https://github.com/googleapis/python-dataproc/commit/67bc8a2a9b36b62c3006ee1eb873eda101624e55)) +* Add TrinoJob ([67bc8a2](https://github.com/googleapis/python-dataproc/commit/67bc8a2a9b36b62c3006ee1eb873eda101624e55)) +* Add UsageMetrics ([67bc8a2](https://github.com/googleapis/python-dataproc/commit/67bc8a2a9b36b62c3006ee1eb873eda101624e55)) +* Add UsageSnapshot ([67bc8a2](https://github.com/googleapis/python-dataproc/commit/67bc8a2a9b36b62c3006ee1eb873eda101624e55)) +* Enable "rest" transport in Python for services supporting numeric enums ([#519](https://github.com/googleapis/python-dataproc/issues/519)) ([f1a9ba7](https://github.com/googleapis/python-dataproc/commit/f1a9ba72ff14ad7d64bfb9829d1fb4d674fa1b50)) + +## [5.3.0](https://github.com/googleapis/python-dataproc/compare/v5.2.0...v5.3.0) (2023-01-23) + + +### Features + +* Add SPOT to Preemptibility enum ([8d5e6d8](https://github.com/googleapis/python-dataproc/commit/8d5e6d8b756bffa44227bdf5dd27223e45facd57)) + + +### Bug Fixes + +* Add context manager return types ([8d5e6d8](https://github.com/googleapis/python-dataproc/commit/8d5e6d8b756bffa44227bdf5dd27223e45facd57)) + + +### Documentation + +* Add documentation for enums ([8d5e6d8](https://github.com/googleapis/python-dataproc/commit/8d5e6d8b756bffa44227bdf5dd27223e45facd57)) + +## [5.2.0](https://github.com/googleapis/python-dataproc/compare/v5.1.0...v5.2.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#510](https://github.com/googleapis/python-dataproc/issues/510)) ([d1ed81d](https://github.com/googleapis/python-dataproc/commit/d1ed81d193a9ee5e25685fd5b27e0014708c528c)) + +## [5.1.0](https://github.com/googleapis/python-dataproc/compare/v5.0.3...v5.1.0) (2023-01-07) + + +### Features + +* Add support for `google.cloud.dataproc.__version__` ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) +* Add typing to proto.Message based class attributes ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) +* Added node groups API protos ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) + + +### Bug Fixes + +* Add dict typing for client_options ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) +* Drop usage of pkg_resources ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) +* Fix timeout default values ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) +* **samples:** Snippetgen should call await on the operation coroutine 
before calling result ([b3b13c4](https://github.com/googleapis/python-dataproc/commit/b3b13c47129f807f385125bf6c96311793724066)) + +## [5.0.3](https://github.com/googleapis/python-dataproc/compare/v5.0.2...v5.0.3) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#490](https://github.com/googleapis/python-dataproc/issues/490)) ([5142ab0](https://github.com/googleapis/python-dataproc/commit/5142ab00edc95716d04cdba0ba07c660986f8561)) + +## [5.0.2](https://github.com/googleapis/python-dataproc/compare/v5.0.1...v5.0.2) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#486](https://github.com/googleapis/python-dataproc/issues/486)) ([d7674f4](https://github.com/googleapis/python-dataproc/commit/d7674f4e2caa3d6a0da47e97252e1be11e5eea53)) + +## [5.0.1](https://github.com/googleapis/python-dataproc/compare/v5.0.0...v5.0.1) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#456](https://github.com/googleapis/python-dataproc/issues/456)) ([a446937](https://github.com/googleapis/python-dataproc/commit/a44693711df3218a083f060e00cad3801537dd9b)) +* **deps:** require proto-plus >= 1.22.0 ([a446937](https://github.com/googleapis/python-dataproc/commit/a44693711df3218a083f060e00cad3801537dd9b)) + +## [5.0.0](https://github.com/googleapis/python-dataproc/compare/v4.0.3...v5.0.0) (2022-07-19) + + +### ⚠ BREAKING CHANGES + +* Move `yarn_config` into a `oneof` +* Remove `temp_bucket` from VirtualClusterConfig, as its value was not used + +### Features + +* add audience parameter ([61a23fa](https://github.com/googleapis/python-dataproc/commit/61a23faab861c17043af4efeeb1659334234349a)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#400](https://github.com/googleapis/python-dataproc/issues/400)) ([61a23fa](https://github.com/googleapis/python-dataproc/commit/61a23faab861c17043af4efeeb1659334234349a)) +* Move `yarn_config` into a `oneof` ([61a23fa](https://github.com/googleapis/python-dataproc/commit/61a23faab861c17043af4efeeb1659334234349a)) +* Remove `temp_bucket` from VirtualClusterConfig, as its value was not used ([61a23fa](https://github.com/googleapis/python-dataproc/commit/61a23faab861c17043af4efeeb1659334234349a)) +* require python 3.7+ ([#442](https://github.com/googleapis/python-dataproc/issues/442)) ([9862ff7](https://github.com/googleapis/python-dataproc/commit/9862ff7c9086921f0a4ecf81ae175c07ac701ef3)) + +## [4.0.3](https://github.com/googleapis/python-dataproc/compare/v4.0.2...v4.0.3) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#423](https://github.com/googleapis/python-dataproc/issues/423)) ([5d1a263](https://github.com/googleapis/python-dataproc/commit/5d1a263533c1812587a9668d6bd6d7d29ac82f2f)) + + +### Documentation + +* fix changelog header to consistent size ([#424](https://github.com/googleapis/python-dataproc/issues/424)) ([00162f0](https://github.com/googleapis/python-dataproc/commit/00162f07935cca365c41dcbf8be4e5a7681d680e)) + +## [4.0.2](https://github.com/googleapis/python-dataproc/compare/v4.0.1...v4.0.2) (2022-04-06) + + +### Bug Fixes + +* resource quotas ([#377](https://github.com/googleapis/python-dataproc/issues/377)) ([122c2f7](https://github.com/googleapis/python-dataproc/commit/122c2f77a93228dda409a8fad22465f2d28c5e0d)) +* updating 
submit_job_to_cluster.py ([#387](https://github.com/googleapis/python-dataproc/issues/387)) ([0719d2b](https://github.com/googleapis/python-dataproc/commit/0719d2b69661f9775c00a1fc0dade2e65b4e44e9)) + +## [4.0.1](https://github.com/googleapis/python-dataproc/compare/v4.0.0...v4.0.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#357](https://github.com/googleapis/python-dataproc/issues/357)) ([3c66f42](https://github.com/googleapis/python-dataproc/commit/3c66f4232d35f1e11807d29e169fe87e028c52eb)) +* **deps:** require proto-plus>=1.15.0 ([3c66f42](https://github.com/googleapis/python-dataproc/commit/3c66f4232d35f1e11807d29e169fe87e028c52eb)) + +## [4.0.0](https://github.com/googleapis/python-dataproc/compare/v3.3.0...v4.0.0) (2022-02-26) + + +### ⚠ BREAKING CHANGES + +* add support for Virtual Dataproc cluster running on GKE cluster (#344) + +### Features + +* add support for Virtual Dataproc cluster running on GKE cluster ([#344](https://github.com/googleapis/python-dataproc/issues/344)) ([116077b](https://github.com/googleapis/python-dataproc/commit/116077b45abaccb1814002284e05e34ef387e045)) + + +### Bug Fixes + +* move GkeClusterConfig to google.cloud.dataproc_v1.types.shared ([116077b](https://github.com/googleapis/python-dataproc/commit/116077b45abaccb1814002284e05e34ef387e045)) +* remove namespaced_gke_deployment_target ([116077b](https://github.com/googleapis/python-dataproc/commit/116077b45abaccb1814002284e05e34ef387e045)) + +## [3.3.0](https://github.com/googleapis/python-dataproc/compare/v3.2.0...v3.3.0) (2022-02-18) + + +### Features + +* add api key support ([#336](https://github.com/googleapis/python-dataproc/issues/336)) ([ac22d7e](https://github.com/googleapis/python-dataproc/commit/ac22d7ef7040e85035a8d3cfc9fe0f69a014f238)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([452460f](https://github.com/googleapis/python-dataproc/commit/452460fe8e6af9e1a99d9636c5531e489e1e4852)) + + +### Documentation + +* add generated snippets ([#342](https://github.com/googleapis/python-dataproc/issues/342)) ([98810a9](https://github.com/googleapis/python-dataproc/commit/98810a9bc7674ea81397823f5da871cd30adcbd7)) + +## [3.2.0](https://github.com/googleapis/python-dataproc/compare/v3.1.1...v3.2.0) (2022-01-17) + + +### Features + +* add Spark runtime versioning for Spark batches ([#318](https://github.com/googleapis/python-dataproc/issues/318)) ([f2e35d9](https://github.com/googleapis/python-dataproc/commit/f2e35d9735cbd0dd5a0e32d78631d70820380846)) +* auto-diagnostic of failed Spark batches ([f2e35d9](https://github.com/googleapis/python-dataproc/commit/f2e35d9735cbd0dd5a0e32d78631d70820380846)) +* custom image containers for Spark batches ([f2e35d9](https://github.com/googleapis/python-dataproc/commit/f2e35d9735cbd0dd5a0e32d78631d70820380846)) +* local SSD NVME interface support for GCE clusters ([f2e35d9](https://github.com/googleapis/python-dataproc/commit/f2e35d9735cbd0dd5a0e32d78631d70820380846)) + +## [3.1.1](https://www.github.com/googleapis/python-dataproc/compare/v3.1.0...v3.1.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([d4919c0](https://www.github.com/googleapis/python-dataproc/commit/d4919c029ad12b5ee44942b55c5560aaf441f5a9)) +* **deps:** require google-api-core >= 1.28.0 
([d4919c0](https://www.github.com/googleapis/python-dataproc/commit/d4919c029ad12b5ee44942b55c5560aaf441f5a9)) + + +### Documentation + +* list oneofs in docstring ([d4919c0](https://www.github.com/googleapis/python-dataproc/commit/d4919c029ad12b5ee44942b55c5560aaf441f5a9)) + +## [3.1.0](https://www.github.com/googleapis/python-dataproc/compare/v3.0.0...v3.1.0) (2021-10-26) + + +### Features + +* add context manager support in client ([#285](https://www.github.com/googleapis/python-dataproc/issues/285)) ([b54fb76](https://www.github.com/googleapis/python-dataproc/commit/b54fb7647deaea64fe6ad553514c9d0ad62a0cbc)) +* add Dataproc Serverless for Spark Batches API ([#290](https://www.github.com/googleapis/python-dataproc/issues/290)) ([f0ed26c](https://www.github.com/googleapis/python-dataproc/commit/f0ed26c6ccd2e9f438d1d5f31c5512761b0e20b9)) +* Add support for dataproc BatchController service ([#291](https://www.github.com/googleapis/python-dataproc/issues/291)) ([24a6f7d](https://www.github.com/googleapis/python-dataproc/commit/24a6f7defee1e0fd2d195f934c004769d8f1a2b7)) +* add support for python 3.10 ([#289](https://www.github.com/googleapis/python-dataproc/issues/289)) ([229f919](https://www.github.com/googleapis/python-dataproc/commit/229f919e31c39bc028cd2e6062437b0a8d061556)) + +## [3.0.0](https://www.github.com/googleapis/python-dataproc/compare/v2.6.0...v3.0.0) (2021-10-04) + + +### Features + +* delete deprecated Dataproc v1beta2 API client ([3bdeaa7](https://www.github.com/googleapis/python-dataproc/commit/3bdeaa7e707ec2af445bf7c0321959b927c9c245)) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([be5c115](https://www.github.com/googleapis/python-dataproc/commit/be5c11554f7accfe60dd5cb8da7c4888f688c282)) +* improper types in pagers generation ([1ae784b](https://www.github.com/googleapis/python-dataproc/commit/1ae784bc1610aeb389eaa2cc7a6dc6f10c96788b)) + + +### Miscellaneous Chores + +* release as 3.0.0 ([#273](https://www.github.com/googleapis/python-dataproc/issues/273)) ([3bdeaa7](https://www.github.com/googleapis/python-dataproc/commit/3bdeaa7e707ec2af445bf7c0321959b927c9c245)) + + +### Documentation + +* update cluster sample ([3bdeaa7](https://www.github.com/googleapis/python-dataproc/commit/3bdeaa7e707ec2af445bf7c0321959b927c9c245)) + +## [2.6.0](https://www.github.com/googleapis/python-dataproc/compare/v2.5.0...v2.6.0) (2021-09-21) + + +### Features + +* delete deprecated Dataproc v1beta2 API client ([#253](https://www.github.com/googleapis/python-dataproc/issues/253)) ([b0db6da](https://www.github.com/googleapis/python-dataproc/commit/b0db6da6221ed37ab2d8903fff8befb788fa55d5)) + + +### Documentation + +* update cluster sample ([#218](https://www.github.com/googleapis/python-dataproc/issues/218)) ([80706f9](https://www.github.com/googleapis/python-dataproc/commit/80706f93b32007efe43ca4740a20f924fb6e9f54)) + +## [2.5.0](https://www.github.com/googleapis/python-dataproc/compare/v2.4.0...v2.5.0) (2021-07-24) + + +### Features + +* add always_use_jwt_access ([#209](https://www.github.com/googleapis/python-dataproc/issues/209)) ([6aec13c](https://www.github.com/googleapis/python-dataproc/commit/6aec13ce39a2afc0f36878bd61cff1614ec66972)) + + +### Bug Fixes + +* Attribute error Name while executing the sample code ([#205](https://www.github.com/googleapis/python-dataproc/issues/205)) ([cb0328f](https://www.github.com/googleapis/python-dataproc/commit/cb0328f3bfec416be9aec34d027fe0f48aab4242)) +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x 
versions ([#227](https://www.github.com/googleapis/python-dataproc/issues/227)) ([5acfcd0](https://www.github.com/googleapis/python-dataproc/commit/5acfcd019dede3684fdf23cbed8bfcebdce606af)) +* disable always_use_jwt_access ([#215](https://www.github.com/googleapis/python-dataproc/issues/215)) ([a57e253](https://www.github.com/googleapis/python-dataproc/commit/a57e25388691335b6672613210ee566ed91dc97b)) +* enable self signed jwt for grpc ([#233](https://www.github.com/googleapis/python-dataproc/issues/233)) ([7df4fef](https://www.github.com/googleapis/python-dataproc/commit/7df4fefdced730fffd9b994608575512efe8d72a)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-dataproc/issues/1127)) ([#201](https://www.github.com/googleapis/python-dataproc/issues/201)) ([feea064](https://www.github.com/googleapis/python-dataproc/commit/feea0642ea6dbd6e08d4e52c89789a6b17e4de97)) +* add Samples section to CONTRIBUTING.rst ([#228](https://www.github.com/googleapis/python-dataproc/issues/228)) ([3e248c2](https://www.github.com/googleapis/python-dataproc/commit/3e248c29470d635abf0d6fa7ae84dc8370a86bef)) + + +## [2.4.0](https://www.github.com/googleapis/python-dataproc/compare/v2.3.1...v2.4.0) (2021-05-20) + + +### Features + +* add 'from_service_account_info' factory to clients ([6525f86](https://www.github.com/googleapis/python-dataproc/commit/6525f86b698242d77606cffb42713d18724a2526)) +* support self-signed JWT flow for service accounts ([5137a6f](https://www.github.com/googleapis/python-dataproc/commit/5137a6fce856b22be884aae19ec814458fc4ce97)) +* update the Dataproc V1 API client library ([5137a6f](https://www.github.com/googleapis/python-dataproc/commit/5137a6fce856b22be884aae19ec814458fc4ce97)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([5137a6f](https://www.github.com/googleapis/python-dataproc/commit/5137a6fce856b22be884aae19ec814458fc4ce97)) +* fix sphinx identifiers ([6525f86](https://www.github.com/googleapis/python-dataproc/commit/6525f86b698242d77606cffb42713d18724a2526)) +* use correct retry deadlines ([#122](https://www.github.com/googleapis/python-dataproc/issues/122)) ([6525f86](https://www.github.com/googleapis/python-dataproc/commit/6525f86b698242d77606cffb42713d18724a2526)) + +## [2.3.1](https://www.github.com/googleapis/python-dataproc/compare/v2.3.0...v2.3.1) (2021-03-27) + + +### Bug Fixes + +* (samples) fixing samples for new machine types ([#150](https://www.github.com/googleapis/python-dataproc/issues/150)) ([3343665](https://www.github.com/googleapis/python-dataproc/commit/334366546501833149479556b55bfbc3c9562236)) + + +### Documentation + +* adding backoff to quickstart test ([#135](https://www.github.com/googleapis/python-dataproc/issues/135)) ([a22df4c](https://www.github.com/googleapis/python-dataproc/commit/a22df4c0a15b2fa51cbe0f0cc2782def1a74c198)) + +## [2.3.0](https://www.github.com/googleapis/python-dataproc/compare/v2.2.0...v2.3.0) (2021-03-01) + + +### Features + +* **v1beta1:** BREAKING CHANGE: remove DOCKER/FLINK from Component enum; adds HBASE ([#108](https://www.github.com/googleapis/python-dataproc/issues/108)) ([ee093a8](https://www.github.com/googleapis/python-dataproc/commit/ee093a88841c7f9c9ea41b066993e56b4abe267d)) + + +### Bug Fixes + +* remove gRPC send/recv limits; expose client transport ([#117](https://www.github.com/googleapis/python-dataproc/issues/117)) 
([6f27109](https://www.github.com/googleapis/python-dataproc/commit/6f27109faf03dd13f25294e57960f0d9e1a9fa27)) + +## [2.2.0](https://www.github.com/googleapis/python-dataproc/compare/v2.1.0...v2.2.0) (2020-11-16) + + +### Features + +* add common resource paths, expose client transport ([#87](https://www.github.com/googleapis/python-dataproc/issues/87)) ([7ec92b7](https://www.github.com/googleapis/python-dataproc/commit/7ec92b71be9c1d0d305421bb1b1dce5d92377bba)), closes [/github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py#L27-L32](https://www.github.com/googleapis//github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py/issues/L27-L32) [#792](https://www.github.com/googleapis/python-dataproc/issues/792) + +## [2.0.2](https://www.github.com/googleapis/python-dataproc/compare/v2.0.1...v2.0.2) (2020-09-16) + + +### Documentation + +* add `submit_job` samples ([#88](https://www.github.com/googleapis/python-dataproc/issues/88)) ([e7379b5](https://www.github.com/googleapis/python-dataproc/commit/e7379b5ab45a0c1e5b6944330c3e8ae4faa115e8)) + +## [2.0.1](https://www.github.com/googleapis/python-dataproc/compare/v2.0.0...v2.0.1) (2020-09-14) + + +### Documentation + +* remove example usage from README ([#77](https://www.github.com/googleapis/python-dataproc/issues/77)) ([66c7af1](https://www.github.com/googleapis/python-dataproc/commit/66c7af157ca5f740ebfec95abb7267e361d855f6)) + +## [2.0.0](https://www.github.com/googleapis/python-dataproc/compare/v1.1.1...v2.0.0) (2020-08-10) + + +### ⚠ BREAKING CHANGES + +* migrate to use microgen (#71) + +### Features + +* migrate to use microgen ([#71](https://www.github.com/googleapis/python-dataproc/issues/71)) ([108d6ff](https://www.github.com/googleapis/python-dataproc/commit/108d6ff91c6442e743cdf449790f981709305a09)) + +## [1.1.1](https://www.github.com/googleapis/python-dataproc/compare/v1.1.0...v1.1.1) (2020-08-10) + + +### Documentation + +* change relative URLs to absolute URLs to fix broken links ([#65](https://www.github.com/googleapis/python-dataproc/issues/65)) ([65c2771](https://www.github.com/googleapis/python-dataproc/commit/65c277120e136edd5648047fcb85f8d0cd104408)) + +## [1.1.0](https://www.github.com/googleapis/python-dataproc/compare/v1.0.1...v1.1.0) (2020-07-31) + + +### Features + +* add support for temp_bucket, endpoint_config in clusters; add preemptibility for instance group configs ([#60](https://www.github.com/googleapis/python-dataproc/issues/60)) ([a80fc72](https://www.github.com/googleapis/python-dataproc/commit/a80fc727510c10c678caa125902c201c8280dcc1)) + +## [1.0.1](https://www.github.com/googleapis/python-dataproc/compare/v1.0.0...v1.0.1) (2020-07-16) + + +### Bug Fixes + +* correct protobuf type for diagnose_cluster, update retry configs ([#55](https://www.github.com/googleapis/python-dataproc/issues/55)) ([822315e](https://www.github.com/googleapis/python-dataproc/commit/822315ec3f2517ebb6ca199b72156ebd50e0518b)) + +## [1.0.0](https://www.github.com/googleapis/python-dataproc/compare/v0.8.1...v1.0.0) (2020-06-17) + + +### Features + +* release as production/stable ([#44](https://www.github.com/googleapis/python-dataproc/issues/44)) ([58f8c87](https://www.github.com/googleapis/python-dataproc/commit/58f8c87acc826e56b2e6271306c7a2078eed59ef)) + +## [0.8.1](https://www.github.com/googleapis/python-dataproc/compare/v0.8.0...v0.8.1) (2020-06-05) + + +### Bug Fixes + +* increase timeout for 
`ClusterController` in v1 ([#36](https://www.github.com/googleapis/python-dataproc/issues/36)) ([3137bee](https://www.github.com/googleapis/python-dataproc/commit/3137bee846002fe6c1e40d410ed0310e3fe86c0c)) + +## [0.8.0](https://www.github.com/googleapis/python-dataproc/compare/v0.7.0...v0.8.0) (2020-05-19) + + +### Features + +* add SparkR and Presto jobs to WorkflowTemplates; add new optional components; add submit_job_as_operation to v1 (via synth) ([#21](https://www.github.com/googleapis/python-dataproc/issues/21)) ([1cf10b6](https://www.github.com/googleapis/python-dataproc/commit/1cf10b6b127a63dbeb34958771c2cc8d8cb37099)) + +## [0.7.0](https://www.github.com/googleapis/python-dataproc/compare/v0.6.1...v0.7.0) (2020-03-05) + + +### Features + +* add lifecycle config and reservation affinity support to v1 (via synth) ([#10](https://www.github.com/googleapis/python-dataproc/issues/10)) ([bb36194](https://www.github.com/googleapis/python-dataproc/commit/bb36194d4b0cfb6f2c5a0358625a17c629f71b21)) + +## 0.6.1 + +11-12-2019 08:24 PST + +### Documentation +- Add python 2 sunset banner to documentation. ([#9036](https://github.com/googleapis/google-cloud-python/pull/9036)) + +## 0.6.0 + +11-07-2019 16:34 PST + + +### Implementation Changes +- Tweak proto annotations (via synth). ([#9466](https://github.com/googleapis/google-cloud-python/pull/9466)) +- Remove send/recv msg size limit (via synth). ([#8951](https://github.com/googleapis/google-cloud-python/pull/8951)) + +### New Features +- Add V1 autoscaling policy support; annotate protos (via synth). ([#9402](https://github.com/googleapis/google-cloud-python/pull/9402)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatibility badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + +## 0.5.0 + +07-24-2019 16:02 PDT + +### Implementation Changes +- Allow kwargs to be passed to create_channel (via synth). ([#8387](https://github.com/googleapis/google-cloud-python/pull/8387)) + +### New Features +- Add 'client_options' support, update list method docstrings (via synth). ([#8505](https://github.com/googleapis/google-cloud-python/pull/8505)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) + +### Internal / Testing Changes +- Pin black version (via synth). ([#8579](https://github.com/googleapis/google-cloud-python/pull/8579)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Declare encoding as utf-8 in pb2 files (via synth). ([#8349](https://github.com/googleapis/google-cloud-python/pull/8349)) +- Add disclaimer to auto-generated template files (via synth). 
([#8311](https://github.com/googleapis/google-cloud-python/pull/8311)) +- Supress checking 'cov-fail-under' in nox default session (via synth). ([#8237](https://github.com/googleapis/google-cloud-python/pull/8237)) + +## 0.4.0 + +05-30-2019 05:52 PDT + +### Implementation Changes +- Update docs/conf.py, add routing header to method metadata, fix docstrings (via synth). ([#7924](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7924)) + +### New Features +- Add new service features for v1, including autoscaling (via synth). ([#8152](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8152)) +- Add new service features for v1beta2, including autoscaling (via synth). ([#8119](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8119)) + +### Documentation +- Add nox session `docs` ([#7429](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7429)) +- Add clarifying comment to blacken nox target. ([#7388](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7388)) + +### Internal / Testing Changes +- Re-add import of 'operations.proto' to V1 'clusters.proto' (via synth). ([#8188](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8188)) +- Add empty lines (via synth). ([#8054](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8054)) +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7535)) +- Copy lintified proto files (via synth). ([#7465](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7465)) + +## 0.3.1 + +02-15-2019 12:36 PST + + +### Implementation Changes +- Remove unused message exports. ([#7266](https://github.com/googleapis/google-cloud-python/pull/7266)) +- Protoc-generated serialization update.. ([#7079](https://github.com/googleapis/google-cloud-python/pull/7079)) +- Trivial housekeeping change to .proto files. ([#7067](https://github.com/googleapis/google-cloud-python/pull/7067)) + +### Documentation +- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) +- Pick up stub docstring fix in GAPIC generator. ([#6967](https://github.com/googleapis/google-cloud-python/pull/6967)) + +### Internal / Testing Changes +- Copy proto files alongside protoc versions. +- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) +- Update copyright headers + +## 0.3.0 + +12-17-2018 18:20 PST + + +### Implementation Changes +- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) +- Update `cluster_controller_client` GAPIC config (via synth). ([#6659](https://github.com/googleapis/google-cloud-python/pull/6659)) +- Add 'WorkflowTemplateServiceClient', optional args; update timeouts (via synth). ([#6655](https://github.com/googleapis/google-cloud-python/pull/6655)) +- Pick up enum fixes in the GAPIC generator. ([#6609](https://github.com/googleapis/google-cloud-python/pull/6609)) +- Pick up fixes in GAPIC generator. ([#6493](https://github.com/googleapis/google-cloud-python/pull/6493)) +- Fix client_info bug, update docstrings. 
([#6408](https://github.com/googleapis/google-cloud-python/pull/6408)) +- Re-generate library using dataproc/synth.py ([#6056](https://github.com/googleapis/google-cloud-python/pull/6056)) +- Re-generate library using dataproc/synth.py ([#5975](https://github.com/googleapis/google-cloud-python/pull/5975)) +- Re-generate library using dataproc/synth.py ([#5949](https://github.com/googleapis/google-cloud-python/pull/5949)) + +### Dependencies +- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Update Dataproc docs URL ([#6455](https://github.com/googleapis/google-cloud-python/pull/6455)) +- Docs: fix GAX fossils ([#6264](https://github.com/googleapis/google-cloud-python/pull/6264)) +- Docs: normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) +- Dataproc: harmonize / DRY 'README.rst' / 'docs/index.rst'. ([#6019](https://github.com/googleapis/google-cloud-python/pull/6019)) + +### Internal / Testing Changes +- Update noxfile. +- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Unblack dataproc gapic and protos. +- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) +- Add synth metadata. ([#6563](https://github.com/googleapis/google-cloud-python/pull/6563)) +- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) + +## 0.2.0 + +### New Features +- Regenerate v1 endpoint. Add v1beta2 endpoint (#5717) + +## 0.1.2 + +### Implementation Changes +- Avoid overwriting '__module__' of messages from shared modules. (#5364) + +### Internal / Testing Changes +- Modify system tests to use prerelease versions of grpcio (#5304) +- Add Test runs for Python 3.7 and remove 3.4 (#5295) +- Re-enable lint for tests, remove usage of pylint (#4921) + +## 0.1.1 + +### Dependencies + +- Update dependency range for api-core to include v1.0.0 releases (#4944) + +### Testing and internal changes + +- Re-enable lint for tests, remove usage of pylint (#4921) +- Normalize all setup.py files (#4909) diff --git a/packages/google-cloud-dataproc/CODE_OF_CONDUCT.md b/packages/google-cloud-dataproc/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-dataproc/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-dataproc/CONTRIBUTING.rst b/packages/google-cloud-dataproc/CONTRIBUTING.rst new file mode 100644 index 000000000000..e3396a938beb --- /dev/null +++ b/packages/google-cloud-dataproc/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
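For orientation, a ``nox`` session is just a decorated Python function in a ``noxfile.py``. The following is a minimal illustrative sketch only, not this repository's actual ``noxfile.py`` (the generated noxfiles define more sessions and options)::

    # noxfile.py -- minimal sketch of a nox unit-test session
    import nox

    @nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"])
    def unit(session):
        # Install the test runner and the package under test.
        session.install("pytest")
        session.install("-e", ".")
        # Run the unit test suite; extra CLI args (e.g. -k) pass through.
        session.run("pytest", *session.posargs, "tests/unit")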
+ +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.11 -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature.
+ +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` directory. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-dataproc + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-dataproc/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-dataproc/LICENSE b/packages/google-cloud-dataproc/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-dataproc/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document.
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-dataproc/MANIFEST.in b/packages/google-cloud-dataproc/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-dataproc/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-dataproc/README.rst b/packages/google-cloud-dataproc/README.rst new file mode 100644 index 000000000000..e6a45d851c74 --- /dev/null +++ b/packages/google-cloud-dataproc/README.rst @@ -0,0 +1,108 @@ +Python Client for Google Cloud Dataproc +======================================= + +|stable| |pypi| |versions| + +`Google Cloud Dataproc`_ is a faster, easier, more cost-effective way to run Apache Spark and Apache Hadoop. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-dataproc.svg + :target: https://pypi.org/project/google-cloud-dataproc/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dataproc.svg + :target: https://pypi.org/project/google-cloud-dataproc/ +.. _Google Cloud Dataproc: https://cloud.google.com/dataproc +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/dataproc/latest +.. _Product Documentation: https://cloud.google.com/dataproc + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Cloud Dataproc.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Dataproc.: https://cloud.google.com/dataproc +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-dataproc + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-dataproc + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Cloud Dataproc + to see other available methods on the client. +- Read the `Google Cloud Dataproc Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Cloud Dataproc Product documentation: https://cloud.google.com/dataproc +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-dataproc/SECURITY.md b/packages/google-cloud-dataproc/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-dataproc/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
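To complement the README's installation steps above, here is a minimal usage sketch for the installed package. The project ID and region below are placeholders, and the regional endpoint override follows Dataproc's documented `{region}-dataproc.googleapis.com` pattern; treat this as an illustration rather than part of the package's own documentation:

    from google.cloud import dataproc_v1

    # Dataproc is a regional service: point the client at the region's endpoint.
    client = dataproc_v1.ClusterControllerClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )

    # List clusters in a (placeholder) project and region.
    for cluster in client.list_clusters(project_id="my-project", region="us-central1"):
        print(cluster.cluster_name, cluster.status.state)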
diff --git a/packages/google-cloud-dataproc/dataproc-v1-py.tar.gz b/packages/google-cloud-dataproc/dataproc-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-dataproc/docs/CHANGELOG.md b/packages/google-cloud-dataproc/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-dataproc/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-dataproc/docs/README.rst b/packages/google-cloud-dataproc/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-dataproc/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-dataproc/docs/_static/custom.css b/packages/google-cloud-dataproc/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-dataproc/docs/_templates/layout.html b/packages/google-cloud-dataproc/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-dataproc/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ <div class="document"> + {{ sidebar() }} + {%- block document %} + <div class="documentwrapper"> + {%- if render_sidebar %} + <div class="bodywrapper"> + {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + <div class="related top">&nbsp;{{- rellink_markup () }}</div> + {%- endif %} + {% endblock %} + + <div class="body" role="main"> + <div class="admonition" id="python2-eol"> + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. + </div> + {% block body %} {% endblock %} + </div> + + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + <div class="related bottom">&nbsp;{{- rellink_markup () }}</div> + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} + </div> + {%- endif %} + </div> + {%- endblock %} + + <div class="clearer"></div> + </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-dataproc/docs/conf.py b/packages/google-cloud-dataproc/docs/conf.py new file mode 100644 index 000000000000..b0ec803979ec --- /dev/null +++ b/packages/google-cloud-dataproc/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-dataproc documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-dataproc" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-dataproc", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dataproc-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dataproc.tex", + "google-cloud-dataproc Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dataproc", + "google-cloud-dataproc Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dataproc", + "google-cloud-dataproc Documentation", + author, + "google-cloud-dataproc", + "google-cloud-dataproc Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-dataproc/docs/dataproc_v1/autoscaling_policy_service.rst b/packages/google-cloud-dataproc/docs/dataproc_v1/autoscaling_policy_service.rst new file mode 100644 index 000000000000..9b885c5744b0 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/dataproc_v1/autoscaling_policy_service.rst @@ -0,0 +1,10 @@ +AutoscalingPolicyService +------------------------------------------ + +.. automodule:: google.cloud.dataproc_v1.services.autoscaling_policy_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dataproc/docs/dataproc_v1/batch_controller.rst b/packages/google-cloud-dataproc/docs/dataproc_v1/batch_controller.rst new file mode 100644 index 000000000000..e28563d2aefd --- /dev/null +++ b/packages/google-cloud-dataproc/docs/dataproc_v1/batch_controller.rst @@ -0,0 +1,10 @@ +BatchController +--------------------------------- + +.. automodule:: google.cloud.dataproc_v1.services.batch_controller + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dataproc_v1.services.batch_controller.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dataproc/docs/dataproc_v1/cluster_controller.rst b/packages/google-cloud-dataproc/docs/dataproc_v1/cluster_controller.rst new file mode 100644 index 000000000000..d9b7f2ad4b47 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/dataproc_v1/cluster_controller.rst @@ -0,0 +1,10 @@ +ClusterController +----------------------------------- + +.. automodule:: google.cloud.dataproc_v1.services.cluster_controller + :members: + :inherited-members: + +.. automodule:: google.cloud.dataproc_v1.services.cluster_controller.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dataproc/docs/dataproc_v1/job_controller.rst b/packages/google-cloud-dataproc/docs/dataproc_v1/job_controller.rst new file mode 100644 index 000000000000..5f14863b7d52 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/dataproc_v1/job_controller.rst @@ -0,0 +1,10 @@ +JobController +------------------------------- + +.. automodule:: google.cloud.dataproc_v1.services.job_controller + :members: + :inherited-members: + +.. automodule:: google.cloud.dataproc_v1.services.job_controller.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dataproc/docs/dataproc_v1/node_group_controller.rst b/packages/google-cloud-dataproc/docs/dataproc_v1/node_group_controller.rst new file mode 100644 index 000000000000..55d67f48675b --- /dev/null +++ b/packages/google-cloud-dataproc/docs/dataproc_v1/node_group_controller.rst @@ -0,0 +1,6 @@ +NodeGroupController +------------------------------------- + +.. automodule:: google.cloud.dataproc_v1.services.node_group_controller + :members: + :inherited-members: diff --git a/packages/google-cloud-dataproc/docs/dataproc_v1/services.rst b/packages/google-cloud-dataproc/docs/dataproc_v1/services.rst new file mode 100644 index 000000000000..aee63982c829 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/dataproc_v1/services.rst @@ -0,0 +1,11 @@ +Services for Google Cloud Dataproc v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + autoscaling_policy_service + batch_controller + cluster_controller + job_controller + node_group_controller + workflow_template_service diff --git a/packages/google-cloud-dataproc/docs/dataproc_v1/types.rst b/packages/google-cloud-dataproc/docs/dataproc_v1/types.rst new file mode 100644 index 000000000000..5dde0cd62477 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/dataproc_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dataproc v1 API +====================================== + +.. automodule:: google.cloud.dataproc_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-dataproc/docs/dataproc_v1/workflow_template_service.rst b/packages/google-cloud-dataproc/docs/dataproc_v1/workflow_template_service.rst new file mode 100644 index 000000000000..0f301cee8bc9 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/dataproc_v1/workflow_template_service.rst @@ -0,0 +1,10 @@ +WorkflowTemplateService +----------------------------------------- + +.. automodule:: google.cloud.dataproc_v1.services.workflow_template_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dataproc_v1.services.workflow_template_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dataproc/docs/index.rst b/packages/google-cloud-dataproc/docs/index.rst new file mode 100644 index 000000000000..ad4b29bbad84 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dataproc_v1/services + dataproc_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-dataproc`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-dataproc/docs/multiprocessing.rst b/packages/google-cloud-dataproc/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-dataproc/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py new file mode 100644 index 000000000000..962abee97ced --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/__init__.py @@ -0,0 +1,343 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.dataproc import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dataproc_v1.services.autoscaling_policy_service.async_client import ( + AutoscalingPolicyServiceAsyncClient, +) +from google.cloud.dataproc_v1.services.autoscaling_policy_service.client import ( + AutoscalingPolicyServiceClient, +) +from google.cloud.dataproc_v1.services.batch_controller.async_client import ( + BatchControllerAsyncClient, +) +from google.cloud.dataproc_v1.services.batch_controller.client import ( + BatchControllerClient, +) +from google.cloud.dataproc_v1.services.cluster_controller.async_client import ( + ClusterControllerAsyncClient, +) +from google.cloud.dataproc_v1.services.cluster_controller.client import ( + ClusterControllerClient, +) +from google.cloud.dataproc_v1.services.job_controller.async_client import ( + JobControllerAsyncClient, +) +from google.cloud.dataproc_v1.services.job_controller.client import JobControllerClient +from google.cloud.dataproc_v1.services.node_group_controller.async_client import ( + NodeGroupControllerAsyncClient, +) +from google.cloud.dataproc_v1.services.node_group_controller.client import ( + NodeGroupControllerClient, +) +from google.cloud.dataproc_v1.services.workflow_template_service.async_client import ( + WorkflowTemplateServiceAsyncClient, +) +from google.cloud.dataproc_v1.services.workflow_template_service.client import ( + WorkflowTemplateServiceClient, +) +from google.cloud.dataproc_v1.types.autoscaling_policies import ( + AutoscalingPolicy, + BasicAutoscalingAlgorithm, + BasicYarnAutoscalingConfig, + CreateAutoscalingPolicyRequest, + DeleteAutoscalingPolicyRequest, + GetAutoscalingPolicyRequest, + InstanceGroupAutoscalingPolicyConfig, + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + UpdateAutoscalingPolicyRequest, +) +from google.cloud.dataproc_v1.types.batches import ( + Batch, + CreateBatchRequest, + DeleteBatchRequest, + GetBatchRequest, + ListBatchesRequest, + ListBatchesResponse, + PySparkBatch, + SparkBatch, + SparkRBatch, + SparkSqlBatch, +) +from google.cloud.dataproc_v1.types.clusters import ( + AcceleratorConfig, + AutoscalingConfig, + AuxiliaryNodeGroup, + AuxiliaryServicesConfig, + Cluster, + ClusterConfig, + ClusterMetrics, + ClusterStatus, + ConfidentialInstanceConfig, + CreateClusterRequest, + DataprocMetricConfig, + DeleteClusterRequest, + DiagnoseClusterRequest, + DiagnoseClusterResults, + DiskConfig, + EncryptionConfig, + EndpointConfig, + GceClusterConfig, + GetClusterRequest, + IdentityConfig, + InstanceFlexibilityPolicy, + InstanceGroupConfig, + InstanceReference, + KerberosConfig, + LifecycleConfig, + ListClustersRequest, + ListClustersResponse, + ManagedGroupConfig, + MetastoreConfig, + NodeGroup, + NodeGroupAffinity, + NodeInitializationAction, + ReservationAffinity, + SecurityConfig, + ShieldedInstanceConfig, + SoftwareConfig, + StartClusterRequest, + StopClusterRequest, + UpdateClusterRequest, + VirtualClusterConfig, +) +from google.cloud.dataproc_v1.types.jobs import ( + CancelJobRequest, + DeleteJobRequest, + DriverSchedulingConfig, + GetJobRequest, + HadoopJob, + HiveJob, + Job, + JobMetadata, + JobPlacement, + JobReference, + JobScheduling, + JobStatus, + ListJobsRequest, + ListJobsResponse, + LoggingConfig, + PigJob, + PrestoJob, + PySparkJob, + QueryList, + SparkJob, + SparkRJob, + SparkSqlJob, + SubmitJobRequest, + TrinoJob, + UpdateJobRequest, + YarnApplication, +) +from google.cloud.dataproc_v1.types.node_groups import ( + 
CreateNodeGroupRequest, + GetNodeGroupRequest, + ResizeNodeGroupRequest, +) +from google.cloud.dataproc_v1.types.operations import ( + BatchOperationMetadata, + ClusterOperationMetadata, + ClusterOperationStatus, + NodeGroupOperationMetadata, +) +from google.cloud.dataproc_v1.types.shared import ( + Component, + EnvironmentConfig, + ExecutionConfig, + FailureAction, + GkeClusterConfig, + GkeNodePoolConfig, + GkeNodePoolTarget, + KubernetesClusterConfig, + KubernetesSoftwareConfig, + PeripheralsConfig, + RuntimeConfig, + RuntimeInfo, + SparkHistoryServerConfig, + UsageMetrics, + UsageSnapshot, +) +from google.cloud.dataproc_v1.types.workflow_templates import ( + ClusterOperation, + ClusterSelector, + CreateWorkflowTemplateRequest, + DeleteWorkflowTemplateRequest, + GetWorkflowTemplateRequest, + InstantiateInlineWorkflowTemplateRequest, + InstantiateWorkflowTemplateRequest, + ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ManagedCluster, + OrderedJob, + ParameterValidation, + RegexValidation, + TemplateParameter, + UpdateWorkflowTemplateRequest, + ValueValidation, + WorkflowGraph, + WorkflowMetadata, + WorkflowNode, + WorkflowTemplate, + WorkflowTemplatePlacement, +) + +__all__ = ( + "AutoscalingPolicyServiceClient", + "AutoscalingPolicyServiceAsyncClient", + "BatchControllerClient", + "BatchControllerAsyncClient", + "ClusterControllerClient", + "ClusterControllerAsyncClient", + "JobControllerClient", + "JobControllerAsyncClient", + "NodeGroupControllerClient", + "NodeGroupControllerAsyncClient", + "WorkflowTemplateServiceClient", + "WorkflowTemplateServiceAsyncClient", + "AutoscalingPolicy", + "BasicAutoscalingAlgorithm", + "BasicYarnAutoscalingConfig", + "CreateAutoscalingPolicyRequest", + "DeleteAutoscalingPolicyRequest", + "GetAutoscalingPolicyRequest", + "InstanceGroupAutoscalingPolicyConfig", + "ListAutoscalingPoliciesRequest", + "ListAutoscalingPoliciesResponse", + "UpdateAutoscalingPolicyRequest", + "Batch", + "CreateBatchRequest", + "DeleteBatchRequest", + "GetBatchRequest", + "ListBatchesRequest", + "ListBatchesResponse", + "PySparkBatch", + "SparkBatch", + "SparkRBatch", + "SparkSqlBatch", + "AcceleratorConfig", + "AutoscalingConfig", + "AuxiliaryNodeGroup", + "AuxiliaryServicesConfig", + "Cluster", + "ClusterConfig", + "ClusterMetrics", + "ClusterStatus", + "ConfidentialInstanceConfig", + "CreateClusterRequest", + "DataprocMetricConfig", + "DeleteClusterRequest", + "DiagnoseClusterRequest", + "DiagnoseClusterResults", + "DiskConfig", + "EncryptionConfig", + "EndpointConfig", + "GceClusterConfig", + "GetClusterRequest", + "IdentityConfig", + "InstanceFlexibilityPolicy", + "InstanceGroupConfig", + "InstanceReference", + "KerberosConfig", + "LifecycleConfig", + "ListClustersRequest", + "ListClustersResponse", + "ManagedGroupConfig", + "MetastoreConfig", + "NodeGroup", + "NodeGroupAffinity", + "NodeInitializationAction", + "ReservationAffinity", + "SecurityConfig", + "ShieldedInstanceConfig", + "SoftwareConfig", + "StartClusterRequest", + "StopClusterRequest", + "UpdateClusterRequest", + "VirtualClusterConfig", + "CancelJobRequest", + "DeleteJobRequest", + "DriverSchedulingConfig", + "GetJobRequest", + "HadoopJob", + "HiveJob", + "Job", + "JobMetadata", + "JobPlacement", + "JobReference", + "JobScheduling", + "JobStatus", + "ListJobsRequest", + "ListJobsResponse", + "LoggingConfig", + "PigJob", + "PrestoJob", + "PySparkJob", + "QueryList", + "SparkJob", + "SparkRJob", + "SparkSqlJob", + "SubmitJobRequest", + "TrinoJob", + "UpdateJobRequest", + "YarnApplication", + 
"CreateNodeGroupRequest", + "GetNodeGroupRequest", + "ResizeNodeGroupRequest", + "BatchOperationMetadata", + "ClusterOperationMetadata", + "ClusterOperationStatus", + "NodeGroupOperationMetadata", + "EnvironmentConfig", + "ExecutionConfig", + "GkeClusterConfig", + "GkeNodePoolConfig", + "GkeNodePoolTarget", + "KubernetesClusterConfig", + "KubernetesSoftwareConfig", + "PeripheralsConfig", + "RuntimeConfig", + "RuntimeInfo", + "SparkHistoryServerConfig", + "UsageMetrics", + "UsageSnapshot", + "Component", + "FailureAction", + "ClusterOperation", + "ClusterSelector", + "CreateWorkflowTemplateRequest", + "DeleteWorkflowTemplateRequest", + "GetWorkflowTemplateRequest", + "InstantiateInlineWorkflowTemplateRequest", + "InstantiateWorkflowTemplateRequest", + "ListWorkflowTemplatesRequest", + "ListWorkflowTemplatesResponse", + "ManagedCluster", + "OrderedJob", + "ParameterValidation", + "RegexValidation", + "TemplateParameter", + "UpdateWorkflowTemplateRequest", + "ValueValidation", + "WorkflowGraph", + "WorkflowMetadata", + "WorkflowNode", + "WorkflowTemplate", + "WorkflowTemplatePlacement", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py new file mode 100644 index 000000000000..2c0f5c4d4059 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "5.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/py.typed b/packages/google-cloud-dataproc/google/cloud/dataproc/py.typed new file mode 100644 index 000000000000..aac99cba9e92 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataproc package uses inline types. diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py new file mode 100644 index 000000000000..1325e8c47c28 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/__init__.py @@ -0,0 +1,327 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.dataproc_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.autoscaling_policy_service import ( + AutoscalingPolicyServiceAsyncClient, + AutoscalingPolicyServiceClient, +) +from .services.batch_controller import BatchControllerAsyncClient, BatchControllerClient +from .services.cluster_controller import ( + ClusterControllerAsyncClient, + ClusterControllerClient, +) +from .services.job_controller import JobControllerAsyncClient, JobControllerClient +from .services.node_group_controller import ( + NodeGroupControllerAsyncClient, + NodeGroupControllerClient, +) +from .services.workflow_template_service import ( + WorkflowTemplateServiceAsyncClient, + WorkflowTemplateServiceClient, +) +from .types.autoscaling_policies import ( + AutoscalingPolicy, + BasicAutoscalingAlgorithm, + BasicYarnAutoscalingConfig, + CreateAutoscalingPolicyRequest, + DeleteAutoscalingPolicyRequest, + GetAutoscalingPolicyRequest, + InstanceGroupAutoscalingPolicyConfig, + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + UpdateAutoscalingPolicyRequest, +) +from .types.batches import ( + Batch, + CreateBatchRequest, + DeleteBatchRequest, + GetBatchRequest, + ListBatchesRequest, + ListBatchesResponse, + PySparkBatch, + SparkBatch, + SparkRBatch, + SparkSqlBatch, +) +from .types.clusters import ( + AcceleratorConfig, + AutoscalingConfig, + AuxiliaryNodeGroup, + AuxiliaryServicesConfig, + Cluster, + ClusterConfig, + ClusterMetrics, + ClusterStatus, + ConfidentialInstanceConfig, + CreateClusterRequest, + DataprocMetricConfig, + DeleteClusterRequest, + DiagnoseClusterRequest, + DiagnoseClusterResults, + DiskConfig, + EncryptionConfig, + EndpointConfig, + GceClusterConfig, + GetClusterRequest, + IdentityConfig, + InstanceFlexibilityPolicy, + InstanceGroupConfig, + InstanceReference, + KerberosConfig, + LifecycleConfig, + ListClustersRequest, + ListClustersResponse, + ManagedGroupConfig, + MetastoreConfig, + NodeGroup, + NodeGroupAffinity, + NodeInitializationAction, + ReservationAffinity, + SecurityConfig, + ShieldedInstanceConfig, + SoftwareConfig, + StartClusterRequest, + StopClusterRequest, + UpdateClusterRequest, + VirtualClusterConfig, +) +from .types.jobs import ( + CancelJobRequest, + DeleteJobRequest, + DriverSchedulingConfig, + GetJobRequest, + HadoopJob, + HiveJob, + Job, + JobMetadata, + JobPlacement, + JobReference, + JobScheduling, + JobStatus, + ListJobsRequest, + ListJobsResponse, + LoggingConfig, + PigJob, + PrestoJob, + PySparkJob, + QueryList, + SparkJob, + SparkRJob, + SparkSqlJob, + SubmitJobRequest, + TrinoJob, + UpdateJobRequest, + YarnApplication, +) +from .types.node_groups import ( + CreateNodeGroupRequest, + GetNodeGroupRequest, + ResizeNodeGroupRequest, +) +from .types.operations import ( + BatchOperationMetadata, + ClusterOperationMetadata, + ClusterOperationStatus, + NodeGroupOperationMetadata, +) +from .types.shared import ( + Component, + EnvironmentConfig, + ExecutionConfig, + FailureAction, + GkeClusterConfig, + GkeNodePoolConfig, + GkeNodePoolTarget, + KubernetesClusterConfig, + KubernetesSoftwareConfig, + PeripheralsConfig, + RuntimeConfig, + RuntimeInfo, + SparkHistoryServerConfig, + UsageMetrics, + UsageSnapshot, +) +from .types.workflow_templates import ( + ClusterOperation, + ClusterSelector, + CreateWorkflowTemplateRequest, + DeleteWorkflowTemplateRequest, + GetWorkflowTemplateRequest, + InstantiateInlineWorkflowTemplateRequest, + InstantiateWorkflowTemplateRequest, + 
ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ManagedCluster, + OrderedJob, + ParameterValidation, + RegexValidation, + TemplateParameter, + UpdateWorkflowTemplateRequest, + ValueValidation, + WorkflowGraph, + WorkflowMetadata, + WorkflowNode, + WorkflowTemplate, + WorkflowTemplatePlacement, +) + +__all__ = ( + "AutoscalingPolicyServiceAsyncClient", + "BatchControllerAsyncClient", + "ClusterControllerAsyncClient", + "JobControllerAsyncClient", + "NodeGroupControllerAsyncClient", + "WorkflowTemplateServiceAsyncClient", + "AcceleratorConfig", + "AutoscalingConfig", + "AutoscalingPolicy", + "AutoscalingPolicyServiceClient", + "AuxiliaryNodeGroup", + "AuxiliaryServicesConfig", + "BasicAutoscalingAlgorithm", + "BasicYarnAutoscalingConfig", + "Batch", + "BatchControllerClient", + "BatchOperationMetadata", + "CancelJobRequest", + "Cluster", + "ClusterConfig", + "ClusterControllerClient", + "ClusterMetrics", + "ClusterOperation", + "ClusterOperationMetadata", + "ClusterOperationStatus", + "ClusterSelector", + "ClusterStatus", + "Component", + "ConfidentialInstanceConfig", + "CreateAutoscalingPolicyRequest", + "CreateBatchRequest", + "CreateClusterRequest", + "CreateNodeGroupRequest", + "CreateWorkflowTemplateRequest", + "DataprocMetricConfig", + "DeleteAutoscalingPolicyRequest", + "DeleteBatchRequest", + "DeleteClusterRequest", + "DeleteJobRequest", + "DeleteWorkflowTemplateRequest", + "DiagnoseClusterRequest", + "DiagnoseClusterResults", + "DiskConfig", + "DriverSchedulingConfig", + "EncryptionConfig", + "EndpointConfig", + "EnvironmentConfig", + "ExecutionConfig", + "FailureAction", + "GceClusterConfig", + "GetAutoscalingPolicyRequest", + "GetBatchRequest", + "GetClusterRequest", + "GetJobRequest", + "GetNodeGroupRequest", + "GetWorkflowTemplateRequest", + "GkeClusterConfig", + "GkeNodePoolConfig", + "GkeNodePoolTarget", + "HadoopJob", + "HiveJob", + "IdentityConfig", + "InstanceFlexibilityPolicy", + "InstanceGroupAutoscalingPolicyConfig", + "InstanceGroupConfig", + "InstanceReference", + "InstantiateInlineWorkflowTemplateRequest", + "InstantiateWorkflowTemplateRequest", + "Job", + "JobControllerClient", + "JobMetadata", + "JobPlacement", + "JobReference", + "JobScheduling", + "JobStatus", + "KerberosConfig", + "KubernetesClusterConfig", + "KubernetesSoftwareConfig", + "LifecycleConfig", + "ListAutoscalingPoliciesRequest", + "ListAutoscalingPoliciesResponse", + "ListBatchesRequest", + "ListBatchesResponse", + "ListClustersRequest", + "ListClustersResponse", + "ListJobsRequest", + "ListJobsResponse", + "ListWorkflowTemplatesRequest", + "ListWorkflowTemplatesResponse", + "LoggingConfig", + "ManagedCluster", + "ManagedGroupConfig", + "MetastoreConfig", + "NodeGroup", + "NodeGroupAffinity", + "NodeGroupControllerClient", + "NodeGroupOperationMetadata", + "NodeInitializationAction", + "OrderedJob", + "ParameterValidation", + "PeripheralsConfig", + "PigJob", + "PrestoJob", + "PySparkBatch", + "PySparkJob", + "QueryList", + "RegexValidation", + "ReservationAffinity", + "ResizeNodeGroupRequest", + "RuntimeConfig", + "RuntimeInfo", + "SecurityConfig", + "ShieldedInstanceConfig", + "SoftwareConfig", + "SparkBatch", + "SparkHistoryServerConfig", + "SparkJob", + "SparkRBatch", + "SparkRJob", + "SparkSqlBatch", + "SparkSqlJob", + "StartClusterRequest", + "StopClusterRequest", + "SubmitJobRequest", + "TemplateParameter", + "TrinoJob", + "UpdateAutoscalingPolicyRequest", + "UpdateClusterRequest", + "UpdateJobRequest", + "UpdateWorkflowTemplateRequest", + "UsageMetrics", + "UsageSnapshot", + 
"ValueValidation", + "VirtualClusterConfig", + "WorkflowGraph", + "WorkflowMetadata", + "WorkflowNode", + "WorkflowTemplate", + "WorkflowTemplatePlacement", + "WorkflowTemplateServiceClient", + "YarnApplication", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_metadata.json b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_metadata.json new file mode 100644 index 000000000000..f8a05276414e --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_metadata.json @@ -0,0 +1,633 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dataproc_v1", + "protoPackage": "google.cloud.dataproc.v1", + "schema": "1.0", + "services": { + "AutoscalingPolicyService": { + "clients": { + "grpc": { + "libraryClient": "AutoscalingPolicyServiceClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "create_autoscaling_policy" + ] + }, + "DeleteAutoscalingPolicy": { + "methods": [ + "delete_autoscaling_policy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "get_autoscaling_policy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "list_autoscaling_policies" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "update_autoscaling_policy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AutoscalingPolicyServiceAsyncClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "create_autoscaling_policy" + ] + }, + "DeleteAutoscalingPolicy": { + "methods": [ + "delete_autoscaling_policy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "get_autoscaling_policy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "list_autoscaling_policies" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "update_autoscaling_policy" + ] + } + } + }, + "rest": { + "libraryClient": "AutoscalingPolicyServiceClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "create_autoscaling_policy" + ] + }, + "DeleteAutoscalingPolicy": { + "methods": [ + "delete_autoscaling_policy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "get_autoscaling_policy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "list_autoscaling_policies" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "update_autoscaling_policy" + ] + } + } + } + } + }, + "BatchController": { + "clients": { + "grpc": { + "libraryClient": "BatchControllerClient", + "rpcs": { + "CreateBatch": { + "methods": [ + "create_batch" + ] + }, + "DeleteBatch": { + "methods": [ + "delete_batch" + ] + }, + "GetBatch": { + "methods": [ + "get_batch" + ] + }, + "ListBatches": { + "methods": [ + "list_batches" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BatchControllerAsyncClient", + "rpcs": { + "CreateBatch": { + "methods": [ + "create_batch" + ] + }, + "DeleteBatch": { + "methods": [ + "delete_batch" + ] + }, + "GetBatch": { + "methods": [ + "get_batch" + ] + }, + "ListBatches": { + "methods": [ + "list_batches" + ] + } + } + }, + "rest": { + "libraryClient": "BatchControllerClient", + "rpcs": { + "CreateBatch": { + "methods": [ + "create_batch" + ] + }, + "DeleteBatch": { + "methods": [ + "delete_batch" + ] + }, + "GetBatch": { + "methods": [ + "get_batch" + ] + }, + "ListBatches": { + "methods": [ + "list_batches" + ] + } + } + } + } + }, + "ClusterController": { + "clients": { + "grpc": { + "libraryClient": "ClusterControllerClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + 
"DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnose_cluster" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "StartCluster": { + "methods": [ + "start_cluster" + ] + }, + "StopCluster": { + "methods": [ + "stop_cluster" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ClusterControllerAsyncClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnose_cluster" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "StartCluster": { + "methods": [ + "start_cluster" + ] + }, + "StopCluster": { + "methods": [ + "stop_cluster" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + } + } + }, + "rest": { + "libraryClient": "ClusterControllerClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnose_cluster" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "StartCluster": { + "methods": [ + "start_cluster" + ] + }, + "StopCluster": { + "methods": [ + "stop_cluster" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + } + } + } + } + }, + "JobController": { + "clients": { + "grpc": { + "libraryClient": "JobControllerClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SubmitJob": { + "methods": [ + "submit_job" + ] + }, + "SubmitJobAsOperation": { + "methods": [ + "submit_job_as_operation" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "grpc-async": { + "libraryClient": "JobControllerAsyncClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SubmitJob": { + "methods": [ + "submit_job" + ] + }, + "SubmitJobAsOperation": { + "methods": [ + "submit_job_as_operation" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "rest": { + "libraryClient": "JobControllerClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SubmitJob": { + "methods": [ + "submit_job" + ] + }, + "SubmitJobAsOperation": { + "methods": [ + "submit_job_as_operation" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + } + } + }, + "NodeGroupController": { + "clients": { + "grpc": { + "libraryClient": "NodeGroupControllerClient", + "rpcs": { + "CreateNodeGroup": { + "methods": [ + "create_node_group" + ] + }, + "GetNodeGroup": { + "methods": [ + "get_node_group" + ] + }, + "ResizeNodeGroup": { + "methods": [ + "resize_node_group" + ] + } + } + }, + "grpc-async": { + "libraryClient": "NodeGroupControllerAsyncClient", + "rpcs": { + "CreateNodeGroup": { + 
"methods": [ + "create_node_group" + ] + }, + "GetNodeGroup": { + "methods": [ + "get_node_group" + ] + }, + "ResizeNodeGroup": { + "methods": [ + "resize_node_group" + ] + } + } + }, + "rest": { + "libraryClient": "NodeGroupControllerClient", + "rpcs": { + "CreateNodeGroup": { + "methods": [ + "create_node_group" + ] + }, + "GetNodeGroup": { + "methods": [ + "get_node_group" + ] + }, + "ResizeNodeGroup": { + "methods": [ + "resize_node_group" + ] + } + } + } + } + }, + "WorkflowTemplateService": { + "clients": { + "grpc": { + "libraryClient": "WorkflowTemplateServiceClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "create_workflow_template" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "delete_workflow_template" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "get_workflow_template" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiate_inline_workflow_template" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiate_workflow_template" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "list_workflow_templates" + ] + }, + "UpdateWorkflowTemplate": { + "methods": [ + "update_workflow_template" + ] + } + } + }, + "grpc-async": { + "libraryClient": "WorkflowTemplateServiceAsyncClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "create_workflow_template" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "delete_workflow_template" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "get_workflow_template" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiate_inline_workflow_template" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiate_workflow_template" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "list_workflow_templates" + ] + }, + "UpdateWorkflowTemplate": { + "methods": [ + "update_workflow_template" + ] + } + } + }, + "rest": { + "libraryClient": "WorkflowTemplateServiceClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "create_workflow_template" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "delete_workflow_template" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "get_workflow_template" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiate_inline_workflow_template" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiate_workflow_template" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "list_workflow_templates" + ] + }, + "UpdateWorkflowTemplate": { + "methods": [ + "update_workflow_template" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py new file mode 100644 index 000000000000..2c0f5c4d4059 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "5.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/py.typed b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/py.typed new file mode 100644 index 000000000000..aac99cba9e92 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataproc package uses inline types. diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py new file mode 100644 index 000000000000..a7165bce0d0a --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import AutoscalingPolicyServiceAsyncClient +from .client import AutoscalingPolicyServiceClient + +__all__ = ( + "AutoscalingPolicyServiceClient", + "AutoscalingPolicyServiceAsyncClient", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py new file mode 100644 index 000000000000..5f6fe9cd6814 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py @@ -0,0 +1,1385 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers +from google.cloud.dataproc_v1.types import autoscaling_policies + +from .client import AutoscalingPolicyServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AutoscalingPolicyServiceTransport +from .transports.grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport + + +class AutoscalingPolicyServiceAsyncClient: + """The API interface for managing autoscaling policies in the + Dataproc API. + """ + + _client: AutoscalingPolicyServiceClient + + DEFAULT_ENDPOINT = AutoscalingPolicyServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AutoscalingPolicyServiceClient.DEFAULT_MTLS_ENDPOINT + + autoscaling_policy_path = staticmethod( + AutoscalingPolicyServiceClient.autoscaling_policy_path + ) + parse_autoscaling_policy_path = staticmethod( + AutoscalingPolicyServiceClient.parse_autoscaling_policy_path + ) + common_billing_account_path = staticmethod( + AutoscalingPolicyServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AutoscalingPolicyServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AutoscalingPolicyServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + AutoscalingPolicyServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AutoscalingPolicyServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AutoscalingPolicyServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + AutoscalingPolicyServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + AutoscalingPolicyServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + AutoscalingPolicyServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + AutoscalingPolicyServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceAsyncClient: The constructed client. + """ + return AutoscalingPolicyServiceClient.from_service_account_info.__func__(AutoscalingPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceAsyncClient: The constructed client. + """ + return AutoscalingPolicyServiceClient.from_service_account_file.__func__(AutoscalingPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AutoscalingPolicyServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AutoscalingPolicyServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AutoscalingPolicyServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(AutoscalingPolicyServiceClient).get_transport_class, + type(AutoscalingPolicyServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, AutoscalingPolicyServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the autoscaling policy service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests.
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AutoscalingPolicyServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AutoscalingPolicyServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_autoscaling_policy( + self, + request: Optional[ + Union[autoscaling_policies.CreateAutoscalingPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + policy: Optional[autoscaling_policies.AutoscalingPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.AutoscalingPolicy: + r"""Creates new autoscaling policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_create_autoscaling_policy(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() + + # Initialize request argument(s) + policy = dataproc_v1.AutoscalingPolicy() + policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 + policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 + policy.worker_config.max_instances = 1389 + + request = dataproc_v1.CreateAutoscalingPolicyRequest( + parent="parent_value", + policy=policy, + ) + + # Make the request + response = await client.create_autoscaling_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest, dict]]): + The request object. A request to create an autoscaling + policy. + parent (:class:`str`): + Required. The "resource name" of the region or location, + as described in + https://cloud.google.com/apis/design/resource_names. 
+ + - For ``projects.regions.autoscalingPolicies.create``, + the resource name of the region has the following + format: ``projects/{project_id}/regions/{region}`` + + - For + ``projects.locations.autoscalingPolicies.create``, + the resource name of the location has the following + format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy (:class:`google.cloud.dataproc_v1.types.AutoscalingPolicy`): + Required. The autoscaling policy to + create. + + This corresponds to the ``policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.AutoscalingPolicy: + Describes an autoscaling policy for + Dataproc cluster autoscaler. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, policy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = autoscaling_policies.CreateAutoscalingPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if policy is not None: + request.policy = policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_autoscaling_policy, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_autoscaling_policy( + self, + request: Optional[ + Union[autoscaling_policies.UpdateAutoscalingPolicyRequest, dict] + ] = None, + *, + policy: Optional[autoscaling_policies.AutoscalingPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.AutoscalingPolicy: + r"""Updates (replaces) autoscaling policy. + + Disabled check for update_mask, because all updates will be full + replacements. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_update_autoscaling_policy(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() + + # Initialize request argument(s) + policy = dataproc_v1.AutoscalingPolicy() + policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 + policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 + policy.worker_config.max_instances = 1389 + + request = dataproc_v1.UpdateAutoscalingPolicyRequest( + policy=policy, + ) + + # Make the request + response = await client.update_autoscaling_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest, dict]]): + The request object. A request to update an autoscaling + policy. + policy (:class:`google.cloud.dataproc_v1.types.AutoscalingPolicy`): + Required. The updated autoscaling + policy. + + This corresponds to the ``policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.AutoscalingPolicy: + Describes an autoscaling policy for + Dataproc cluster autoscaler. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([policy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = autoscaling_policies.UpdateAutoscalingPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if policy is not None: + request.policy = policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_autoscaling_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("policy.name", request.policy.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_autoscaling_policy( + self, + request: Optional[ + Union[autoscaling_policies.GetAutoscalingPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.AutoscalingPolicy: + r"""Retrieves autoscaling policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_get_autoscaling_policy(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.GetAutoscalingPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_autoscaling_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest, dict]]): + The request object. A request to fetch an autoscaling + policy. + name (:class:`str`): + Required. The "resource name" of the autoscaling policy, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.get``, the + resource name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.get``, + the resource name of the policy has the following + format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.AutoscalingPolicy: + Describes an autoscaling policy for + Dataproc cluster autoscaler. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = autoscaling_policies.GetAutoscalingPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_autoscaling_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_autoscaling_policies( + self, + request: Optional[ + Union[autoscaling_policies.ListAutoscalingPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutoscalingPoliciesAsyncPager: + r"""Lists autoscaling policies in the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_list_autoscaling_policies(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.ListAutoscalingPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autoscaling_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest, dict]]): + The request object. A request to list autoscaling + policies in a project. + parent (:class:`str`): + Required. The "resource name" of the region or location, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.list``, + the resource name of the region has the following + format: ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.list``, + the resource name of the location has the following + format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesAsyncPager: + A response to a request to list + autoscaling policies in a project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_autoscaling_policies, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAutoscalingPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_autoscaling_policy( + self, + request: Optional[ + Union[autoscaling_policies.DeleteAutoscalingPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an autoscaling policy. It is an error to + delete an autoscaling policy that is in use by one or + more clusters. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_delete_autoscaling_policy(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.DeleteAutoscalingPolicyRequest( + name="name_value", + ) + + # Make the request + await client.delete_autoscaling_policy(request=request) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest, dict]]): + The request object. A request to delete an autoscaling + policy. + Autoscaling policies in use by one or + more clusters will not be deleted. + name (:class:`str`): + Required. The "resource name" of the autoscaling policy, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.delete``, + the resource name of the policy has the following + format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For + ``projects.locations.autoscalingPolicies.delete``, + the resource name of the policy has the following + format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = autoscaling_policies.DeleteAutoscalingPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_autoscaling_policy, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__.
+ """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. 
+ + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "AutoscalingPolicyServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AutoscalingPolicyServiceAsyncClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py new file mode 100644 index 000000000000..e68af0f06b79 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py @@ -0,0 +1,1590 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers +from google.cloud.dataproc_v1.types import autoscaling_policies + +from .transports.base import DEFAULT_CLIENT_INFO, AutoscalingPolicyServiceTransport +from .transports.grpc import AutoscalingPolicyServiceGrpcTransport +from .transports.grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport +from .transports.rest import AutoscalingPolicyServiceRestTransport + + +class AutoscalingPolicyServiceClientMeta(type): + """Metaclass for the AutoscalingPolicyService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[AutoscalingPolicyServiceTransport]] + _transport_registry["grpc"] = AutoscalingPolicyServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AutoscalingPolicyServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AutoscalingPolicyServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AutoscalingPolicyServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AutoscalingPolicyServiceClient(metaclass=AutoscalingPolicyServiceClientMeta): + """The API interface for managing autoscaling policies in the + Dataproc API. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataproc.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AutoscalingPolicyServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AutoscalingPolicyServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def autoscaling_policy_path( + project: str, + location: str, + autoscaling_policy: str, + ) -> str: + """Returns a fully-qualified autoscaling_policy string.""" + return "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}".format( + project=project, + location=location, + autoscaling_policy=autoscaling_policy, + ) + + @staticmethod + def parse_autoscaling_policy_path(path: str) -> Dict[str, str]: + """Parses a autoscaling_policy path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/autoscalingPolicies/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, AutoscalingPolicyServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the autoscaling policy service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, AutoscalingPolicyServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AutoscalingPolicyServiceTransport): + # transport is a AutoscalingPolicyServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_autoscaling_policy( + self, + request: Optional[ + Union[autoscaling_policies.CreateAutoscalingPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + policy: Optional[autoscaling_policies.AutoscalingPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.AutoscalingPolicy: + r"""Creates new autoscaling policy. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_create_autoscaling_policy(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceClient() + + # Initialize request argument(s) + policy = dataproc_v1.AutoscalingPolicy() + policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 + policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 + policy.worker_config.max_instances = 1389 + + request = dataproc_v1.CreateAutoscalingPolicyRequest( + parent="parent_value", + policy=policy, + ) + + # Make the request + response = client.create_autoscaling_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest, dict]): + The request object. A request to create an autoscaling + policy. + parent (str): + Required. The "resource name" of the region or location, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.create``, + the resource name of the region has the following + format: ``projects/{project_id}/regions/{region}`` + + - For + ``projects.locations.autoscalingPolicies.create``, + the resource name of the location has the following + format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): + Required. The autoscaling policy to + create. + + This corresponds to the ``policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.AutoscalingPolicy: + Describes an autoscaling policy for + Dataproc cluster autoscaler. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, policy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a autoscaling_policies.CreateAutoscalingPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, autoscaling_policies.CreateAutoscalingPolicyRequest): + request = autoscaling_policies.CreateAutoscalingPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if policy is not None: + request.policy = policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
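+        # Unlike the operations and IAM helpers further down, which wrap the
+        # raw transport method with gapic_v1.method.wrap_method on every call,
+        # the service RPCs use _wrapped_methods, a per-transport cache of
+        # callables that already carry the default retry and timeout settings
+        # from the service configuration.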
+ rpc = self._transport._wrapped_methods[ + self._transport.create_autoscaling_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_autoscaling_policy( + self, + request: Optional[ + Union[autoscaling_policies.UpdateAutoscalingPolicyRequest, dict] + ] = None, + *, + policy: Optional[autoscaling_policies.AutoscalingPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.AutoscalingPolicy: + r"""Updates (replaces) autoscaling policy. + + Disabled check for update_mask, because all updates will be full + replacements. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_update_autoscaling_policy(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceClient() + + # Initialize request argument(s) + policy = dataproc_v1.AutoscalingPolicy() + policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 + policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 + policy.worker_config.max_instances = 1389 + + request = dataproc_v1.UpdateAutoscalingPolicyRequest( + policy=policy, + ) + + # Make the request + response = client.update_autoscaling_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest, dict]): + The request object. A request to update an autoscaling + policy. + policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): + Required. The updated autoscaling + policy. + + This corresponds to the ``policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.AutoscalingPolicy: + Describes an autoscaling policy for + Dataproc cluster autoscaler. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([policy]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a autoscaling_policies.UpdateAutoscalingPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
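+        # The proto-plus constructor below copies a dict or a foreign message
+        # into a new UpdateAutoscalingPolicyRequest; an instance of the exact
+        # request type is used as-is, avoiding the copy.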
+ if not isinstance(request, autoscaling_policies.UpdateAutoscalingPolicyRequest): + request = autoscaling_policies.UpdateAutoscalingPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if policy is not None: + request.policy = policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_autoscaling_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("policy.name", request.policy.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_autoscaling_policy( + self, + request: Optional[ + Union[autoscaling_policies.GetAutoscalingPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.AutoscalingPolicy: + r"""Retrieves autoscaling policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_get_autoscaling_policy(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceClient() + + # Initialize request argument(s) + request = dataproc_v1.GetAutoscalingPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_autoscaling_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest, dict]): + The request object. A request to fetch an autoscaling + policy. + name (str): + Required. The "resource name" of the autoscaling policy, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.get``, the + resource name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.get``, + the resource name of the policy has the following + format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.AutoscalingPolicy: + Describes an autoscaling policy for + Dataproc cluster autoscaler. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
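+        # For example (hypothetical name), get_autoscaling_policy(name="...")
+        # and get_autoscaling_policy(request=request) are each valid on their
+        # own, but supplying both raises the ValueError below.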
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a autoscaling_policies.GetAutoscalingPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, autoscaling_policies.GetAutoscalingPolicyRequest): + request = autoscaling_policies.GetAutoscalingPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_autoscaling_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_autoscaling_policies( + self, + request: Optional[ + Union[autoscaling_policies.ListAutoscalingPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAutoscalingPoliciesPager: + r"""Lists autoscaling policies in the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_list_autoscaling_policies(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceClient() + + # Initialize request argument(s) + request = dataproc_v1.ListAutoscalingPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_autoscaling_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest, dict]): + The request object. A request to list autoscaling + policies in a project. + parent (str): + Required. The "resource name" of the region or location, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.list``, + the resource name of the region has the following + format: ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.list``, + the resource name of the location has the following + format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesPager: + A response to a request to list + autoscaling policies in a project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a autoscaling_policies.ListAutoscalingPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, autoscaling_policies.ListAutoscalingPoliciesRequest): + request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_autoscaling_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAutoscalingPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_autoscaling_policy( + self, + request: Optional[ + Union[autoscaling_policies.DeleteAutoscalingPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an autoscaling policy. It is an error to + delete an autoscaling policy that is in use by one or + more clusters. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_delete_autoscaling_policy(): + # Create a client + client = dataproc_v1.AutoscalingPolicyServiceClient() + + # Initialize request argument(s) + request = dataproc_v1.DeleteAutoscalingPolicyRequest( + name="name_value", + ) + + # Make the request + client.delete_autoscaling_policy(request=request) + + Args: + request (Union[google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest, dict]): + The request object. A request to delete an autoscaling + policy. 
+ Autoscaling policies in use by one or + more clusters will not be deleted. + name (str): + Required. The "resource name" of the autoscaling policy, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.delete``, + the resource name of the policy has the following + format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For + ``projects.locations.autoscalingPolicies.delete``, + the resource name of the policy has the following + format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a autoscaling_policies.DeleteAutoscalingPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, autoscaling_policies.DeleteAutoscalingPolicyRequest): + request = autoscaling_policies.DeleteAutoscalingPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_autoscaling_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "AutoscalingPolicyServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. 
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a function.
+
+        If the function does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for ``TestIamPermissions`` method.
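+
+                A minimal usage sketch (the resource name and permission are
+                hypothetical placeholders)::
+
+                    response = client.test_iam_permissions(
+                        request={
+                            "resource": "projects/my-project/regions/us-central1/autoscalingPolicies/my-policy",
+                            "permissions": ["dataproc.autoscalingPolicies.get"],
+                        }
+                    )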
+ """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("AutoscalingPolicyServiceClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py new file mode 100644 index 000000000000..4678c17b54b3 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dataproc_v1.types import autoscaling_policies + + +class ListAutoscalingPoliciesPager: + """A pager for iterating through ``list_autoscaling_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAutoscalingPolicies`` requests and continue to iterate + through the ``policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., autoscaling_policies.ListAutoscalingPoliciesResponse], + request: autoscaling_policies.ListAutoscalingPoliciesRequest, + response: autoscaling_policies.ListAutoscalingPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest): + The initial request object. 
+ response (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[autoscaling_policies.ListAutoscalingPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[autoscaling_policies.AutoscalingPolicy]: + for page in self.pages: + yield from page.policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAutoscalingPoliciesAsyncPager: + """A pager for iterating through ``list_autoscaling_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``policies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAutoscalingPolicies`` requests and continue to iterate + through the ``policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse] + ], + request: autoscaling_policies.ListAutoscalingPoliciesRequest, + response: autoscaling_policies.ListAutoscalingPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest): + The initial request object. + response (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
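+
+        A consumption sketch (assuming the pager was returned by the async
+        client's ``list_autoscaling_policies`` call; ``name`` is the
+        policy's resource-name field)::
+
+            async for policy in pager:
+                print(policy.name)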
+ """ + self._method = method + self._request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[autoscaling_policies.ListAutoscalingPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[autoscaling_policies.AutoscalingPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py new file mode 100644 index 000000000000..616399c667d8 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import AutoscalingPolicyServiceTransport +from .grpc import AutoscalingPolicyServiceGrpcTransport +from .grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport +from .rest import ( + AutoscalingPolicyServiceRestInterceptor, + AutoscalingPolicyServiceRestTransport, +) + +# Compile a registry of transports. 
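+# The keys below ("grpc", "grpc_asyncio", "rest") are assumed to match the
+# transport names accepted by the client's ``transport`` argument; the
+# client resolves that argument against this mapping to select a class.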
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[AutoscalingPolicyServiceTransport]] +_transport_registry["grpc"] = AutoscalingPolicyServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AutoscalingPolicyServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AutoscalingPolicyServiceRestTransport + +__all__ = ( + "AutoscalingPolicyServiceTransport", + "AutoscalingPolicyServiceGrpcTransport", + "AutoscalingPolicyServiceGrpcAsyncIOTransport", + "AutoscalingPolicyServiceRestTransport", + "AutoscalingPolicyServiceRestInterceptor", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py new file mode 100644 index 000000000000..913744b0418c --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version +from google.cloud.dataproc_v1.types import autoscaling_policies + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class AutoscalingPolicyServiceTransport(abc.ABC): + """Abstract transport class for AutoscalingPolicyService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "dataproc.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply an audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
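+        # Each entry below pairs an RPC with its default policy: for example,
+        # ``get_autoscaling_policy`` retries DEADLINE_EXCEEDED and UNAVAILABLE
+        # errors with exponential backoff (0.1s initial delay, 1.3x multiplier,
+        # 60s cap) until the 600s deadline expires, while
+        # ``create_autoscaling_policy`` is wrapped with a timeout only.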
+ self._wrapped_methods = { + self.create_autoscaling_policy: gapic_v1.method.wrap_method( + self.create_autoscaling_policy, + default_timeout=600.0, + client_info=client_info, + ), + self.update_autoscaling_policy: gapic_v1.method.wrap_method( + self.update_autoscaling_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_autoscaling_policy: gapic_v1.method.wrap_method( + self.get_autoscaling_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.list_autoscaling_policies: gapic_v1.method.wrap_method( + self.list_autoscaling_policies, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.delete_autoscaling_policy: gapic_v1.method.wrap_method( + self.delete_autoscaling_policy, + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.CreateAutoscalingPolicyRequest], + Union[ + autoscaling_policies.AutoscalingPolicy, + Awaitable[autoscaling_policies.AutoscalingPolicy], + ], + ]: + raise NotImplementedError() + + @property + def update_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.UpdateAutoscalingPolicyRequest], + Union[ + autoscaling_policies.AutoscalingPolicy, + Awaitable[autoscaling_policies.AutoscalingPolicy], + ], + ]: + raise NotImplementedError() + + @property + def get_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.GetAutoscalingPolicyRequest], + Union[ + autoscaling_policies.AutoscalingPolicy, + Awaitable[autoscaling_policies.AutoscalingPolicy], + ], + ]: + raise NotImplementedError() + + @property + def list_autoscaling_policies( + self, + ) -> Callable[ + [autoscaling_policies.ListAutoscalingPoliciesRequest], + Union[ + autoscaling_policies.ListAutoscalingPoliciesResponse, + Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.DeleteAutoscalingPolicyRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> 
Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("AutoscalingPolicyServiceTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py new file mode 100644 index 000000000000..2388dccebc61 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py @@ -0,0 +1,543 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dataproc_v1.types import autoscaling_policies + +from .base import DEFAULT_CLIENT_INFO, AutoscalingPolicyServiceTransport + + +class AutoscalingPolicyServiceGrpcTransport(AutoscalingPolicyServiceTransport): + """gRPC backend transport for AutoscalingPolicyService. + + The API interface for managing autoscaling policies in the + Dataproc API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
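+
+    A construction sketch (illustrative only; in normal use the client
+    builds the transport for you, e.g. via ``transport="grpc"`` on the
+    client constructor)::
+
+        transport = AutoscalingPolicyServiceGrpcTransport(
+            host="dataproc.googleapis.com",
+        )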
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
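+
+        A usage sketch (the quota project id is an illustrative placeholder)::
+
+            channel = AutoscalingPolicyServiceGrpcTransport.create_channel(
+                "dataproc.googleapis.com",
+                quota_project_id="my-billing-project",
+            )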
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.CreateAutoscalingPolicyRequest], + autoscaling_policies.AutoscalingPolicy, + ]: + r"""Return a callable for the create autoscaling policy method over gRPC. + + Creates new autoscaling policy. + + Returns: + Callable[[~.CreateAutoscalingPolicyRequest], + ~.AutoscalingPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_autoscaling_policy" not in self._stubs: + self._stubs["create_autoscaling_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/CreateAutoscalingPolicy", + request_serializer=autoscaling_policies.CreateAutoscalingPolicyRequest.serialize, + response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, + ) + return self._stubs["create_autoscaling_policy"] + + @property + def update_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.UpdateAutoscalingPolicyRequest], + autoscaling_policies.AutoscalingPolicy, + ]: + r"""Return a callable for the update autoscaling policy method over gRPC. + + Updates (replaces) autoscaling policy. + + Disabled check for update_mask, because all updates will be full + replacements. + + Returns: + Callable[[~.UpdateAutoscalingPolicyRequest], + ~.AutoscalingPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_autoscaling_policy" not in self._stubs: + self._stubs["update_autoscaling_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/UpdateAutoscalingPolicy", + request_serializer=autoscaling_policies.UpdateAutoscalingPolicyRequest.serialize, + response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, + ) + return self._stubs["update_autoscaling_policy"] + + @property + def get_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.GetAutoscalingPolicyRequest], + autoscaling_policies.AutoscalingPolicy, + ]: + r"""Return a callable for the get autoscaling policy method over gRPC. + + Retrieves autoscaling policy. + + Returns: + Callable[[~.GetAutoscalingPolicyRequest], + ~.AutoscalingPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
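+        # The stub is cached in ``self._stubs`` so repeated property accesses
+        # reuse a single bound RPC method instead of re-creating it.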
+ if "get_autoscaling_policy" not in self._stubs: + self._stubs["get_autoscaling_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/GetAutoscalingPolicy", + request_serializer=autoscaling_policies.GetAutoscalingPolicyRequest.serialize, + response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, + ) + return self._stubs["get_autoscaling_policy"] + + @property + def list_autoscaling_policies( + self, + ) -> Callable[ + [autoscaling_policies.ListAutoscalingPoliciesRequest], + autoscaling_policies.ListAutoscalingPoliciesResponse, + ]: + r"""Return a callable for the list autoscaling policies method over gRPC. + + Lists autoscaling policies in the project. + + Returns: + Callable[[~.ListAutoscalingPoliciesRequest], + ~.ListAutoscalingPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_autoscaling_policies" not in self._stubs: + self._stubs["list_autoscaling_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/ListAutoscalingPolicies", + request_serializer=autoscaling_policies.ListAutoscalingPoliciesRequest.serialize, + response_deserializer=autoscaling_policies.ListAutoscalingPoliciesResponse.deserialize, + ) + return self._stubs["list_autoscaling_policies"] + + @property + def delete_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.DeleteAutoscalingPolicyRequest], empty_pb2.Empty + ]: + r"""Return a callable for the delete autoscaling policy method over gRPC. + + Deletes an autoscaling policy. It is an error to + delete an autoscaling policy that is in use by one or + more clusters. + + Returns: + Callable[[~.DeleteAutoscalingPolicyRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_autoscaling_policy" not in self._stubs: + self._stubs["delete_autoscaling_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/DeleteAutoscalingPolicy", + request_serializer=autoscaling_policies.DeleteAutoscalingPolicyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_autoscaling_policy"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
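+        # Unlike the Dataproc stubs above, which use proto-plus
+        # ``serialize``/``deserialize``, the mixin stubs below use the raw
+        # protobuf ``SerializeToString``/``FromString`` helpers, because
+        # ``operations_pb2`` and IAM messages are not proto-plus wrapped types.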
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AutoscalingPolicyServiceGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..bca9d5b877a8 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py @@ -0,0 +1,543 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataproc_v1.types import autoscaling_policies
+
+from .base import DEFAULT_CLIENT_INFO, AutoscalingPolicyServiceTransport
+from .grpc import AutoscalingPolicyServiceGrpcTransport
+
+
+class AutoscalingPolicyServiceGrpcAsyncIOTransport(AutoscalingPolicyServiceTransport):
+    """gRPC AsyncIO backend transport for AutoscalingPolicyService.
+
+    The API interface for managing autoscaling policies in the
+    Dataproc API.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+
+        A sketch of supplying a pre-built channel (illustrative only; as noted
+        above, the credentials arguments are then ignored)::
+
+            channel = AutoscalingPolicyServiceGrpcAsyncIOTransport.create_channel(
+                "dataproc.googleapis.com",
+            )
+            transport = AutoscalingPolicyServiceGrpcAsyncIOTransport(channel=channel)
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.CreateAutoscalingPolicyRequest], + Awaitable[autoscaling_policies.AutoscalingPolicy], + ]: + r"""Return a callable for the create autoscaling policy method over gRPC. + + Creates new autoscaling policy. + + Returns: + Callable[[~.CreateAutoscalingPolicyRequest], + Awaitable[~.AutoscalingPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
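+        # Unlike the synchronous transport, the callable returned here yields
+        # an awaitable; callers ``await`` the result of invoking it.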
+ if "create_autoscaling_policy" not in self._stubs: + self._stubs["create_autoscaling_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/CreateAutoscalingPolicy", + request_serializer=autoscaling_policies.CreateAutoscalingPolicyRequest.serialize, + response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, + ) + return self._stubs["create_autoscaling_policy"] + + @property + def update_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.UpdateAutoscalingPolicyRequest], + Awaitable[autoscaling_policies.AutoscalingPolicy], + ]: + r"""Return a callable for the update autoscaling policy method over gRPC. + + Updates (replaces) autoscaling policy. + + Disabled check for update_mask, because all updates will be full + replacements. + + Returns: + Callable[[~.UpdateAutoscalingPolicyRequest], + Awaitable[~.AutoscalingPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_autoscaling_policy" not in self._stubs: + self._stubs["update_autoscaling_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/UpdateAutoscalingPolicy", + request_serializer=autoscaling_policies.UpdateAutoscalingPolicyRequest.serialize, + response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, + ) + return self._stubs["update_autoscaling_policy"] + + @property + def get_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.GetAutoscalingPolicyRequest], + Awaitable[autoscaling_policies.AutoscalingPolicy], + ]: + r"""Return a callable for the get autoscaling policy method over gRPC. + + Retrieves autoscaling policy. + + Returns: + Callable[[~.GetAutoscalingPolicyRequest], + Awaitable[~.AutoscalingPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_autoscaling_policy" not in self._stubs: + self._stubs["get_autoscaling_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/GetAutoscalingPolicy", + request_serializer=autoscaling_policies.GetAutoscalingPolicyRequest.serialize, + response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, + ) + return self._stubs["get_autoscaling_policy"] + + @property + def list_autoscaling_policies( + self, + ) -> Callable[ + [autoscaling_policies.ListAutoscalingPoliciesRequest], + Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse], + ]: + r"""Return a callable for the list autoscaling policies method over gRPC. + + Lists autoscaling policies in the project. + + Returns: + Callable[[~.ListAutoscalingPoliciesRequest], + Awaitable[~.ListAutoscalingPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_autoscaling_policies" not in self._stubs: + self._stubs["list_autoscaling_policies"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/ListAutoscalingPolicies", + request_serializer=autoscaling_policies.ListAutoscalingPoliciesRequest.serialize, + response_deserializer=autoscaling_policies.ListAutoscalingPoliciesResponse.deserialize, + ) + return self._stubs["list_autoscaling_policies"] + + @property + def delete_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.DeleteAutoscalingPolicyRequest], + Awaitable[empty_pb2.Empty], + ]: + r"""Return a callable for the delete autoscaling policy method over gRPC. + + Deletes an autoscaling policy. It is an error to + delete an autoscaling policy that is in use by one or + more clusters. + + Returns: + Callable[[~.DeleteAutoscalingPolicyRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_autoscaling_policy" not in self._stubs: + self._stubs["delete_autoscaling_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.AutoscalingPolicyService/DeleteAutoscalingPolicy", + request_serializer=autoscaling_policies.DeleteAutoscalingPolicyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_autoscaling_policy"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
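As a hedged usage sketch for this mixin: the generated client surfaces `test_iam_permissions`, so a caller can probe its own permissions on a policy resource. The project, resource name, and permission string below are placeholders, and Application Default Credentials are assumed.

```python
from google.cloud import dataproc_v1
from google.iam.v1 import iam_policy_pb2

client = dataproc_v1.AutoscalingPolicyServiceClient()
response = client.test_iam_permissions(
    iam_policy_pb2.TestIamPermissionsRequest(
        resource="projects/my-project/regions/us-central1/autoscalingPolicies/my-policy",
        permissions=["dataproc.autoscalingPolicies.get"],
    )
)
# Only the permissions the caller actually holds come back.
print(list(response.permissions))
```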
+ Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("AutoscalingPolicyServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py new file mode 100644 index 000000000000..6578c10c1ded --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py @@ -0,0 +1,1601 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataproc_v1.types import autoscaling_policies + +from .base import AutoscalingPolicyServiceTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AutoscalingPolicyServiceRestInterceptor: + """Interceptor for AutoscalingPolicyService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AutoscalingPolicyServiceRestTransport. + + .. code-block:: python + class MyCustomAutoscalingPolicyServiceInterceptor(AutoscalingPolicyServiceRestInterceptor): + def pre_create_autoscaling_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_autoscaling_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_autoscaling_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_autoscaling_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_autoscaling_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_autoscaling_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_autoscaling_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_autoscaling_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_autoscaling_policy(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AutoscalingPolicyServiceRestTransport(interceptor=MyCustomAutoscalingPolicyServiceInterceptor()) + client = AutoscalingPolicyServiceClient(transport=transport) + + + """ + + def pre_create_autoscaling_policy( + self, + request: autoscaling_policies.CreateAutoscalingPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + autoscaling_policies.CreateAutoscalingPolicyRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_autoscaling_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_create_autoscaling_policy( + self, response: autoscaling_policies.AutoscalingPolicy + ) -> autoscaling_policies.AutoscalingPolicy: + """Post-rpc interceptor for create_autoscaling_policy + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_delete_autoscaling_policy( + self, + request: autoscaling_policies.DeleteAutoscalingPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + autoscaling_policies.DeleteAutoscalingPolicyRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_autoscaling_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def pre_get_autoscaling_policy( + self, + request: autoscaling_policies.GetAutoscalingPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + autoscaling_policies.GetAutoscalingPolicyRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_autoscaling_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. 
+ """ + return request, metadata + + def post_get_autoscaling_policy( + self, response: autoscaling_policies.AutoscalingPolicy + ) -> autoscaling_policies.AutoscalingPolicy: + """Post-rpc interceptor for get_autoscaling_policy + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_list_autoscaling_policies( + self, + request: autoscaling_policies.ListAutoscalingPoliciesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + autoscaling_policies.ListAutoscalingPoliciesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_autoscaling_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_list_autoscaling_policies( + self, response: autoscaling_policies.ListAutoscalingPoliciesResponse + ) -> autoscaling_policies.ListAutoscalingPoliciesResponse: + """Post-rpc interceptor for list_autoscaling_policies + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_update_autoscaling_policy( + self, + request: autoscaling_policies.UpdateAutoscalingPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + autoscaling_policies.UpdateAutoscalingPolicyRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_autoscaling_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_update_autoscaling_policy( + self, response: autoscaling_policies.AutoscalingPolicy + ) -> autoscaling_policies.AutoscalingPolicy: + """Post-rpc interceptor for update_autoscaling_policy + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. 
+ """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the AutoscalingPolicyService server. 
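Tying the pre/post hooks together, here is a sketch of one concrete interceptor in the style of the class docstring's example: it appends an extra metadata header to create requests (the `x-example-origin` header name is invented for illustration) and logs the created policy's name.

```python
import logging

from google.cloud.dataproc_v1 import AutoscalingPolicyServiceClient
from google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.rest import (
    AutoscalingPolicyServiceRestInterceptor,
    AutoscalingPolicyServiceRestTransport,
)


class TaggingInterceptor(AutoscalingPolicyServiceRestInterceptor):
    def pre_create_autoscaling_policy(self, request, metadata):
        # Metadata is a sequence of (key, value) pairs; extend a copy of it.
        metadata = list(metadata) + [("x-example-origin", "tuning-job")]
        return request, metadata

    def post_create_autoscaling_policy(self, response):
        logging.info("created policy %s", response.name)
        return response


transport = AutoscalingPolicyServiceRestTransport(interceptor=TaggingInterceptor())
client = AutoscalingPolicyServiceClient(transport=transport)
```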
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the AutoscalingPolicyService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AutoscalingPolicyServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AutoscalingPolicyServiceRestInterceptor + + +class AutoscalingPolicyServiceRestTransport(AutoscalingPolicyServiceTransport): + """REST backend transport for AutoscalingPolicyService. + + The API interface for managing autoscaling policies in the + Dataproc API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AutoscalingPolicyServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or AutoscalingPolicyServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateAutoscalingPolicy(AutoscalingPolicyServiceRestStub):
+        def __hash__(self):
+            return hash("CreateAutoscalingPolicy")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: autoscaling_policies.CreateAutoscalingPolicyRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> autoscaling_policies.AutoscalingPolicy:
+            r"""Call the create autoscaling policy method over HTTP.
+
+            Args:
+                request (~.autoscaling_policies.CreateAutoscalingPolicyRequest):
+                    The request object. A request to create an autoscaling
+                    policy.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.autoscaling_policies.AutoscalingPolicy:
+                    Describes an autoscaling policy for
+                Dataproc cluster autoscaler.
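The constructor above normalizes the host with the `scheme`/`host` named groups before handing it to the base class; a tiny standalone sketch of that normalization:

```python
import re


def normalize_host(host: str, url_scheme: str = "https") -> str:
    maybe_url_match = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
    assert maybe_url_match is not None  # the pattern matches any string
    items = maybe_url_match.groupdict()
    # Bare hostnames get url_scheme prefixed; an explicit scheme wins.
    return host if items["scheme"] else f"{url_scheme}://{host}"


print(normalize_host("dataproc.googleapis.com"))  # https://dataproc.googleapis.com
print(normalize_host("http://localhost:8080"))    # http://localhost:8080
```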
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/autoscalingPolicies", + "body": "policy", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/regions/*}/autoscalingPolicies", + "body": "policy", + }, + ] + request, metadata = self._interceptor.pre_create_autoscaling_policy( + request, metadata + ) + pb_request = autoscaling_policies.CreateAutoscalingPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autoscaling_policies.AutoscalingPolicy() + pb_resp = autoscaling_policies.AutoscalingPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_autoscaling_policy(resp) + return resp + + class _DeleteAutoscalingPolicy(AutoscalingPolicyServiceRestStub): + def __hash__(self): + return hash("DeleteAutoscalingPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: autoscaling_policies.DeleteAutoscalingPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete autoscaling policy method over HTTP. + + Args: + request (~.autoscaling_policies.DeleteAutoscalingPolicyRequest): + The request object. A request to delete an autoscaling + policy. + Autoscaling policies in use by one or + more clusters will not be deleted. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
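Every `__call__` above follows the same transcode-then-request flow. A small sketch of what `path_template.transcode` is expected to produce for the create rule, using placeholder dict inputs rather than real protobuf messages:

```python
from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/v1/{parent=projects/*/locations/*}/autoscalingPolicies",
        "body": "policy",
    },
]
request = {
    "parent": "projects/my-project/locations/us-central1",
    "policy": {"id": "demo-policy"},
}
transcoded = path_template.transcode(http_options, **request)
print(transcoded["method"])  # post
print(transcoded["uri"])     # /v1/projects/my-project/locations/us-central1/autoscalingPolicies
print(transcoded["body"])    # {'id': 'demo-policy'}
```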
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_autoscaling_policy( + request, metadata + ) + pb_request = autoscaling_policies.DeleteAutoscalingPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetAutoscalingPolicy(AutoscalingPolicyServiceRestStub): + def __hash__(self): + return hash("GetAutoscalingPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: autoscaling_policies.GetAutoscalingPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.AutoscalingPolicy: + r"""Call the get autoscaling policy method over HTTP. + + Args: + request (~.autoscaling_policies.GetAutoscalingPolicyRequest): + The request object. A request to fetch an autoscaling + policy. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autoscaling_policies.AutoscalingPolicy: + Describes an autoscaling policy for + Dataproc cluster autoscaler. 
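The error step shared by all of these handlers, raising a typed exception for any status of 400 or above via `core_exceptions.from_http_response`, can be exercised in isolation. The 404 payload below is fabricated purely for demonstration, and poking `_content` relies on `requests` internals:

```python
import requests
from google.api_core import exceptions as core_exceptions

# Fabricate a 404 response, for demonstration only.
response = requests.Response()
response.status_code = 404
response._content = b'{"error": {"message": "policy not found"}}'
response.request = requests.Request(
    "GET", "https://dataproc.googleapis.com/v1/demo"
).prepare()

exc = core_exceptions.from_http_response(response)
print(type(exc).__name__)  # NotFound
```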
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}", + }, + ] + request, metadata = self._interceptor.pre_get_autoscaling_policy( + request, metadata + ) + pb_request = autoscaling_policies.GetAutoscalingPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autoscaling_policies.AutoscalingPolicy() + pb_resp = autoscaling_policies.AutoscalingPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_autoscaling_policy(resp) + return resp + + class _ListAutoscalingPolicies(AutoscalingPolicyServiceRestStub): + def __hash__(self): + return hash("ListAutoscalingPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: autoscaling_policies.ListAutoscalingPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.ListAutoscalingPoliciesResponse: + r"""Call the list autoscaling policies method over HTTP. + + Args: + request (~.autoscaling_policies.ListAutoscalingPoliciesRequest): + The request object. A request to list autoscaling + policies in a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autoscaling_policies.ListAutoscalingPoliciesResponse: + A response to a request to list + autoscaling policies in a project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/autoscalingPolicies", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/regions/*}/autoscalingPolicies", + }, + ] + request, metadata = self._interceptor.pre_list_autoscaling_policies( + request, metadata + ) + pb_request = autoscaling_policies.ListAutoscalingPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autoscaling_policies.ListAutoscalingPoliciesResponse() + pb_resp = autoscaling_policies.ListAutoscalingPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_autoscaling_policies(resp) + return resp + + class _UpdateAutoscalingPolicy(AutoscalingPolicyServiceRestStub): + def __hash__(self): + return hash("UpdateAutoscalingPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: autoscaling_policies.UpdateAutoscalingPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> autoscaling_policies.AutoscalingPolicy: + r"""Call the update autoscaling policy method over HTTP. + + Args: + request (~.autoscaling_policies.UpdateAutoscalingPolicyRequest): + The request object. A request to update an autoscaling + policy. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.autoscaling_policies.AutoscalingPolicy: + Describes an autoscaling policy for + Dataproc cluster autoscaler. 
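At the client level this list handler is wrapped in a pager, so callers never handle page tokens directly. A hedged sketch with a placeholder project and location, assuming default credentials:

```python
from google.cloud import dataproc_v1

client = dataproc_v1.AutoscalingPolicyServiceClient(transport="rest")
for policy in client.list_autoscaling_policies(
    parent="projects/my-project/locations/us-central1"
):
    print(policy.id)  # the pager fetches further pages on demand
```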
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}", + "body": "policy", + }, + { + "method": "put", + "uri": "/v1/{policy.name=projects/*/regions/*/autoscalingPolicies/*}", + "body": "policy", + }, + ] + request, metadata = self._interceptor.pre_update_autoscaling_policy( + request, metadata + ) + pb_request = autoscaling_policies.UpdateAutoscalingPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = autoscaling_policies.AutoscalingPolicy() + pb_resp = autoscaling_policies.AutoscalingPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_autoscaling_policy(resp) + return resp + + @property + def create_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.CreateAutoscalingPolicyRequest], + autoscaling_policies.AutoscalingPolicy, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAutoscalingPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.DeleteAutoscalingPolicyRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteAutoscalingPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.GetAutoscalingPolicyRequest], + autoscaling_policies.AutoscalingPolicy, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAutoscalingPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_autoscaling_policies( + self, + ) -> Callable[ + [autoscaling_policies.ListAutoscalingPoliciesRequest], + autoscaling_policies.ListAutoscalingPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
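Because each property above returns the stub as a plain callable, the transport can also be driven directly, without going through the client. A sketch under the same placeholder-name and default-credential assumptions:

```python
from google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.rest import (
    AutoscalingPolicyServiceRestTransport,
)
from google.cloud.dataproc_v1.types import autoscaling_policies

transport = AutoscalingPolicyServiceRestTransport()  # ADC assumed
get_policy = transport.get_autoscaling_policy  # a _GetAutoscalingPolicy stub
response = get_policy(
    autoscaling_policies.GetAutoscalingPolicyRequest(
        name="projects/my-project/locations/us-central1/autoscalingPolicies/my-policy"
    )
)
print(response.name)
```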
+ # In C++ this would require a dynamic_cast + return self._ListAutoscalingPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_autoscaling_policy( + self, + ) -> Callable[ + [autoscaling_policies.UpdateAutoscalingPolicyRequest], + autoscaling_policies.AutoscalingPolicy, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateAutoscalingPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(AutoscalingPolicyServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(AutoscalingPolicyServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(AutoscalingPolicyServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(AutoscalingPolicyServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(AutoscalingPolicyServiceRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AutoscalingPolicyServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(AutoscalingPolicyServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AutoscalingPolicyServiceRestTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/__init__.py new file mode 100644 index 000000000000..4eb32a036a9f --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import BatchControllerAsyncClient +from .client import BatchControllerClient + +__all__ = ( + "BatchControllerClient", + "BatchControllerAsyncClient", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py new file mode 100644 index 000000000000..54409569643d --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/async_client.py @@ -0,0 +1,1209 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.batch_controller import pagers +from google.cloud.dataproc_v1.types import batches, operations, shared + +from .client import BatchControllerClient +from .transports.base import DEFAULT_CLIENT_INFO, BatchControllerTransport +from .transports.grpc_asyncio import BatchControllerGrpcAsyncIOTransport + + +class BatchControllerAsyncClient: + """The BatchController provides methods to manage batch + workloads. 
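A hedged usage sketch for the async client this file defines, assuming default credentials, placeholder resource names, and an already-created batch:

```python
import asyncio

from google.cloud import dataproc_v1


async def main() -> None:
    client = dataproc_v1.BatchControllerAsyncClient()
    batch = await client.get_batch(
        name="projects/my-project/locations/us-central1/batches/my-batch"
    )
    print(batch.state)


asyncio.run(main())
```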
+ """ + + _client: BatchControllerClient + + DEFAULT_ENDPOINT = BatchControllerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BatchControllerClient.DEFAULT_MTLS_ENDPOINT + + batch_path = staticmethod(BatchControllerClient.batch_path) + parse_batch_path = staticmethod(BatchControllerClient.parse_batch_path) + service_path = staticmethod(BatchControllerClient.service_path) + parse_service_path = staticmethod(BatchControllerClient.parse_service_path) + common_billing_account_path = staticmethod( + BatchControllerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BatchControllerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(BatchControllerClient.common_folder_path) + parse_common_folder_path = staticmethod( + BatchControllerClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + BatchControllerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + BatchControllerClient.parse_common_organization_path + ) + common_project_path = staticmethod(BatchControllerClient.common_project_path) + parse_common_project_path = staticmethod( + BatchControllerClient.parse_common_project_path + ) + common_location_path = staticmethod(BatchControllerClient.common_location_path) + parse_common_location_path = staticmethod( + BatchControllerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BatchControllerAsyncClient: The constructed client. + """ + return BatchControllerClient.from_service_account_info.__func__(BatchControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BatchControllerAsyncClient: The constructed client. + """ + return BatchControllerClient.from_service_account_file.__func__(BatchControllerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BatchControllerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BatchControllerTransport: + """Returns the transport used by the client instance. + + Returns: + BatchControllerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(BatchControllerClient).get_transport_class, type(BatchControllerClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, BatchControllerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the batch controller client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.BatchControllerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
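The endpoint options described in this docstring can be exercised directly when constructing the client; a small sketch, where the regional endpoint shown is a placeholder and not a requirement of this diff:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import dataproc_v1

    # Route requests to a regional service endpoint instead of the global one.
    client = dataproc_v1.BatchControllerAsyncClient(
        client_options=ClientOptions(
            api_endpoint="us-central1-dataproc.googleapis.com:443"
        )
    )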
+ """ + self._client = BatchControllerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_batch( + self, + request: Optional[Union[batches.CreateBatchRequest, dict]] = None, + *, + parent: Optional[str] = None, + batch: Optional[batches.Batch] = None, + batch_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a batch workload that executes + asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_create_batch(): + # Create a client + client = dataproc_v1.BatchControllerAsyncClient() + + # Initialize request argument(s) + batch = dataproc_v1.Batch() + batch.pyspark_batch.main_python_file_uri = "main_python_file_uri_value" + + request = dataproc_v1.CreateBatchRequest( + parent="parent_value", + batch=batch, + ) + + # Make the request + operation = client.create_batch(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.CreateBatchRequest, dict]]): + The request object. A request to create a batch workload. + parent (:class:`str`): + Required. The parent resource where + this batch will be created. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + batch (:class:`google.cloud.dataproc_v1.types.Batch`): + Required. The batch to create. + This corresponds to the ``batch`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + batch_id (:class:`str`): + Optional. The ID to use for the batch, which will become + the final component of the batch's resource name. + + This value must be 4-63 characters. Valid characters are + ``/[a-z][0-9]-/``. + + This corresponds to the ``batch_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataproc_v1.types.Batch` A + representation of a batch workload in the service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, batch, batch_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = batches.CreateBatchRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if batch is not None: + request.batch = batch + if batch_id is not None: + request.batch_id = batch_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_batch, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + batches.Batch, + metadata_type=operations.BatchOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_batch( + self, + request: Optional[Union[batches.GetBatchRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> batches.Batch: + r"""Gets the batch workload resource representation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_get_batch(): + # Create a client + client = dataproc_v1.BatchControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.GetBatchRequest( + name="name_value", + ) + + # Make the request + response = await client.get_batch(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.GetBatchRequest, dict]]): + The request object. A request to get the resource + representation for a batch workload. + name (:class:`str`): + Required. The fully qualified name of the batch to + retrieve in the format + "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Batch: + A representation of a batch workload + in the service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = batches.GetBatchRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_batch, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_batches( + self, + request: Optional[Union[batches.ListBatchesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBatchesAsyncPager: + r"""Lists batch workloads. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_list_batches(): + # Create a client + client = dataproc_v1.BatchControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.ListBatchesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_batches(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.ListBatchesRequest, dict]]): + The request object. A request to list batch workloads in + a project. + parent (:class:`str`): + Required. The parent, which owns this + collection of batches. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesAsyncPager: + A list of batch workloads. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = batches.ListBatchesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
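As the guards above show, the generated surface accepts either a request object or flattened fields, never both. A short sketch of the two call styles (resource names are placeholders):

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()
    name = "projects/my-project/locations/us-central1/batches/my-batch"

    # Style 1: pass a fully populated request object.
    batch = client.get_batch(request=dataproc_v1.GetBatchRequest(name=name))

    # Style 2: pass the flattened field instead.
    batch = client.get_batch(name=name)

    # Passing both at once raises ValueError, per the check implemented above.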
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_batches, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBatchesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_batch( + self, + request: Optional[Union[batches.DeleteBatchRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the batch workload resource. If the batch is not in + terminal state, the delete fails and the response returns + ``FAILED_PRECONDITION``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_delete_batch(): + # Create a client + client = dataproc_v1.BatchControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.DeleteBatchRequest( + name="name_value", + ) + + # Make the request + await client.delete_batch(request=request) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.DeleteBatchRequest, dict]]): + The request object. A request to delete a batch workload. + name (:class:`str`): + Required. The fully qualified name of the batch to + retrieve in the format + "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = batches.DeleteBatchRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
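The pager wrapping above is what makes ``list_batches`` iterable across pages. A minimal consumption sketch (the parent value is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1

    async def main():
        client = dataproc_v1.BatchControllerAsyncClient()
        pager = await client.list_batches(
            parent="projects/my-project/locations/us-central1"
        )
        # Additional pages are fetched transparently during iteration.
        async for batch in pager:
            print(batch.name)

    asyncio.run(main())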
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_batch, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
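The operations mixins above take raw ``operations_pb2`` requests, or dicts that are expanded into them. A sketch of listing regional operations (project and region are placeholders):

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1

    async def main():
        client = dataproc_v1.BatchControllerAsyncClient()
        response = await client.list_operations(
            {"name": "projects/my-project/regions/us-central1/operations"}
        )
        for operation in response.operations:
            print(operation.name, operation.done)

    asyncio.run(main())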
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
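Each method above attaches the resource name as a routing header so the backend can route the request. Roughly what the helper produces (the exact encoding depends on the installed google-api-core version):

.. code-block:: python

    from google.api_core import gapic_v1

    metadata = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/my-project/regions/us-central1/operations/op-1"),)
    )
    # A single ("x-goog-request-params", "name=...") entry whose value is
    # URL-encoded, e.g. "name=projects%2Fmy-project%2F...".
    print(metadata)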
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
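The IAM mixins documented above operate on whichever Dataproc resource is named in ``resource``. A hypothetical sketch of reading a policy, where the resource path is a placeholder for an IAM-enabled Dataproc resource:

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2

    async def main():
        client = dataproc_v1.BatchControllerAsyncClient()
        policy = await client.get_iam_policy(
            iam_policy_pb2.GetIamPolicyRequest(
                resource="projects/my-project/regions/us-central1/clusters/my-cluster"
            )
        )
        for binding in policy.bindings:
            print(binding.role, list(binding.members))

    asyncio.run(main())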
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "BatchControllerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BatchControllerAsyncClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py new file mode 100644 index 000000000000..249cb827f016 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py @@ -0,0 +1,1465 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.batch_controller import pagers +from google.cloud.dataproc_v1.types import batches, operations, shared + +from .transports.base import DEFAULT_CLIENT_INFO, BatchControllerTransport +from .transports.grpc import BatchControllerGrpcTransport +from .transports.grpc_asyncio import BatchControllerGrpcAsyncIOTransport +from .transports.rest import BatchControllerRestTransport + + +class BatchControllerClientMeta(type): + """Metaclass for the BatchController client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[BatchControllerTransport]] + _transport_registry["grpc"] = BatchControllerGrpcTransport + _transport_registry["grpc_asyncio"] = BatchControllerGrpcAsyncIOTransport + _transport_registry["rest"] = BatchControllerRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[BatchControllerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BatchControllerClient(metaclass=BatchControllerClientMeta): + """The BatchController provides methods to manage batch + workloads. 
+ """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataproc.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BatchControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BatchControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BatchControllerTransport: + """Returns the transport used by the client instance. + + Returns: + BatchControllerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def batch_path( + project: str, + location: str, + batch: str, + ) -> str: + """Returns a fully-qualified batch string.""" + return "projects/{project}/locations/{location}/batches/{batch}".format( + project=project, + location=location, + batch=batch, + ) + + @staticmethod + def parse_batch_path(path: str) -> Dict[str, str]: + """Parses a batch path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/batches/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def service_path( + project: str, + location: str, + service: str, + ) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/services/{service}".format( + project=project, + location=location, + service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, BatchControllerTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the batch controller client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, BatchControllerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. 
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, BatchControllerTransport): + # transport is a BatchControllerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_batch( + self, + request: Optional[Union[batches.CreateBatchRequest, dict]] = None, + *, + parent: Optional[str] = None, + batch: Optional[batches.Batch] = None, + batch_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a batch workload that executes + asynchronously. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_create_batch(): + # Create a client + client = dataproc_v1.BatchControllerClient() + + # Initialize request argument(s) + batch = dataproc_v1.Batch() + batch.pyspark_batch.main_python_file_uri = "main_python_file_uri_value" + + request = dataproc_v1.CreateBatchRequest( + parent="parent_value", + batch=batch, + ) + + # Make the request + operation = client.create_batch(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.CreateBatchRequest, dict]): + The request object. A request to create a batch workload. + parent (str): + Required. The parent resource where + this batch will be created. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + batch (google.cloud.dataproc_v1.types.Batch): + Required. The batch to create. + This corresponds to the ``batch`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + batch_id (str): + Optional. The ID to use for the batch, which will become + the final component of the batch's resource name. + + This value must be 4-63 characters. Valid characters are + ``/[a-z][0-9]-/``. + + This corresponds to the ``batch_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataproc_v1.types.Batch` A + representation of a batch workload in the service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, batch, batch_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a batches.CreateBatchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, batches.CreateBatchRequest): + request = batches.CreateBatchRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if batch is not None: + request.batch = batch + if batch_id is not None: + request.batch_id = batch_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_batch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + batches.Batch, + metadata_type=operations.BatchOperationMetadata, + ) + + # Done; return the response. + return response + + def get_batch( + self, + request: Optional[Union[batches.GetBatchRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> batches.Batch: + r"""Gets the batch workload resource representation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_get_batch(): + # Create a client + client = dataproc_v1.BatchControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.GetBatchRequest( + name="name_value", + ) + + # Make the request + response = client.get_batch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.GetBatchRequest, dict]): + The request object. A request to get the resource + representation for a batch workload. + name (str): + Required. The fully qualified name of the batch to + retrieve in the format + "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Batch: + A representation of a batch workload + in the service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a batches.GetBatchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, batches.GetBatchRequest): + request = batches.GetBatchRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
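Because the raw LRO is wrapped via ``operation.from_gapic`` with ``metadata_type=operations.BatchOperationMetadata``, the returned future exposes typed metadata alongside the typed result. A sketch of inspecting both, assuming the v1 metadata message carries ``batch`` and ``operation_type`` fields (all resource values hypothetical):

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()
    batch = dataproc_v1.Batch()
    batch.pyspark_batch.main_python_file_uri = "gs://my-bucket/job.py"  # hypothetical

    operation = client.create_batch(
        parent="projects/my-project/locations/us-central1",  # hypothetical
        batch=batch,
    )

    # Typed metadata: deserialized as BatchOperationMetadata by the wrapper above
    # (may be empty until the server first reports progress).
    md = operation.metadata
    print(md.batch, md.operation_type)  # assumed v1 metadata fields

    # Typed result: blocks until the LRO completes, then returns a Batch message.
    result = operation.result(timeout=600)
    print(result.state)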
+ rpc = self._transport._wrapped_methods[self._transport.get_batch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_batches( + self, + request: Optional[Union[batches.ListBatchesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBatchesPager: + r"""Lists batch workloads. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_list_batches(): + # Create a client + client = dataproc_v1.BatchControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.ListBatchesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_batches(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.ListBatchesRequest, dict]): + The request object. A request to list batch workloads in + a project. + parent (str): + Required. The parent, which owns this + collection of batches. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesPager: + A list of batch workloads. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a batches.ListBatchesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, batches.ListBatchesRequest): + request = batches.ListBatchesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_batches] + + # Certain fields should be provided within the metadata header; + # add these here. 
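``list_batches`` returns a pager rather than a single response: the routing header added just below steers each request to the right regional backend, and the pager wrapping (next hunk) resolves ``next_page_token`` transparently. A sketch with a hypothetical parent and an explicit page size:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()

    request = dataproc_v1.ListBatchesRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical
        page_size=50,  # upper bound per page; the pager fetches later pages lazily
    )

    for batch in client.list_batches(request=request):
        print(batch.name, batch.state)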
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBatchesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_batch( + self, + request: Optional[Union[batches.DeleteBatchRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the batch workload resource. If the batch is not in + terminal state, the delete fails and the response returns + ``FAILED_PRECONDITION``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_delete_batch(): + # Create a client + client = dataproc_v1.BatchControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.DeleteBatchRequest( + name="name_value", + ) + + # Make the request + client.delete_batch(request=request) + + Args: + request (Union[google.cloud.dataproc_v1.types.DeleteBatchRequest, dict]): + The request object. A request to delete a batch workload. + name (str): + Required. The fully qualified name of the batch to + retrieve in the format + "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a batches.DeleteBatchRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, batches.DeleteBatchRequest): + request = batches.DeleteBatchRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_batch] + + # Certain fields should be provided within the metadata header; + # add these here. 
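As the ``delete_batch`` docstring says, deleting a batch that is not yet in a terminal state fails with ``FAILED_PRECONDITION``, which ``google-api-core`` surfaces as an exception. A defensive sketch (the resource name is a hypothetical placeholder):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()
    name = "projects/my-project/locations/us-central1/batches/my-batch-001"  # hypothetical

    try:
        client.delete_batch(name=name)
    except core_exceptions.FailedPrecondition:
        # The batch is still pending or running; wait for a terminal state
        # (or cancel the underlying operation) before deleting.
        print("Batch is not in a terminal state yet.")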
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "BatchControllerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
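The ``__enter__``/``__exit__`` pair above makes the client usable as a context manager, subject to the warning that exiting closes the transport. A sketch of the single-owner pattern this is intended for (resource name hypothetical):

.. code-block:: python

    from google.cloud import dataproc_v1

    # Safe only when this client is the sole owner of its transport
    # (see the warning on __exit__ above); the transport closes on exit.
    with dataproc_v1.BatchControllerClient() as client:
        batch = client.get_batch(
            name="projects/my-project/locations/us-central1/batches/my-batch-001"  # hypothetical
        )
        print(batch.state)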
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
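The operations mix-ins accept either an ``operations_pb2`` message or a plain dict, which the ``isinstance(request, dict)`` branches above expand via keyword arguments. A sketch that checks an operation and then requests best-effort cancellation (the operation name is hypothetical):

.. code-block:: python

    from google.longrunning import operations_pb2

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()
    op_name = "projects/my-project/locations/us-central1/operations/abc123"  # hypothetical

    op = client.get_operation(request=operations_pb2.GetOperationRequest(name=op_name))
    if not op.done:
        # Dict form works too; it is keyword-expanded into CancelOperationRequest.
        client.cancel_operation(request={"name": op_name})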
+ rpc = gapic_v1.method.wrap_method(
+ self._transport.cancel_operation,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ def set_iam_policy(
+ self,
+ request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Sets the IAM access control policy on the specified function.
+
+ Replaces any existing policy.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
+ The request object. Request message for `SetIamPolicy`
+ method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members`` to a single
+ ``role``. Members can be user accounts, service
+ accounts, Google groups, and domains (such as G Suite).
+ A ``role`` is a named list of permissions (defined by
+ IAM or configured by users). A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
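``SetIamPolicyRequest`` is a raw protobuf rather than a proto-plus type, so it is constructed directly (or keyword-expanded from a dict, as above). A sketch that grants a single role, loosely mirroring the docstring's JSON example (resource name and member are hypothetical):

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2, policy_pb2

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()

    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/viewer",
                members=["user:eve@example.com"],  # hypothetical member
            )
        ]
    )

    response = client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(
            resource="projects/my-project/locations/us-central1/batches/my-batch-001",  # hypothetical
            policy=policy,
        )
    )
    print(response.bindings)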
+ rpc = gapic_v1.method.wrap_method(
+ self._transport.set_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_iam_policy(
+ self,
+ request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the IAM access control policy for a function.
+
+ Returns an empty policy if the function exists and does not have a
+ policy set.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+ The request object. Request message for `GetIamPolicy`
+ method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if
+ any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members`` to a single
+ ``role``. Members can be user accounts, service
+ accounts, Google groups, and domains (such as G Suite).
+ A ``role`` is a named list of permissions (defined by
+ IAM or configured by users). A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
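A common companion pattern is read-modify-write: fetch the current policy with ``get_iam_policy``, mutate the bindings locally, and write it back, letting the policy's ``etag`` guard against concurrent edits. A sketch (resource name and member hypothetical):

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()
    resource = "projects/my-project/locations/us-central1/batches/my-batch-001"  # hypothetical

    policy = client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )
    policy.bindings.add(role="roles/viewer", members=["user:eve@example.com"])

    # The fetched etag rides along, so a concurrent modification fails the write.
    client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )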
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("BatchControllerClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/pagers.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/pagers.py new file mode 100644 index 000000000000..f563fc9e1f75 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dataproc_v1.types import batches + + +class ListBatchesPager: + """A pager for iterating through ``list_batches`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListBatchesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``batches`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBatches`` requests and continue to iterate + through the ``batches`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListBatchesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., batches.ListBatchesResponse], + request: batches.ListBatchesRequest, + response: batches.ListBatchesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListBatchesRequest): + The initial request object. + response (google.cloud.dataproc_v1.types.ListBatchesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = batches.ListBatchesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[batches.ListBatchesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[batches.Batch]: + for page in self.pages: + yield from page.batches + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBatchesAsyncPager: + """A pager for iterating through ``list_batches`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListBatchesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``batches`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBatches`` requests and continue to iterate + through the ``batches`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListBatchesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[batches.ListBatchesResponse]], + request: batches.ListBatchesRequest, + response: batches.ListBatchesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListBatchesRequest): + The initial request object. + response (google.cloud.dataproc_v1.types.ListBatchesResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = batches.ListBatchesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[batches.ListBatchesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[batches.Batch]: + async def async_generator(): + async for page in self.pages: + for response in page.batches: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/__init__.py new file mode 100644 index 000000000000..b9598f2c4b8a --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BatchControllerTransport +from .grpc import BatchControllerGrpcTransport +from .grpc_asyncio import BatchControllerGrpcAsyncIOTransport +from .rest import BatchControllerRestInterceptor, BatchControllerRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[BatchControllerTransport]] +_transport_registry["grpc"] = BatchControllerGrpcTransport +_transport_registry["grpc_asyncio"] = BatchControllerGrpcAsyncIOTransport +_transport_registry["rest"] = BatchControllerRestTransport + +__all__ = ( + "BatchControllerTransport", + "BatchControllerGrpcTransport", + "BatchControllerGrpcAsyncIOTransport", + "BatchControllerRestTransport", + "BatchControllerRestInterceptor", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/base.py new file mode 100644 index 000000000000..3140199c7634 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/base.py @@ -0,0 +1,267 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
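Both pagers resolve later pages lazily through the ``next_page_token`` loop shown above: plain iteration yields individual ``Batch`` messages, while the ``pages`` property yields whole responses. A usage sketch of each flavor (parent hypothetical; the async variant assumes the generated ``BatchControllerAsyncClient``):

.. code-block:: python

    from google.cloud import dataproc_v1

    parent = "projects/my-project/locations/us-central1"  # hypothetical

    # Sync: walk page-by-page via `pages`, or iterate batches directly.
    client = dataproc_v1.BatchControllerClient()
    pager = client.list_batches(parent=parent)
    for page in pager.pages:
        print(len(page.batches), "batches on this page")

    # Async: awaiting list_batches yields a ListBatchesAsyncPager.
    async def list_all():
        async_client = dataproc_v1.BatchControllerAsyncClient()
        async_pager = await async_client.list_batches(parent=parent)
        async for batch in async_pager:
            print(batch.name)
    # e.g. asyncio.run(list_all())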
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version +from google.cloud.dataproc_v1.types import batches + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class BatchControllerTransport(abc.ABC): + """Abstract transport class for BatchController.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "dataproc.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
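The credential resolution that follows gives ``credentials_file`` and explicit ``credentials`` equal but mutually exclusive standing, falling back to application default credentials when neither is set. In practice both are usually supplied through ``client_options`` rather than by constructing a transport directly; a sketch (file path hypothetical):

.. code-block:: python

    from google.cloud import dataproc_v1

    # credentials_file is mutually exclusive with credentials (enforced below)
    # and is loaded via google.auth.load_credentials_from_file.
    client = dataproc_v1.BatchControllerClient(
        client_options={"credentials_file": "/path/to/service-account.json"}  # hypothetical path
    )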
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_batch: gapic_v1.method.wrap_method( + self.create_batch, + default_timeout=None, + client_info=client_info, + ), + self.get_batch: gapic_v1.method.wrap_method( + self.get_batch, + default_timeout=None, + client_info=client_info, + ), + self.list_batches: gapic_v1.method.wrap_method( + self.list_batches, + default_timeout=None, + client_info=client_info, + ), + self.delete_batch: gapic_v1.method.wrap_method( + self.delete_batch, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_batch( + self, + ) -> Callable[ + [batches.CreateBatchRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_batch( + self, + ) -> Callable[ + [batches.GetBatchRequest], Union[batches.Batch, Awaitable[batches.Batch]] + ]: + raise NotImplementedError() + + @property + def list_batches( + self, + ) -> Callable[ + [batches.ListBatchesRequest], + Union[batches.ListBatchesResponse, Awaitable[batches.ListBatchesResponse]], + ]: + raise NotImplementedError() + + @property + def delete_batch( + self, + ) -> Callable[ + [batches.DeleteBatchRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("BatchControllerTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py new file mode 100644 index 000000000000..3cc3ed17d9e6 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py @@ -0,0 +1,512 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dataproc_v1.types import batches + +from .base import DEFAULT_CLIENT_INFO, BatchControllerTransport + + +class BatchControllerGrpcTransport(BatchControllerTransport): + """gRPC backend transport for BatchController. + + The BatchController provides methods to manage batch + workloads. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_batch( + self, + ) -> Callable[[batches.CreateBatchRequest], operations_pb2.Operation]: + r"""Return a callable for the create batch method over gRPC. + + Creates a batch workload that executes + asynchronously. + + Returns: + Callable[[~.CreateBatchRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_batch" not in self._stubs: + self._stubs["create_batch"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.BatchController/CreateBatch", + request_serializer=batches.CreateBatchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_batch"] + + @property + def get_batch(self) -> Callable[[batches.GetBatchRequest], batches.Batch]: + r"""Return a callable for the get batch method over gRPC. + + Gets the batch workload resource representation. + + Returns: + Callable[[~.GetBatchRequest], + ~.Batch]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
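Because ``__init__`` treats an explicit ``channel`` as authoritative (credentials are ignored and the stubs above are created on it), callers can build a channel themselves, for example with the ``create_channel`` classmethod, and inject it. A sketch:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.cloud.dataproc_v1.services.batch_controller.transports import (
        BatchControllerGrpcTransport,
    )

    # Build a channel with the classmethod above; credentials=None falls back
    # to application default credentials.
    channel = BatchControllerGrpcTransport.create_channel(
        "dataproc.googleapis.com",
        credentials=None,
    )

    # The transport adopts the channel as-is; the client adopts the transport.
    transport = BatchControllerGrpcTransport(channel=channel)
    client = dataproc_v1.BatchControllerClient(transport=transport)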
+ if "get_batch" not in self._stubs: + self._stubs["get_batch"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.BatchController/GetBatch", + request_serializer=batches.GetBatchRequest.serialize, + response_deserializer=batches.Batch.deserialize, + ) + return self._stubs["get_batch"] + + @property + def list_batches( + self, + ) -> Callable[[batches.ListBatchesRequest], batches.ListBatchesResponse]: + r"""Return a callable for the list batches method over gRPC. + + Lists batch workloads. + + Returns: + Callable[[~.ListBatchesRequest], + ~.ListBatchesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_batches" not in self._stubs: + self._stubs["list_batches"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.BatchController/ListBatches", + request_serializer=batches.ListBatchesRequest.serialize, + response_deserializer=batches.ListBatchesResponse.deserialize, + ) + return self._stubs["list_batches"] + + @property + def delete_batch(self) -> Callable[[batches.DeleteBatchRequest], empty_pb2.Empty]: + r"""Return a callable for the delete batch method over gRPC. + + Deletes the batch workload resource. If the batch is not in + terminal state, the delete fails and the response returns + ``FAILED_PRECONDITION``. + + Returns: + Callable[[~.DeleteBatchRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_batch" not in self._stubs: + self._stubs["delete_batch"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.BatchController/DeleteBatch", + request_serializer=batches.DeleteBatchRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_batch"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
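+        # The IAM request/response types are plain protobuf messages rather
+        # than proto-plus wrappers, which is why these stubs use
+        # SerializeToString/FromString instead of the serialize/deserialize
+        # helpers used for the ``batches`` messages above.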
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("BatchControllerGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3fed41d8cf92 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py @@ -0,0 +1,517 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataproc_v1.types import batches + +from .base import DEFAULT_CLIENT_INFO, BatchControllerTransport +from .grpc import BatchControllerGrpcTransport + + +class BatchControllerGrpcAsyncIOTransport(BatchControllerTransport): + """gRPC AsyncIO backend transport for BatchController. 
+
+    The BatchController provides methods to manage batch
+    workloads.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_batch( + self, + ) -> Callable[[batches.CreateBatchRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create batch method over gRPC. + + Creates a batch workload that executes + asynchronously. + + Returns: + Callable[[~.CreateBatchRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_batch" not in self._stubs: + self._stubs["create_batch"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.BatchController/CreateBatch", + request_serializer=batches.CreateBatchRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_batch"] + + @property + def get_batch( + self, + ) -> Callable[[batches.GetBatchRequest], Awaitable[batches.Batch]]: + r"""Return a callable for the get batch method over gRPC. + + Gets the batch workload resource representation. + + Returns: + Callable[[~.GetBatchRequest], + Awaitable[~.Batch]]: + A function that, when called, will call the underlying RPC + on the server. 
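+
+        For example (an illustrative sketch; the transport instance and
+        resource name are hypothetical)::
+
+            request = batches.GetBatchRequest(
+                name="projects/my-project/locations/us-central1/batches/my-batch"
+            )
+            batch = await transport.get_batch(request)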
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_batch" not in self._stubs: + self._stubs["get_batch"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.BatchController/GetBatch", + request_serializer=batches.GetBatchRequest.serialize, + response_deserializer=batches.Batch.deserialize, + ) + return self._stubs["get_batch"] + + @property + def list_batches( + self, + ) -> Callable[[batches.ListBatchesRequest], Awaitable[batches.ListBatchesResponse]]: + r"""Return a callable for the list batches method over gRPC. + + Lists batch workloads. + + Returns: + Callable[[~.ListBatchesRequest], + Awaitable[~.ListBatchesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_batches" not in self._stubs: + self._stubs["list_batches"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.BatchController/ListBatches", + request_serializer=batches.ListBatchesRequest.serialize, + response_deserializer=batches.ListBatchesResponse.deserialize, + ) + return self._stubs["list_batches"] + + @property + def delete_batch( + self, + ) -> Callable[[batches.DeleteBatchRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete batch method over gRPC. + + Deletes the batch workload resource. If the batch is not in + terminal state, the delete fails and the response returns + ``FAILED_PRECONDITION``. + + Returns: + Callable[[~.DeleteBatchRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_batch" not in self._stubs: + self._stubs["delete_batch"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.BatchController/DeleteBatch", + request_serializer=batches.DeleteBatchRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_batch"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("BatchControllerGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py new file mode 100644 index 000000000000..5b227bafb4f0 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py @@ -0,0 +1,1462 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataproc_v1.types import batches + +from .base import BatchControllerTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class BatchControllerRestInterceptor: + """Interceptor for BatchController. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BatchControllerRestTransport. + + .. code-block:: python + class MyCustomBatchControllerInterceptor(BatchControllerRestInterceptor): + def pre_create_batch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_batch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_batch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_batch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_batch(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_batches(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_batches(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BatchControllerRestTransport(interceptor=MyCustomBatchControllerInterceptor()) + client = BatchControllerClient(transport=transport) + + + """ + + def pre_create_batch( + self, request: batches.CreateBatchRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[batches.CreateBatchRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_batch + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. 
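+
+        For example, a subclass could append extra request metadata (an
+        illustrative sketch; the header name is made up)::
+
+            def pre_create_batch(self, request, metadata):
+                return request, list(metadata) + [("x-example-header", "demo")]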
+ """ + return request, metadata + + def post_create_batch( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_batch + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. + """ + return response + + def pre_delete_batch( + self, request: batches.DeleteBatchRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[batches.DeleteBatchRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_batch + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def pre_get_batch( + self, request: batches.GetBatchRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[batches.GetBatchRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_batch + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def post_get_batch(self, response: batches.Batch) -> batches.Batch: + """Post-rpc interceptor for get_batch + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. + """ + return response + + def pre_list_batches( + self, request: batches.ListBatchesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[batches.ListBatchesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_batches + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def post_list_batches( + self, response: batches.ListBatchesResponse + ) -> batches.ListBatchesResponse: + """Post-rpc interceptor for list_batches + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. 
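+
+        Returning ``response`` unchanged preserves the default behavior.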
+ """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BatchController server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchController server. 
+        """
+        return request, metadata
+
+    def post_list_operations(
+        self, response: operations_pb2.ListOperationsResponse
+    ) -> operations_pb2.ListOperationsResponse:
+        """Post-rpc interceptor for list_operations
+
+        Override in a subclass to manipulate the response
+        after it is returned by the BatchController server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class BatchControllerRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: BatchControllerRestInterceptor
+
+
+class BatchControllerRestTransport(BatchControllerTransport):
+    """REST backend transport for BatchController.
+
+    The BatchController provides methods to manage batch
+    workloads.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[BatchControllerRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or BatchControllerRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel",
+                    },
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+                    },
+                ],
+                "google.longrunning.Operations.DeleteOperation": [
+                    {
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/regions/*/operations/*}",
+                    },
+                    {
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/regions/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/regions/*/operations}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations}",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    class _CreateBatch(BatchControllerRestStub):
+        def __hash__(self):
+            return hash("CreateBatch")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: batches.CreateBatchRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the create batch method over HTTP.
+
+            Args:
+                request (~.batches.CreateBatchRequest):
+                    The request object. A request to create a batch workload.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/batches", + "body": "batch", + }, + ] + request, metadata = self._interceptor.pre_create_batch(request, metadata) + pb_request = batches.CreateBatchRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_batch(resp) + return resp + + class _DeleteBatch(BatchControllerRestStub): + def __hash__(self): + return hash("DeleteBatch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: batches.DeleteBatchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete batch method over HTTP. + + Args: + request (~.batches.DeleteBatchRequest): + The request object. A request to delete a batch workload. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
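+
+            A successful call returns nothing; HTTP errors are raised as
+            :class:`google.api_core.exceptions.GoogleAPICallError` subclasses.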
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/batches/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_batch(request, metadata) + pb_request = batches.DeleteBatchRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetBatch(BatchControllerRestStub): + def __hash__(self): + return hash("GetBatch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: batches.GetBatchRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> batches.Batch: + r"""Call the get batch method over HTTP. + + Args: + request (~.batches.GetBatchRequest): + The request object. A request to get the resource + representation for a batch workload. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.batches.Batch: + A representation of a batch workload + in the service. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/batches/*}", + }, + ] + request, metadata = self._interceptor.pre_get_batch(request, metadata) + pb_request = batches.GetBatchRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
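+            # ``from_http_response`` maps the status code to the matching
+            # exception type (for example, 404 -> NotFound,
+            # 403 -> PermissionDenied), which is then raised.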
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = batches.Batch() + pb_resp = batches.Batch.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_batch(resp) + return resp + + class _ListBatches(BatchControllerRestStub): + def __hash__(self): + return hash("ListBatches") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: batches.ListBatchesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> batches.ListBatchesResponse: + r"""Call the list batches method over HTTP. + + Args: + request (~.batches.ListBatchesRequest): + The request object. A request to list batch workloads in + a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.batches.ListBatchesResponse: + A list of batch workloads. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/batches", + }, + ] + request, metadata = self._interceptor.pre_list_batches(request, metadata) + pb_request = batches.ListBatchesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = batches.ListBatchesResponse() + pb_resp = batches.ListBatchesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_batches(resp) + return resp + + @property + def create_batch( + self, + ) -> Callable[[batches.CreateBatchRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBatch(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_batch(self) -> Callable[[batches.DeleteBatchRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBatch(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_batch(self) -> Callable[[batches.GetBatchRequest], batches.Batch]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBatch(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_batches( + self, + ) -> Callable[[batches.ListBatchesRequest], batches.ListBatchesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBatches(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(BatchControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(BatchControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(BatchControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(BatchControllerRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(BatchControllerRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(BatchControllerRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(BatchControllerRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("BatchControllerRestTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/__init__.py new file mode 100644 index 000000000000..2b6ba887a8cb --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ClusterControllerAsyncClient +from .client import ClusterControllerClient + +__all__ = ( + "ClusterControllerClient", + "ClusterControllerAsyncClient", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py new file mode 100644 index 000000000000..67febfbc385a --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -0,0 +1,2000 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.cluster_controller import pagers +from google.cloud.dataproc_v1.types import clusters, operations + +from .client import ClusterControllerClient +from .transports.base import DEFAULT_CLIENT_INFO, ClusterControllerTransport +from .transports.grpc_asyncio import ClusterControllerGrpcAsyncIOTransport + + +class ClusterControllerAsyncClient: + """The ClusterControllerService provides methods to manage + clusters of Compute Engine instances. 
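+
+    A minimal usage sketch (run from within a coroutine; the project,
+    region, and cluster names below are placeholder values)::
+
+        client = ClusterControllerAsyncClient()
+        cluster = await client.get_cluster(
+            project_id="my-project",
+            region="us-central1",
+            cluster_name="my-cluster",
+        )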
+ """ + + _client: ClusterControllerClient + + DEFAULT_ENDPOINT = ClusterControllerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ClusterControllerClient.DEFAULT_MTLS_ENDPOINT + + cluster_path = staticmethod(ClusterControllerClient.cluster_path) + parse_cluster_path = staticmethod(ClusterControllerClient.parse_cluster_path) + node_group_path = staticmethod(ClusterControllerClient.node_group_path) + parse_node_group_path = staticmethod(ClusterControllerClient.parse_node_group_path) + service_path = staticmethod(ClusterControllerClient.service_path) + parse_service_path = staticmethod(ClusterControllerClient.parse_service_path) + common_billing_account_path = staticmethod( + ClusterControllerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ClusterControllerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ClusterControllerClient.common_folder_path) + parse_common_folder_path = staticmethod( + ClusterControllerClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ClusterControllerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ClusterControllerClient.parse_common_organization_path + ) + common_project_path = staticmethod(ClusterControllerClient.common_project_path) + parse_common_project_path = staticmethod( + ClusterControllerClient.parse_common_project_path + ) + common_location_path = staticmethod(ClusterControllerClient.common_location_path) + parse_common_location_path = staticmethod( + ClusterControllerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerAsyncClient: The constructed client. + """ + return ClusterControllerClient.from_service_account_info.__func__(ClusterControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerAsyncClient: The constructed client. + """ + return ClusterControllerClient.from_service_account_file.__func__(ClusterControllerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return ClusterControllerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> ClusterControllerTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ClusterControllerTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(ClusterControllerClient).get_transport_class, type(ClusterControllerClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, ClusterControllerTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cluster controller client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.ClusterControllerTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
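+
+        A sketch of pointing the client at a regional endpoint through
+        ``client_options`` (the endpoint value shown is illustrative, not
+        a required setting)::
+
+            from google.api_core.client_options import ClientOptions
+
+            client = ClusterControllerAsyncClient(
+                client_options=ClientOptions(
+                    api_endpoint="us-central1-dataproc.googleapis.com"
+                )
+            )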
+ """ + self._client = ClusterControllerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_cluster( + self, + request: Optional[Union[clusters.CreateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster: Optional[clusters.Cluster] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will + be + `ClusterOperationMetadata `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_create_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerAsyncClient() + + # Initialize request argument(s) + cluster = dataproc_v1.Cluster() + cluster.project_id = "project_id_value" + cluster.cluster_name = "cluster_name_value" + + request = dataproc_v1.CreateClusterRequest( + project_id="project_id_value", + region="region_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.CreateClusterRequest, dict]]): + The request object. A request to create a cluster. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.dataproc_v1.types.Cluster`): + Required. The cluster to create. + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a Dataproc cluster + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, region, cluster]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clusters.CreateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster is not None: + request.cluster = cluster + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + async def update_cluster( + self, + request: Optional[Union[clusters.UpdateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster_name: Optional[str] = None, + cluster: Optional[clusters.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will + be + `ClusterOperationMetadata `__. + The cluster must be in a + [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] + state or an error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_update_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerAsyncClient() + + # Initialize request argument(s) + cluster = dataproc_v1.Cluster() + cluster.project_id = "project_id_value" + cluster.cluster_name = "cluster_name_value" + + request = dataproc_v1.UpdateClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.UpdateClusterRequest, dict]]): + The request object. A request to update a cluster. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project the cluster belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_name (:class:`str`): + Required. The cluster name. + This corresponds to the ``cluster_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (:class:`google.cloud.dataproc_v1.types.Cluster`): + Required. The changes to the cluster. + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Specifies the path, relative to ``Cluster``, + of the field to update. For example, to change the + number of workers in a cluster to 5, the ``update_mask`` + parameter would be specified as + ``config.worker_config.num_instances``, and the + ``PATCH`` request body would specify the new value, as + follows: + + :: + + { + "config":{ + "workerConfig":{ + "numInstances":"5" + } + } + } + + Similarly, to change the number of preemptible workers + in a cluster to 5, the ``update_mask`` parameter would + be ``config.secondary_worker_config.num_instances``, and + the ``PATCH`` request body would be set as follows: + + :: + + { + "config":{ + "secondaryWorkerConfig":{ + "numInstances":"5" + } + } + } + + Note: Currently, only the following fields can be + updated: + + .. raw:: html + + + + + + + + + + + + + + + + + + + + + + + +
+                     <table>
+                     <tbody>
+                     <tr>
+                     <td><strong>Mask</strong></td>
+                     <td><strong>Purpose</strong></td>
+                     </tr>
+                     <tr>
+                     <td><strong><em>labels</em></strong></td>
+                     <td>Update labels</td>
+                     </tr>
+                     <tr>
+                     <td><strong><em>config.worker_config.num_instances</em></strong></td>
+                     <td>Resize primary worker group</td>
+                     </tr>
+                     <tr>
+                     <td><strong><em>config.secondary_worker_config.num_instances</em></strong></td>
+                     <td>Resize secondary worker group</td>
+                     </tr>
+                     <tr>
+                     <td><strong><em>config.autoscaling_config.policy_uri</em></strong></td>
+                     <td>Use, stop using, or
+                     change autoscaling policies</td>
+                     </tr>
+                     </tbody>
+                     </table>
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a Dataproc cluster + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, region, cluster_name, cluster, update_mask] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clusters.UpdateClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster_name is not None: + request.cluster_name = cluster_name + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + async def stop_cluster( + self, + request: Optional[Union[clusters.StopClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Stops a cluster in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_stop_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.StopClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + operation = client.stop_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.StopClusterRequest, dict]]): + The request object. A request to stop a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a Dataproc cluster + + """ + # Create or coerce a protobuf request object. + request = clusters.StopClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + async def start_cluster( + self, + request: Optional[Union[clusters.StartClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a cluster in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_start_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.StartClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + operation = client.start_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.StartClusterRequest, dict]]): + The request object. A request to start a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a Dataproc cluster + + """ + # Create or coerce a protobuf request object. + request = clusters.StartClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_cluster( + self, + request: Optional[Union[clusters.DeleteClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will + be + `ClusterOperationMetadata `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_delete_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.DeleteClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.DeleteClusterRequest, dict]]): + The request object. A request to delete a cluster. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_name (:class:`str`): + Required. The cluster name. + This corresponds to the ``cluster_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, cluster_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clusters.DeleteClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster_name is not None: + request.cluster_name = cluster_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
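+        # The default policy below retries only `ServiceUnavailable`, backing
+        # off exponentially from 0.1s to at most 60s per attempt (multiplier
+        # 1.3) until the overall 300s deadline is reached.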
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_cluster( + self, + request: Optional[Union[clusters.GetClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clusters.Cluster: + r"""Gets the resource representation for a cluster in a + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_get_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.GetClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + response = await client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.GetClusterRequest, dict]]): + The request object. Request to get the resource + representation for a cluster in a + project. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_name (:class:`str`): + Required. The cluster name. + This corresponds to the ``cluster_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataproc_v1.types.Cluster: + Describes the identifying + information, config, and status of a + Dataproc cluster + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, cluster_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clusters.GetClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster_name is not None: + request.cluster_name = cluster_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_clusters( + self, + request: Optional[Union[clusters.ListClustersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListClustersAsyncPager: + r"""Lists all regions/{region}/clusters in a project + alphabetically. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_list_clusters(): + # Create a client + client = dataproc_v1.ClusterControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.ListClustersRequest( + project_id="project_id_value", + region="region_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.ListClustersRequest, dict]]): + The request object. A request to list the clusters in a + project. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. 
+ + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. A filter constraining the clusters to list. + Filters are case-sensitive and have the following + syntax: + + field = value [AND [field = value]] ... + + where **field** is one of ``status.state``, + ``clusterName``, or ``labels.[KEY]``, and ``[KEY]`` is a + label key. **value** can be ``*`` to match all values. + ``status.state`` can be one of the following: + ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, + ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` + contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` + states. ``INACTIVE`` contains the ``DELETING`` and + ``ERROR`` states. ``clusterName`` is the name of the + cluster provided at creation time. Only the logical + ``AND`` operator is supported; space-separated items are + treated as having an implicit ``AND`` operator. + + Example filter: + + status.state = ACTIVE AND clusterName = mycluster AND + labels.env = staging AND labels.starred = \* + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersAsyncPager: + The list of all clusters in a + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clusters.ListClustersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_clusters, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListClustersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def diagnose_cluster( + self, + request: Optional[Union[clusters.DiagnoseClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Gets cluster diagnostic information. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will + be + `ClusterOperationMetadata `__. + After the operation completes, + [Operation.response][google.longrunning.Operation.response] + contains + `DiagnoseClusterResults `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_diagnose_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.DiagnoseClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + operation = client.diagnose_cluster(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.DiagnoseClusterRequest, dict]]): + The request object. A request to collect cluster + diagnostic information. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_name (:class:`str`): + Required. The cluster name. + This corresponds to the ``cluster_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataproc_v1.types.DiagnoseClusterResults` + The location of diagnostic output. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, cluster_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clusters.DiagnoseClusterRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster_name is not None: + request.cluster_name = cluster_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.diagnose_cluster, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clusters.DiagnoseClusterResults, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
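(Editorial aside.) The long-running-operations mixins above and below
(``list_operations``, ``get_operation``, ``delete_operation``, ``cancel_operation``)
take raw ``operations_pb2`` messages, or plain dicts, rather than proto-plus types.
A sketch; the parent name is a placeholder:

.. code-block:: python

    from google.longrunning import operations_pb2

    async def inspect_operations(client):
        # `client` is assumed to be a ClusterControllerAsyncClient.
        parent = "projects/my-project/regions/us-central1/operations"
        resp = await client.list_operations(
            operations_pb2.ListOperationsRequest(name=parent)
        )
        for op in resp.operations:
            latest = await client.get_operation(
                operations_pb2.GetOperationRequest(name=op.name)
            )
            print(latest.name, latest.done)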
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
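(Editorial aside.) ``get_iam_policy`` and ``set_iam_policy`` above compose into the
usual read-modify-write flow. A sketch; ``resource`` would be a cluster resource
name and ``member`` something like ``user:eve@example.com``:

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2

    async def grant_viewer(client, resource: str, member: str):
        # Read the current policy, append a binding, write it back.
        policy = await client.get_iam_policy(
            iam_policy_pb2.GetIamPolicyRequest(resource=resource)
        )
        policy.bindings.add(role="roles/viewer", members=[member])
        return await client.set_iam_policy(
            iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
        )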
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ClusterControllerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ClusterControllerAsyncClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py new file mode 100644 index 000000000000..65d32cc35551 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -0,0 +1,2222 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
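(Editorial aside.) The async client that ends above is also an async context
manager: ``__aenter__`` returns the client and ``__aexit__`` closes the transport.
A minimal usage sketch (running it requires Application Default Credentials):

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1

    async def main():
        async with dataproc_v1.ClusterControllerAsyncClient() as client:
            # Use `client` here; the underlying channel is closed on exit.
            print(type(client).__name__)

    # asyncio.run(main())  # left commented: needs credentials at run time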
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.cluster_controller import pagers +from google.cloud.dataproc_v1.types import clusters, operations + +from .transports.base import DEFAULT_CLIENT_INFO, ClusterControllerTransport +from .transports.grpc import ClusterControllerGrpcTransport +from .transports.grpc_asyncio import ClusterControllerGrpcAsyncIOTransport +from .transports.rest import ClusterControllerRestTransport + + +class ClusterControllerClientMeta(type): + """Metaclass for the ClusterController client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ClusterControllerTransport]] + _transport_registry["grpc"] = ClusterControllerGrpcTransport + _transport_registry["grpc_asyncio"] = ClusterControllerGrpcAsyncIOTransport + _transport_registry["rest"] = ClusterControllerRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ClusterControllerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ClusterControllerClient(metaclass=ClusterControllerClientMeta): + """The ClusterControllerService provides methods to manage + clusters of Compute Engine instances. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataproc.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ClusterControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ClusterControllerTransport: + """Returns the transport used by the client instance. + + Returns: + ClusterControllerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def cluster_path( + project: str, + location: str, + cluster: str, + ) -> str: + """Returns a fully-qualified cluster string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + + @staticmethod + def parse_cluster_path(path: str) -> Dict[str, str]: + """Parses a cluster path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def node_group_path( + project: str, + region: str, + cluster: str, + node_group: str, + ) -> str: + """Returns a fully-qualified node_group string.""" + return "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( + project=project, + region=region, + cluster=cluster, + node_group=node_group, + ) + + @staticmethod + def parse_node_group_path(path: str) -> Dict[str, str]: + """Parses a node_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/clusters/(?P.+?)/nodeGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def service_path( + project: str, + location: str, + service: str, + ) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/services/{service}".format( + project=project, + location=location, + service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + 
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, ClusterControllerTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cluster controller client.
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ClusterControllerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ClusterControllerTransport): + # transport is a ClusterControllerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_cluster( + self, + request: Optional[Union[clusters.CreateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster: Optional[clusters.Cluster] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will + be + `ClusterOperationMetadata `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_create_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerClient() + + # Initialize request argument(s) + cluster = dataproc_v1.Cluster() + cluster.project_id = "project_id_value" + cluster.cluster_name = "cluster_name_value" + + request = dataproc_v1.CreateClusterRequest( + project_id="project_id_value", + region="region_value", + cluster=cluster, + ) + + # Make the request + operation = client.create_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.CreateClusterRequest, dict]): + The request object. A request to create a cluster. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.dataproc_v1.types.Cluster): + Required. The cluster to create. + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a Dataproc cluster + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, cluster]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clusters.CreateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.CreateClusterRequest): + request = clusters.CreateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster is not None: + request.cluster = cluster + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + def update_cluster( + self, + request: Optional[Union[clusters.UpdateClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster_name: Optional[str] = None, + cluster: Optional[clusters.Cluster] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will + be + `ClusterOperationMetadata `__. + The cluster must be in a + [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] + state or an error is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_update_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerClient() + + # Initialize request argument(s) + cluster = dataproc_v1.Cluster() + cluster.project_id = "project_id_value" + cluster.cluster_name = "cluster_name_value" + + request = dataproc_v1.UpdateClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + cluster=cluster, + ) + + # Make the request + operation = client.update_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.UpdateClusterRequest, dict]): + The request object. A request to update a cluster. + project_id (str): + Required. The ID of the Google Cloud + Platform project the cluster belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_name (str): + Required. The cluster name. + This corresponds to the ``cluster_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster (google.cloud.dataproc_v1.types.Cluster): + Required. The changes to the cluster. + This corresponds to the ``cluster`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Specifies the path, relative to ``Cluster``, + of the field to update. For example, to change the + number of workers in a cluster to 5, the ``update_mask`` + parameter would be specified as + ``config.worker_config.num_instances``, and the + ``PATCH`` request body would specify the new value, as + follows: + + :: + + { + "config":{ + "workerConfig":{ + "numInstances":"5" + } + } + } + + Similarly, to change the number of preemptible workers + in a cluster to 5, the ``update_mask`` parameter would + be ``config.secondary_worker_config.num_instances``, and + the ``PATCH`` request body would be set as follows: + + :: + + { + "config":{ + "secondaryWorkerConfig":{ + "numInstances":"5" + } + } + } + + Note: Currently, only the following fields can be + updated: + + .. raw:: html + + + + + + + + + + + + + + + + + + + + + + + +
+                     <table>
+                     <tbody>
+                     <tr>
+                     <td><strong>Mask</strong></td>
+                     <td><strong>Purpose</strong></td>
+                     </tr>
+                     <tr>
+                     <td><strong><em>labels</em></strong></td>
+                     <td>Update labels</td>
+                     </tr>
+                     <tr>
+                     <td><strong><em>config.worker_config.num_instances</em></strong></td>
+                     <td>Resize primary worker group</td>
+                     </tr>
+                     <tr>
+                     <td><strong><em>config.secondary_worker_config.num_instances</em></strong></td>
+                     <td>Resize secondary worker group</td>
+                     </tr>
+                     <tr>
+                     <td>config.autoscaling_config.policy_uri</td><td>Use, stop using, or
+                     change autoscaling policies</td>
+                     </tr>
+                     </tbody>
+                     </table>
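(Editorial aside.) Per the mask table above, resizing the primary worker group
means masking ``config.worker_config.num_instances``. A sketch using the flattened
arguments; every name and value is a placeholder:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.protobuf import field_mask_pb2

    def resize_primary_workers(client, new_size: int = 5):
        # `client` is assumed to be a ClusterControllerClient.
        cluster = dataproc_v1.Cluster(
            config={"worker_config": {"num_instances": new_size}}
        )
        mask = field_mask_pb2.FieldMask(
            paths=["config.worker_config.num_instances"]
        )
        return client.update_cluster(
            project_id="my-project",
            region="us-central1",
            cluster_name="my-cluster",
            cluster=cluster,
            update_mask=mask,
        )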
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a Dataproc cluster + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project_id, region, cluster_name, cluster, update_mask] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clusters.UpdateClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.UpdateClusterRequest): + request = clusters.UpdateClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster_name is not None: + request.cluster_name = cluster_name + if cluster is not None: + request.cluster = cluster + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + def stop_cluster( + self, + request: Optional[Union[clusters.StopClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Stops a cluster in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_stop_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.StopClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + operation = client.stop_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.StopClusterRequest, dict]): + The request object. A request to stop a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a Dataproc cluster + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clusters.StopClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.StopClusterRequest): + request = clusters.StopClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + def start_cluster( + self, + request: Optional[Union[clusters.StartClusterRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts a cluster in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_start_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.StartClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + operation = client.start_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.StartClusterRequest, dict]): + The request object. A request to start a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a Dataproc cluster + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clusters.StartClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.StartClusterRequest): + request = clusters.StartClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + def delete_cluster( + self, + request: Optional[Union[clusters.DeleteClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will + be + `ClusterOperationMetadata `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_delete_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.DeleteClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + operation = client.delete_cluster(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.DeleteClusterRequest, dict]): + The request object. A request to delete a cluster. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_name (str): + Required. The cluster name. + This corresponds to the ``cluster_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, cluster_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clusters.DeleteClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.DeleteClusterRequest): + request = clusters.DeleteClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster_name is not None: + request.cluster_name = cluster_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. 
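(Editorial aside.) The recurring routing-header block, like the one that follows,
folds the routed request fields into a single ``x-goog-request-params`` metadata
entry. A sketch of what it produces:

.. code-block:: python

    from google.api_core import gapic_v1

    entry = gapic_v1.routing_header.to_grpc_metadata(
        (("project_id", "my-project"), ("region", "us-central1"))
    )
    # ('x-goog-request-params', 'project_id=my-project&region=us-central1')
    print(entry)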
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + def get_cluster( + self, + request: Optional[Union[clusters.GetClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clusters.Cluster: + r"""Gets the resource representation for a cluster in a + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_get_cluster(): + # Create a client + client = dataproc_v1.ClusterControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.GetClusterRequest( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Make the request + response = client.get_cluster(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.GetClusterRequest, dict]): + The request object. Request to get the resource + representation for a cluster in a + project. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + cluster_name (str): + Required. The cluster name. + This corresponds to the ``cluster_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Cluster: + Describes the identifying + information, config, and status of a + Dataproc cluster + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, cluster_name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a clusters.GetClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.GetClusterRequest): + request = clusters.GetClusterRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster_name is not None: + request.cluster_name = cluster_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_clusters( + self, + request: Optional[Union[clusters.ListClustersRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListClustersPager: + r"""Lists all regions/{region}/clusters in a project + alphabetically. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_list_clusters(): + # Create a client + client = dataproc_v1.ClusterControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.ListClustersRequest( + project_id="project_id_value", + region="region_value", + ) + + # Make the request + page_result = client.list_clusters(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.ListClustersRequest, dict]): + The request object. A request to list the clusters in a + project. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the cluster + belongs to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + Optional. A filter constraining the clusters to list. + Filters are case-sensitive and have the following + syntax: + + field = value [AND [field = value]] ... + + where **field** is one of ``status.state``, + ``clusterName``, or ``labels.[KEY]``, and ``[KEY]`` is a + label key. **value** can be ``*`` to match all values. 
+ ``status.state`` can be one of the following: + ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, + ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` + contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` + states. ``INACTIVE`` contains the ``DELETING`` and + ``ERROR`` states. ``clusterName`` is the name of the + cluster provided at creation time. Only the logical + ``AND`` operator is supported; space-separated items are + treated as having an implicit ``AND`` operator. + + Example filter: + + status.state = ACTIVE AND clusterName = mycluster AND + labels.env = staging AND labels.starred = \* + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersPager: + The list of all clusters in a + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clusters.ListClustersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.ListClustersRequest): + request = clusters.ListClustersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_clusters] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListClustersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def diagnose_cluster( + self, + request: Optional[Union[clusters.DiagnoseClusterRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + cluster_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Gets cluster diagnostic information. 
The returned
+ [Operation.metadata][google.longrunning.Operation.metadata] will
+ be
+ `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__.
+ After the operation completes,
+ [Operation.response][google.longrunning.Operation.response]
+ contains
+ `DiagnoseClusterResults <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults>`__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataproc_v1
+
+ def sample_diagnose_cluster():
+ # Create a client
+ client = dataproc_v1.ClusterControllerClient()
+
+ # Initialize request argument(s)
+ request = dataproc_v1.DiagnoseClusterRequest(
+ project_id="project_id_value",
+ region="region_value",
+ cluster_name="cluster_name_value",
+ )
+
+ # Make the request
+ operation = client.diagnose_cluster(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.dataproc_v1.types.DiagnoseClusterRequest, dict]):
+ The request object. A request to collect cluster
+ diagnostic information.
+ project_id (str):
+ Required. The ID of the Google Cloud
+ Platform project that the cluster
+ belongs to.
+
+ This corresponds to the ``project_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ region (str):
+ Required. The Dataproc region in
+ which to handle the request.
+
+ This corresponds to the ``region`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ cluster_name (str):
+ Required. The cluster name.
+ This corresponds to the ``cluster_name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.dataproc_v1.types.DiagnoseClusterResults`
+ The location of diagnostic output.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([project_id, region, cluster_name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a clusters.DiagnoseClusterRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, clusters.DiagnoseClusterRequest):
+ request = clusters.DiagnoseClusterRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
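+ # Note: the assignments below go through protobuf validation; for
+ # example, setting request.project_id to a non-string value raises
+ # TypeError rather than silently coercing it (illustrative behavior
+ # of protobuf string fields).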
+ if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if cluster_name is not None: + request.cluster_name = cluster_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.diagnose_cluster] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("cluster_name", request.cluster_name), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clusters.DiagnoseClusterResults, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "ClusterControllerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
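+
+ For example (hypothetical operation name)::
+
+ response = client.get_operation(
+ {"name": "projects/my-project/regions/us-central1/operations/op-123"}
+ )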
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
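+
+ For example (hypothetical operation name)::
+
+ client.cancel_operation(
+ {"name": "projects/my-project/regions/us-central1/operations/op-123"}
+ )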
+ + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method(
+ self._transport.set_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_iam_policy(
+ self,
+ request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the IAM access control policy for a function.
+
+ Returns an empty policy if the function exists and does not have a
+ policy set.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+ The request object. Request message for `GetIamPolicy`
+ method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if
+ any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members`` to a single
+ ``role``. Members can be user accounts, service
+ accounts, Google groups, and domains (such as G Suite).
+ A ``role`` is a named list of permissions (defined by
+ IAM or configured by users). A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method(
+ self._transport.get_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def test_iam_permissions(
+ self,
+ request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
+ r"""Tests the specified IAM permissions against the IAM access control
+ policy for a function.
+
+ If the function does not exist, this will return an empty set
+ of permissions, not a NOT_FOUND error.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+ The request object. Request message for
+ `TestIamPermissions` method.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for ``TestIamPermissions`` method.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ClusterControllerClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/pagers.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/pagers.py new file mode 100644 index 000000000000..018029c73b82 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dataproc_v1.types import clusters + + +class ListClustersPager: + """A pager for iterating through ``list_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListClustersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., clusters.ListClustersResponse], + request: clusters.ListClustersRequest, + response: clusters.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListClustersRequest): + The initial request object. + response (google.cloud.dataproc_v1.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clusters.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clusters.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clusters.Cluster]: + for page in self.pages: + yield from page.clusters + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListClustersAsyncPager: + """A pager for iterating through ``list_clusters`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListClustersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``clusters`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListClusters`` requests and continue to iterate + through the ``clusters`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListClustersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[clusters.ListClustersResponse]], + request: clusters.ListClustersRequest, + response: clusters.ListClustersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListClustersRequest): + The initial request object. + response (google.cloud.dataproc_v1.types.ListClustersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clusters.ListClustersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clusters.ListClustersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[clusters.Cluster]: + async def async_generator(): + async for page in self.pages: + for response in page.clusters: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py new file mode 100644 index 000000000000..33bd31ad98c6 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ClusterControllerTransport +from .grpc import ClusterControllerGrpcTransport +from .grpc_asyncio import ClusterControllerGrpcAsyncIOTransport +from .rest import ClusterControllerRestInterceptor, ClusterControllerRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ClusterControllerTransport]] +_transport_registry["grpc"] = ClusterControllerGrpcTransport +_transport_registry["grpc_asyncio"] = ClusterControllerGrpcAsyncIOTransport +_transport_registry["rest"] = ClusterControllerRestTransport + +__all__ = ( + "ClusterControllerTransport", + "ClusterControllerGrpcTransport", + "ClusterControllerGrpcAsyncIOTransport", + "ClusterControllerRestTransport", + "ClusterControllerRestInterceptor", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py new file mode 100644 index 000000000000..a3edb089782b --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py @@ -0,0 +1,382 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version +from google.cloud.dataproc_v1.types import clusters + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ClusterControllerTransport(abc.ABC): + """Abstract transport class for ClusterController.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "dataproc.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
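+ # Resolution order below: an explicit ``credentials_file`` is loaded
+ # first; otherwise explicit ``credentials`` are used as given; if
+ # neither is supplied, Application Default Credentials are resolved
+ # via google.auth.default().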
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ # Don't apply the audience if the credentials were passed in by the user.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(
+ api_audience if api_audience else host
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.create_cluster: gapic_v1.method.wrap_method(
+ self.create_cluster,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+ default_timeout=300.0,
+ client_info=client_info,
+ ),
+ self.update_cluster: gapic_v1.method.wrap_method(
+ self.update_cluster,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+ default_timeout=300.0,
+ client_info=client_info,
+ ),
+ self.stop_cluster: gapic_v1.method.wrap_method(
+ self.stop_cluster,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.start_cluster: gapic_v1.method.wrap_method(
+ self.start_cluster,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_cluster: gapic_v1.method.wrap_method(
+ self.delete_cluster,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+ default_timeout=300.0,
+ client_info=client_info,
+ ),
+ self.get_cluster: gapic_v1.method.wrap_method(
+ self.get_cluster,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+ default_timeout=300.0,
+ client_info=client_info,
+ ),
+ self.list_clusters: gapic_v1.method.wrap_method(
+ self.list_clusters,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+ default_timeout=300.0,
+ client_info=client_info,
+ ),
+ self.diagnose_cluster: gapic_v1.method.wrap_method(
+ self.diagnose_cluster,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=300.0,
+ ),
+
default_timeout=300.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_cluster( + self, + ) -> Callable[ + [clusters.CreateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_cluster( + self, + ) -> Callable[ + [clusters.UpdateClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def stop_cluster( + self, + ) -> Callable[ + [clusters.StopClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def start_cluster( + self, + ) -> Callable[ + [clusters.StartClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_cluster( + self, + ) -> Callable[ + [clusters.DeleteClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_cluster( + self, + ) -> Callable[ + [clusters.GetClusterRequest], + Union[clusters.Cluster, Awaitable[clusters.Cluster]], + ]: + raise NotImplementedError() + + @property + def list_clusters( + self, + ) -> Callable[ + [clusters.ListClustersRequest], + Union[clusters.ListClustersResponse, Awaitable[clusters.ListClustersResponse]], + ]: + raise NotImplementedError() + + @property + def diagnose_cluster( + self, + ) -> Callable[ + [clusters.DiagnoseClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = 
("ClusterControllerTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py new file mode 100644 index 000000000000..3337f8fbac34 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py @@ -0,0 +1,635 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dataproc_v1.types import clusters + +from .base import DEFAULT_CLIENT_INFO, ClusterControllerTransport + + +class ClusterControllerGrpcTransport(ClusterControllerTransport): + """gRPC backend transport for ClusterController. + + The ClusterControllerService provides methods to manage + clusters of Compute Engine instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ # Set ``credentials_file`` to ``None`` here as
+ # the credentials that we saved earlier should be used.
+ credentials_file=None,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "dataproc.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service."""
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Quick check: Only create a new client if we do not already have one.
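+ # For example, repeated reads return the same cached instance:
+ # transport.operations_client is transport.operations_client # -> True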
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def create_cluster(
+        self,
+    ) -> Callable[[clusters.CreateClusterRequest], operations_pb2.Operation]:
+        r"""Return a callable for the create cluster method over gRPC.
+
+        Creates a cluster in a project. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] will
+        be
+        `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__.
+
+        Returns:
+            Callable[[~.CreateClusterRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_cluster" not in self._stubs:
+            self._stubs["create_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/CreateCluster",
+                request_serializer=clusters.CreateClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["create_cluster"]
+
+    @property
+    def update_cluster(
+        self,
+    ) -> Callable[[clusters.UpdateClusterRequest], operations_pb2.Operation]:
+        r"""Return a callable for the update cluster method over gRPC.
+
+        Updates a cluster in a project. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] will
+        be
+        `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__.
+        The cluster must be in a
+        [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State]
+        state or an error is returned.
+
+        Returns:
+            Callable[[~.UpdateClusterRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_cluster" not in self._stubs:
+            self._stubs["update_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/UpdateCluster",
+                request_serializer=clusters.UpdateClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["update_cluster"]
+
+    @property
+    def stop_cluster(
+        self,
+    ) -> Callable[[clusters.StopClusterRequest], operations_pb2.Operation]:
+        r"""Return a callable for the stop cluster method over gRPC.
+
+        Stops a cluster in a project.
+
+        Returns:
+            Callable[[~.StopClusterRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "stop_cluster" not in self._stubs:
+            self._stubs["stop_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/StopCluster",
+                request_serializer=clusters.StopClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["stop_cluster"]
+
+    @property
+    def start_cluster(
+        self,
+    ) -> Callable[[clusters.StartClusterRequest], operations_pb2.Operation]:
+        r"""Return a callable for the start cluster method over gRPC.
+
+        Starts a cluster in a project.
+
+        Returns:
+            Callable[[~.StartClusterRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "start_cluster" not in self._stubs:
+            self._stubs["start_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/StartCluster",
+                request_serializer=clusters.StartClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["start_cluster"]
+
+    @property
+    def delete_cluster(
+        self,
+    ) -> Callable[[clusters.DeleteClusterRequest], operations_pb2.Operation]:
+        r"""Return a callable for the delete cluster method over gRPC.
+
+        Deletes a cluster in a project. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] will
+        be
+        `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__.
+
+        Returns:
+            Callable[[~.DeleteClusterRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_cluster" not in self._stubs:
+            self._stubs["delete_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/DeleteCluster",
+                request_serializer=clusters.DeleteClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["delete_cluster"]
+
+    @property
+    def get_cluster(self) -> Callable[[clusters.GetClusterRequest], clusters.Cluster]:
+        r"""Return a callable for the get cluster method over gRPC.
+
+        Gets the resource representation for a cluster in a
+        project.
+
+        Returns:
+            Callable[[~.GetClusterRequest],
+                    ~.Cluster]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_cluster" not in self._stubs:
+            self._stubs["get_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/GetCluster",
+                request_serializer=clusters.GetClusterRequest.serialize,
+                response_deserializer=clusters.Cluster.deserialize,
+            )
+        return self._stubs["get_cluster"]
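Editorial note: every RPC property above follows the same lazy stub-caching pattern; the stub is built on first access and reused afterwards. A minimal illustrative sketch of that pattern (the ``EchoTransport`` class is hypothetical, not part of the library):

```python
# Minimal sketch of the stub-caching pattern used by these properties.
from typing import Callable, Dict


class EchoTransport:
    def __init__(self) -> None:
        self._stubs: Dict[str, Callable[[str], str]] = {}

    @property
    def echo(self) -> Callable[[str], str]:
        if "echo" not in self._stubs:
            # The real transport builds this with grpc_channel.unary_unary(...).
            self._stubs["echo"] = lambda s: s
        return self._stubs["echo"]


t = EchoTransport()
assert t.echo is t.echo  # repeated access returns the cached callable
```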
+ if "list_clusters" not in self._stubs: + self._stubs["list_clusters"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.ClusterController/ListClusters", + request_serializer=clusters.ListClustersRequest.serialize, + response_deserializer=clusters.ListClustersResponse.deserialize, + ) + return self._stubs["list_clusters"] + + @property + def diagnose_cluster( + self, + ) -> Callable[[clusters.DiagnoseClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the diagnose cluster method over gRPC. + + Gets cluster diagnostic information. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will + be + `ClusterOperationMetadata `__. + After the operation completes, + [Operation.response][google.longrunning.Operation.response] + contains + `DiagnoseClusterResults `__. + + Returns: + Callable[[~.DiagnoseClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "diagnose_cluster" not in self._stubs: + self._stubs["diagnose_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster", + request_serializer=clusters.DiagnoseClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["diagnose_cluster"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. 
+ Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ClusterControllerGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py new file mode 100644 index 000000000000..b2fda9a2b687 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py @@ -0,0 +1,642 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataproc_v1.types import clusters + +from .base import DEFAULT_CLIENT_INFO, ClusterControllerTransport +from .grpc import ClusterControllerGrpcTransport + + +class ClusterControllerGrpcAsyncIOTransport(ClusterControllerTransport): + """gRPC AsyncIO backend transport for ClusterController. + + The ClusterControllerService provides methods to manage + clusters of Compute Engine instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
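Editorial note: the AsyncIO transport's stubs return awaitables, so the matching async client is driven from an event loop. A hedged usage sketch; it assumes google-cloud-dataproc is installed with valid application default credentials, and the project, region, and cluster values are placeholders:

```python
# Sketch: driving a long-running operation through the async client.
import asyncio

from google.cloud import dataproc_v1


async def main() -> None:
    client = dataproc_v1.ClusterControllerAsyncClient()  # needs credentials
    operation = await client.start_cluster(
        request={
            "project_id": "my-project",
            "region": "us-central1",
            "cluster_name": "my-cluster",
        }
    )
    cluster = await operation.result()  # wait for the LRO to complete
    print(cluster.status.state)


asyncio.run(main())
```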
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsAsyncClient(
+                self.grpc_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def create_cluster(
+        self,
+    ) -> Callable[[clusters.CreateClusterRequest], Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the create cluster method over gRPC.
+
+        Creates a cluster in a project. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] will
+        be
+        `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__.
+
+        Returns:
+            Callable[[~.CreateClusterRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_cluster" not in self._stubs:
+            self._stubs["create_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/CreateCluster",
+                request_serializer=clusters.CreateClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["create_cluster"]
+
+    @property
+    def update_cluster(
+        self,
+    ) -> Callable[[clusters.UpdateClusterRequest], Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the update cluster method over gRPC.
+
+        Updates a cluster in a project. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] will
+        be
+        `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__.
+        The cluster must be in a
+        [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State]
+        state or an error is returned.
+
+        Returns:
+            Callable[[~.UpdateClusterRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_cluster" not in self._stubs:
+            self._stubs["update_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/UpdateCluster",
+                request_serializer=clusters.UpdateClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["update_cluster"]
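Editorial note: the cached ``operations_client`` above can also inspect a raw google.longrunning operation by name, independent of the LRO wrapper. A hedged sketch; the operation name is a placeholder and credentials are assumed:

```python
# Sketch: polling a raw operation through the async operations client.
import asyncio

from google.cloud import dataproc_v1


async def main() -> None:
    client = dataproc_v1.ClusterControllerAsyncClient()  # needs credentials
    ops = client.transport.operations_client
    op = await ops.get_operation(
        "projects/my-project/regions/us-central1/operations/op-123"
    )
    print(op.name, op.done)


asyncio.run(main())
```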
+
+    @property
+    def stop_cluster(
+        self,
+    ) -> Callable[[clusters.StopClusterRequest], Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the stop cluster method over gRPC.
+
+        Stops a cluster in a project.
+
+        Returns:
+            Callable[[~.StopClusterRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "stop_cluster" not in self._stubs:
+            self._stubs["stop_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/StopCluster",
+                request_serializer=clusters.StopClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["stop_cluster"]
+
+    @property
+    def start_cluster(
+        self,
+    ) -> Callable[[clusters.StartClusterRequest], Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the start cluster method over gRPC.
+
+        Starts a cluster in a project.
+
+        Returns:
+            Callable[[~.StartClusterRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "start_cluster" not in self._stubs:
+            self._stubs["start_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/StartCluster",
+                request_serializer=clusters.StartClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["start_cluster"]
+
+    @property
+    def delete_cluster(
+        self,
+    ) -> Callable[[clusters.DeleteClusterRequest], Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the delete cluster method over gRPC.
+
+        Deletes a cluster in a project. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] will
+        be
+        `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__.
+
+        Returns:
+            Callable[[~.DeleteClusterRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_cluster" not in self._stubs:
+            self._stubs["delete_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/DeleteCluster",
+                request_serializer=clusters.DeleteClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["delete_cluster"]
+
+    @property
+    def get_cluster(
+        self,
+    ) -> Callable[[clusters.GetClusterRequest], Awaitable[clusters.Cluster]]:
+        r"""Return a callable for the get cluster method over gRPC.
+
+        Gets the resource representation for a cluster in a
+        project.
+
+        Returns:
+            Callable[[~.GetClusterRequest],
+                    Awaitable[~.Cluster]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_cluster" not in self._stubs:
+            self._stubs["get_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/GetCluster",
+                request_serializer=clusters.GetClusterRequest.serialize,
+                response_deserializer=clusters.Cluster.deserialize,
+            )
+        return self._stubs["get_cluster"]
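Editorial note: ``list_clusters`` returns a paged response, and the async client wraps it in a pager that supports ``async for``. A hedged sketch with placeholder values and assumed credentials:

```python
# Sketch: iterating the paged ListClusters response from the async client.
import asyncio

from google.cloud import dataproc_v1


async def main() -> None:
    client = dataproc_v1.ClusterControllerAsyncClient()  # needs credentials
    pager = await client.list_clusters(
        project_id="my-project", region="us-central1"
    )
    async for cluster in pager:
        print(cluster.cluster_name)


asyncio.run(main())
```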
+
+    @property
+    def list_clusters(
+        self,
+    ) -> Callable[
+        [clusters.ListClustersRequest], Awaitable[clusters.ListClustersResponse]
+    ]:
+        r"""Return a callable for the list clusters method over gRPC.
+
+        Lists all regions/{region}/clusters in a project
+        alphabetically.
+
+        Returns:
+            Callable[[~.ListClustersRequest],
+                    Awaitable[~.ListClustersResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_clusters" not in self._stubs:
+            self._stubs["list_clusters"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/ListClusters",
+                request_serializer=clusters.ListClustersRequest.serialize,
+                response_deserializer=clusters.ListClustersResponse.deserialize,
+            )
+        return self._stubs["list_clusters"]
+
+    @property
+    def diagnose_cluster(
+        self,
+    ) -> Callable[
+        [clusters.DiagnoseClusterRequest], Awaitable[operations_pb2.Operation]
+    ]:
+        r"""Return a callable for the diagnose cluster method over gRPC.
+
+        Gets cluster diagnostic information. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] will
+        be
+        `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__.
+        After the operation completes,
+        [Operation.response][google.longrunning.Operation.response]
+        contains
+        `DiagnoseClusterResults <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults>`__.
+
+        Returns:
+            Callable[[~.DiagnoseClusterRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "diagnose_cluster" not in self._stubs:
+            self._stubs["diagnose_cluster"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster",
+                request_serializer=clusters.DiagnoseClusterRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["diagnose_cluster"]
+
+    def close(self):
+        return self.grpc_channel.close()
+
+    @property
+    def delete_operation(
+        self,
+    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+        r"""Return a callable for the delete_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("ClusterControllerGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py new file mode 100644 index 000000000000..76907687aa07 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py @@ -0,0 +1,2040 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dataproc_v1.types import clusters + +from .base import ClusterControllerTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ClusterControllerRestInterceptor: + """Interceptor for ClusterController. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ClusterControllerRestTransport. + + .. 
code-block:: python + class MyCustomClusterControllerInterceptor(ClusterControllerRestInterceptor): + def pre_create_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_diagnose_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_diagnose_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_clusters(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_clusters(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stop_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_cluster(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cluster(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ClusterControllerRestTransport(interceptor=MyCustomClusterControllerInterceptor()) + client = ClusterControllerClient(transport=transport) + + + """ + + def pre_create_cluster( + self, + request: clusters.CreateClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[clusters.CreateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_create_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_delete_cluster( + self, + request: clusters.DeleteClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[clusters.DeleteClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_delete_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. 
+ """ + return response + + def pre_diagnose_cluster( + self, + request: clusters.DiagnoseClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[clusters.DiagnoseClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for diagnose_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_diagnose_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for diagnose_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_get_cluster( + self, request: clusters.GetClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[clusters.GetClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_get_cluster(self, response: clusters.Cluster) -> clusters.Cluster: + """Post-rpc interceptor for get_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_list_clusters( + self, request: clusters.ListClustersRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[clusters.ListClustersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_clusters + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_list_clusters( + self, response: clusters.ListClustersResponse + ) -> clusters.ListClustersResponse: + """Post-rpc interceptor for list_clusters + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_start_cluster( + self, request: clusters.StartClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[clusters.StartClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_start_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for start_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_stop_cluster( + self, request: clusters.StopClusterRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[clusters.StopClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_stop_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for stop_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. 
+ """ + return response + + def pre_update_cluster( + self, + request: clusters.UpdateClusterRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[clusters.UpdateClusterRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cluster + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_update_cluster( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_cluster + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. 
+ """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ClusterController server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ClusterController server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ClusterControllerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ClusterControllerRestInterceptor + + +class ClusterControllerRestTransport(ClusterControllerTransport): + """REST backend transport for ClusterController. + + The ClusterControllerService provides methods to manage + clusters of Compute Engine instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[ClusterControllerRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ClusterControllerRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateCluster(ClusterControllerRestStub): + def __hash__(self): + return hash("CreateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: clusters.CreateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create cluster method over HTTP. + + Args: + request (~.clusters.CreateClusterRequest): + The request object. A request to create a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
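+                    The operation's ``name`` can then be
+                    polled with the transport's
+                    ``operations_client`` until ``done`` is set.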
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/regions/{region}/clusters", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_create_cluster(request, metadata) + pb_request = clusters.CreateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_cluster(resp) + return resp + + class _DeleteCluster(ClusterControllerRestStub): + def __hash__(self): + return hash("DeleteCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: clusters.DeleteClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete cluster method over HTTP. + + Args: + request (~.clusters.DeleteClusterRequest): + The request object. A request to delete a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}", + }, + ] + request, metadata = self._interceptor.pre_delete_cluster(request, metadata) + pb_request = clusters.DeleteClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_cluster(resp) + return resp + + class _DiagnoseCluster(ClusterControllerRestStub): + def __hash__(self): + return hash("DiagnoseCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: clusters.DiagnoseClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the diagnose cluster method over HTTP. + + Args: + request (~.clusters.DiagnoseClusterRequest): + The request object. A request to collect cluster + diagnostic information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_diagnose_cluster( + request, metadata + ) + pb_request = clusters.DiagnoseClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_diagnose_cluster(resp) + return resp + + class _GetCluster(ClusterControllerRestStub): + def __hash__(self): + return hash("GetCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: clusters.GetClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clusters.Cluster: + r"""Call the get cluster method over HTTP. + + Args: + request (~.clusters.GetClusterRequest): + The request object. Request to get the resource + representation for a cluster in a + project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.clusters.Cluster: + Describes the identifying + information, config, and status of a + Dataproc cluster + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}", + }, + ] + request, metadata = self._interceptor.pre_get_cluster(request, metadata) + pb_request = clusters.GetClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = clusters.Cluster() + pb_resp = clusters.Cluster.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cluster(resp) + return resp + + class _ListClusters(ClusterControllerRestStub): + def __hash__(self): + return hash("ListClusters") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: clusters.ListClustersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clusters.ListClustersResponse: + r"""Call the list clusters method over HTTP. + + Args: + request (~.clusters.ListClustersRequest): + The request object. A request to list the clusters in a + project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.clusters.ListClustersResponse: + The list of all clusters in a + project. 
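+                    The response carries a ``next_page_token``;
+                    at this transport level paging is manual,
+                    while the client surface wraps results in a
+                    pager.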
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/regions/{region}/clusters", + }, + ] + request, metadata = self._interceptor.pre_list_clusters(request, metadata) + pb_request = clusters.ListClustersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = clusters.ListClustersResponse() + pb_resp = clusters.ListClustersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_clusters(resp) + return resp + + class _StartCluster(ClusterControllerRestStub): + def __hash__(self): + return hash("StartCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: clusters.StartClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the start cluster method over HTTP. + + Args: + request (~.clusters.StartClusterRequest): + The request object. A request to start a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:start", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_start_cluster(request, metadata) + pb_request = clusters.StartClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_cluster(resp) + return resp + + class _StopCluster(ClusterControllerRestStub): + def __hash__(self): + return hash("StopCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: clusters.StopClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the stop cluster method over HTTP. + + Args: + request (~.clusters.StopClusterRequest): + The request object. A request to stop a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:stop", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stop_cluster(request, metadata) + pb_request = clusters.StopClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_cluster(resp) + return resp + + class _UpdateCluster(ClusterControllerRestStub): + def __hash__(self): + return hash("UpdateCluster") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: clusters.UpdateClusterRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update cluster method over HTTP. + + Args: + request (~.clusters.UpdateClusterRequest): + The request object. A request to update a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}", + "body": "cluster", + }, + ] + request, metadata = self._interceptor.pre_update_cluster(request, metadata) + pb_request = clusters.UpdateClusterRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_cluster(resp) + return resp + + @property + def create_cluster( + self, + ) -> Callable[[clusters.CreateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cluster( + self, + ) -> Callable[[clusters.DeleteClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def diagnose_cluster( + self, + ) -> Callable[[clusters.DiagnoseClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DiagnoseCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cluster(self) -> Callable[[clusters.GetClusterRequest], clusters.Cluster]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_clusters( + self, + ) -> Callable[[clusters.ListClustersRequest], clusters.ListClustersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_cluster( + self, + ) -> Callable[[clusters.StartClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StartCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def stop_cluster( + self, + ) -> Callable[[clusters.StopClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StopCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cluster( + self, + ) -> Callable[[clusters.UpdateClusterRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCluster(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(ClusterControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(ClusterControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
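+
+            Note: ``Policy`` carries an ``etag``; a read-modify-write
+            flow that sends back the etag obtained from ``get_iam_policy``
+            guards against overwriting concurrent policy changes.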
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(ClusterControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(ClusterControllerRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
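+
+            Note that cancellation is best effort: per
+            google.longrunning.Operations.CancelOperation, the server
+            may still complete the operation after a successful cancel.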
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(ClusterControllerRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
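+            # For example, from_http_response maps a 404 to
+            # core_exceptions.NotFound and a 403 to
+            # core_exceptions.PermissionDenied.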
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(ClusterControllerRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(ClusterControllerRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ClusterControllerRestTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/__init__.py new file mode 100644 index 000000000000..a7fd34f4c71b --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import JobControllerAsyncClient +from .client import JobControllerClient + +__all__ = ( + "JobControllerClient", + "JobControllerAsyncClient", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py new file mode 100644 index 000000000000..f0ac2b1e0c0c --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/async_client.py @@ -0,0 +1,1715 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.job_controller import pagers +from google.cloud.dataproc_v1.types import jobs + +from .client import JobControllerClient +from .transports.base import DEFAULT_CLIENT_INFO, JobControllerTransport +from .transports.grpc_asyncio import JobControllerGrpcAsyncIOTransport + + +class JobControllerAsyncClient: + """The JobController provides methods to manage jobs.""" + + _client: JobControllerClient + + DEFAULT_ENDPOINT = JobControllerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = JobControllerClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + JobControllerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + JobControllerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(JobControllerClient.common_folder_path) + parse_common_folder_path = staticmethod( + JobControllerClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + JobControllerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + JobControllerClient.parse_common_organization_path + ) + common_project_path = staticmethod(JobControllerClient.common_project_path) + parse_common_project_path = staticmethod( + JobControllerClient.parse_common_project_path + ) + common_location_path = staticmethod(JobControllerClient.common_location_path) + parse_common_location_path = staticmethod( + JobControllerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerAsyncClient: The constructed client. + """ + return JobControllerClient.from_service_account_info.__func__(JobControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. 
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            JobControllerAsyncClient: The constructed client.
+        """
+        return JobControllerClient.from_service_account_file.__func__(JobControllerAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return JobControllerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> JobControllerTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            JobControllerTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(JobControllerClient).get_transport_class, type(JobControllerClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, JobControllerTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the job controller client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.JobControllerTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value).
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = JobControllerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def submit_job( + self, + request: Optional[Union[jobs.SubmitJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job: Optional[jobs.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Submits a job to a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_submit_job(): + # Create a client + client = dataproc_v1.JobControllerAsyncClient() + + # Initialize request argument(s) + job = dataproc_v1.Job() + job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + job.placement.cluster_name = "cluster_name_value" + + request = dataproc_v1.SubmitJobRequest( + project_id="project_id_value", + region="region_value", + job=job, + ) + + # Make the request + response = await client.submit_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.SubmitJobRequest, dict]]): + The request object. A request to submit a job. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (:class:`google.cloud.dataproc_v1.types.Job`): + Required. The job resource. + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Job: + A Dataproc job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project_id, region, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = jobs.SubmitJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.submit_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def submit_job_as_operation( + self, + request: Optional[Union[jobs.SubmitJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job: Optional[jobs.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Submits job to a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_submit_job_as_operation(): + # Create a client + client = dataproc_v1.JobControllerAsyncClient() + + # Initialize request argument(s) + job = dataproc_v1.Job() + job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + job.placement.cluster_name = "cluster_name_value" + + request = dataproc_v1.SubmitJobRequest( + project_id="project_id_value", + region="region_value", + job=job, + ) + + # Make the request + operation = client.submit_job_as_operation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.SubmitJobRequest, dict]]): + The request object. A request to submit a job. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ job (:class:`google.cloud.dataproc_v1.types.Job`): + Required. The job resource. + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataproc_v1.types.Job` A Dataproc + job resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = jobs.SubmitJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.submit_job_as_operation, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + jobs.Job, + metadata_type=jobs.JobMetadata, + ) + + # Done; return the response. + return response + + async def get_job( + self, + request: Optional[Union[jobs.GetJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Gets the resource representation for a job in a + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_get_job(): + # Create a client + client = dataproc_v1.JobControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.GetJobRequest( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.GetJobRequest, dict]]): + The request object. A request to get the resource + representation for a job in a project. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_id (:class:`str`): + Required. The job ID. + This corresponds to the ``job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Job: + A Dataproc job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = jobs.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job_id is not None: + request.job_id = job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
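As the quick check above enforces, the flattened keyword arguments and the ``request`` argument are mutually exclusive. A minimal sketch of the two equivalent calling styles for ``get_job``; the IDs are placeholders:

.. code-block:: python

    from google.cloud import dataproc_v1

    async def get_job_two_ways(client: dataproc_v1.JobControllerAsyncClient):
        # Style 1: a request object, or a dict coerced into one.
        job_a = await client.get_job(
            request={
                "project_id": "example-project",
                "region": "us-central1",
                "job_id": "job-123",
            }
        )

        # Style 2: flattened keyword arguments.
        job_b = await client.get_job(
            project_id="example-project",
            region="us-central1",
            job_id="job-123",
        )

        # Mixing the two styles raises ValueError before any RPC is sent.
        try:
            await client.get_job(request={"region": "us-central1"}, job_id="job-123")
        except ValueError:
            pass

        return job_a, job_b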
+ return response + + async def list_jobs( + self, + request: Optional[Union[jobs.ListJobsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""Lists regions/{region}/jobs in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_list_jobs(): + # Create a client + client = dataproc_v1.JobControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.ListJobsRequest( + project_id="project_id_value", + region="region_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.ListJobsRequest, dict]]): + The request object. A request to list jobs in a project. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. A filter constraining the jobs to list. + Filters are case-sensitive and have the following + syntax: + + [field = value] AND [field [= value]] ... + + where **field** is ``status.state`` or ``labels.[KEY]``, + and ``[KEY]`` is a label key. **value** can be ``*`` to + match all values. ``status.state`` can be either + ``ACTIVE`` or ``NON_ACTIVE``. Only the logical ``AND`` + operator is supported; space-separated items are treated + as having an implicit ``AND`` operator. + + Example filter: + + status.state = ACTIVE AND labels.env = staging AND + labels.starred = \* + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsAsyncPager: + A list of jobs in a project. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
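The ``filter`` grammar documented above combines with the async pager, which fetches follow-up pages on demand during iteration. A short sketch, assuming placeholder project, region, and label values:

.. code-block:: python

    from google.cloud import dataproc_v1

    async def list_active_staging_jobs(client: dataproc_v1.JobControllerAsyncClient):
        pager = await client.list_jobs(
            project_id="example-project",
            region="us-central1",
            filter="status.state = ACTIVE AND labels.env = staging",
        )
        # Each iteration yields a Job; page boundaries are resolved
        # transparently by the ListJobsAsyncPager.
        return [job.reference.job_id async for job in pager]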
+ has_flattened_params = any([project_id, region, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = jobs.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job( + self, + request: Optional[Union[jobs.UpdateJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Updates a job in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_update_job(): + # Create a client + client = dataproc_v1.JobControllerAsyncClient() + + # Initialize request argument(s) + job = dataproc_v1.Job() + job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + job.placement.cluster_name = "cluster_name_value" + + request = dataproc_v1.UpdateJobRequest( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + job=job, + ) + + # Make the request + response = await client.update_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.UpdateJobRequest, dict]]): + The request object. A request to update a job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Job: + A Dataproc job resource. + """ + # Create or coerce a protobuf request object. 
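Unlike its siblings, ``update_job`` above exposes no flattened fields, so callers build the full request themselves, including the ``update_mask`` naming the fields to change; per the Dataproc API documentation, ``labels`` is currently the only mutable field. A hedged sketch with placeholder IDs:

.. code-block:: python

    from google.cloud import dataproc_v1

    async def relabel_job(client: dataproc_v1.JobControllerAsyncClient):
        job = dataproc_v1.Job()
        job.labels["env"] = "staging"

        request = dataproc_v1.UpdateJobRequest(
            project_id="example-project",
            region="us-central1",
            job_id="job-123",
            job=job,
            # proto-plus coerces the dict into a google.protobuf.FieldMask.
            update_mask={"paths": ["labels"]},
        )
        return await client.update_job(request=request)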
+ request = jobs.UpdateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_job( + self, + request: Optional[Union[jobs.CancelJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Starts a job cancellation request. To access the job resource + after cancellation, call + `regions/{region}/jobs.list <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list>`__ + or + `regions/{region}/jobs.get <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get>`__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_cancel_job(): + # Create a client + client = dataproc_v1.JobControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.CancelJobRequest( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Make the request + response = await client.cancel_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.CancelJobRequest, dict]]): + The request object. A request to cancel a job. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_id (:class:`str`): + Required. The job ID. + This corresponds to the ``job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Job: + A Dataproc job resource. + """ + # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = jobs.CancelJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job_id is not None: + request.job_id = job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job( + self, + request: Optional[Union[jobs.DeleteJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the job from the project. If the job is active, the + delete fails, and the response returns ``FAILED_PRECONDITION``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_delete_job(): + # Create a client + client = dataproc_v1.JobControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.DeleteJobRequest( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Make the request + await client.delete_job(request=request) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.DeleteJobRequest, dict]]): + The request object. A request to delete a job. + project_id (:class:`str`): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (:class:`str`): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ job_id (:class:`str`): + Required. The job ID. + This corresponds to the ``job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = jobs.DeleteJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job_id is not None: + request.job_id = job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
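As the comments in these mix-ins note, the long-running-operations helpers take raw ``operations_pb2`` messages rather than proto-plus types, so dicts are expanded via keyword arguments instead of being coerced. A sketch of both spellings; the operation names are placeholders:

.. code-block:: python

    from google.longrunning import operations_pb2

    async def inspect_operations(client):
        # Construct the protobuf message directly ...
        op = await client.get_operation(
            operations_pb2.GetOperationRequest(
                name="projects/example-project/regions/us-central1/operations/op-123"
            )
        )

        # ... or pass a dict, which the helper expands with **kwargs.
        ops = await client.list_operations(
            {"name": "projects/example-project/regions/us-central1/operations"}
        )
        return op, ops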
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource.
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "JobControllerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("JobControllerAsyncClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py new file mode 100644 index 000000000000..325c6d4eb378 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py @@ -0,0 +1,1861 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
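Before the synchronous client begins in earnest, one note on the IAM mix-ins that closed the async client above: like the operations helpers, they accept raw ``iam_policy_pb2`` messages or plain dicts. A sketch against a hypothetical cluster resource; the permission strings are assumptions drawn from Dataproc's documented IAM permissions:

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2

    async def check_cluster_access(client):
        resource = "projects/example-project/regions/us-central1/clusters/example-cluster"

        # Read the current policy (dict form, expanded via **kwargs).
        policy = await client.get_iam_policy({"resource": resource})

        # Probe which of the listed permissions the caller holds.
        response = await client.test_iam_permissions(
            iam_policy_pb2.TestIamPermissionsRequest(
                resource=resource,
                permissions=["dataproc.jobs.get", "dataproc.jobs.cancel"],
            )
        )
        return policy, list(response.permissions)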
+# + from collections import OrderedDict + import os + import re + from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + + from google.api_core import client_options as client_options_lib + from google.api_core import exceptions as core_exceptions + from google.api_core import gapic_v1 + from google.api_core import retry as retries + from google.auth import credentials as ga_credentials # type: ignore + from google.auth.exceptions import MutualTLSChannelError # type: ignore + from google.auth.transport import mtls # type: ignore + from google.auth.transport.grpc import SslCredentials # type: ignore + from google.oauth2 import service_account # type: ignore + + from google.cloud.dataproc_v1 import gapic_version as package_version + + try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + from google.api_core import operation # type: ignore + from google.api_core import operation_async # type: ignore + from google.iam.v1 import iam_policy_pb2 # type: ignore + from google.iam.v1 import policy_pb2 # type: ignore + from google.longrunning import operations_pb2 # type: ignore + + from google.cloud.dataproc_v1.services.job_controller import pagers + from google.cloud.dataproc_v1.types import jobs + + from .transports.base import DEFAULT_CLIENT_INFO, JobControllerTransport + from .transports.grpc import JobControllerGrpcTransport + from .transports.grpc_asyncio import JobControllerGrpcAsyncIOTransport + from .transports.rest import JobControllerRestTransport + + + class JobControllerClientMeta(type): + """Metaclass for the JobController client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[JobControllerTransport]] + _transport_registry["grpc"] = JobControllerGrpcTransport + _transport_registry["grpc_asyncio"] = JobControllerGrpcAsyncIOTransport + _transport_registry["rest"] = JobControllerRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[JobControllerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + + class JobControllerClient(metaclass=JobControllerClientMeta): + """The JobController provides methods to manage jobs.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataproc.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> JobControllerTransport: + """Returns the transport used by the client instance. + + Returns: + JobControllerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, JobControllerTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the job controller client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, JobControllerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, JobControllerTransport): + # transport is a JobControllerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def submit_job( + self, + request: Optional[Union[jobs.SubmitJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job: Optional[jobs.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Submits a job to a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_submit_job(): + # Create a client + client = dataproc_v1.JobControllerClient() + + # Initialize request argument(s) + job = dataproc_v1.Job() + job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + job.placement.cluster_name = "cluster_name_value" + + request = dataproc_v1.SubmitJobRequest( + project_id="project_id_value", + region="region_value", + job=job, + ) + + # Make the request + response = client.submit_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.SubmitJobRequest, dict]): + The request object. A request to submit a job. 
+ project_id (str): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (google.cloud.dataproc_v1.types.Job): + Required. The job resource. + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Job: + A Dataproc job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a jobs.SubmitJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.SubmitJobRequest): + request = jobs.SubmitJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.submit_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def submit_job_as_operation( + self, + request: Optional[Union[jobs.SubmitJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job: Optional[jobs.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Submits job to a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_submit_job_as_operation(): + # Create a client + client = dataproc_v1.JobControllerClient() + + # Initialize request argument(s) + job = dataproc_v1.Job() + job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + job.placement.cluster_name = "cluster_name_value" + + request = dataproc_v1.SubmitJobRequest( + project_id="project_id_value", + region="region_value", + job=job, + ) + + # Make the request + operation = client.submit_job_as_operation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.SubmitJobRequest, dict]): + The request object. A request to submit a job. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (google.cloud.dataproc_v1.types.Job): + Required. The job resource. + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.dataproc_v1.types.Job` A Dataproc + job resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a jobs.SubmitJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.SubmitJobRequest): + request = jobs.SubmitJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.submit_job_as_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + jobs.Job, + metadata_type=jobs.JobMetadata, + ) + + # Done; return the response. + return response + + def get_job( + self, + request: Optional[Union[jobs.GetJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Gets the resource representation for a job in a + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_get_job(): + # Create a client + client = dataproc_v1.JobControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.GetJobRequest( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.GetJobRequest, dict]): + The request object. A request to get the resource + representation for a job in a project. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_id (str): + Required. The job ID. + This corresponds to the ``job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Job: + A Dataproc job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a jobs.GetJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.GetJobRequest): + request = jobs.GetJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
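The request-coercion pattern above (repeated in ``submit_job``, ``submit_job_as_operation``, and ``get_job``) means callers can pass a proto message, a plain dict, or flattened keyword arguments, but never a ``request`` combined with flattened fields. A short sketch with hypothetical identifiers:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    # A plain dict is coerced into a GetJobRequest by the client.
    job = client.get_job(
        request={
            "project_id": "my-project",  # hypothetical values
            "region": "us-central1",
            "job_id": "job-1234",
        }
    )

    # Flattened keyword arguments populate the same fields; mixing them
    # with `request` raises ValueError, per the guard above.
    job = client.get_job(
        project_id="my-project", region="us-central1", job_id="job-1234"
    )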
+ if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job_id is not None: + request.job_id = job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_jobs( + self, + request: Optional[Union[jobs.ListJobsRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsPager: + r"""Lists regions/{region}/jobs in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_list_jobs(): + # Create a client + client = dataproc_v1.JobControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.ListJobsRequest( + project_id="project_id_value", + region="region_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.ListJobsRequest, dict]): + The request object. A request to list jobs in a project. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + Optional. A filter constraining the jobs to list. + Filters are case-sensitive and have the following + syntax: + + [field = value] AND [field [= value]] ... + + where **field** is ``status.state`` or ``labels.[KEY]``, + and ``[KEY]`` is a label key. **value** can be ``*`` to + match all values. ``status.state`` can be either + ``ACTIVE`` or ``NON_ACTIVE``. Only the logical ``AND`` + operator is supported; space-separated items are treated + as having an implicit ``AND`` operator. + + Example filter: + + status.state = ACTIVE AND labels.env = staging AND + labels.starred = \* + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsPager: + A list of jobs in a project. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a jobs.ListJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.ListJobsRequest): + request = jobs.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_job( + self, + request: Optional[Union[jobs.UpdateJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Updates a job in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_update_job(): + # Create a client + client = dataproc_v1.JobControllerClient() + + # Initialize request argument(s) + job = dataproc_v1.Job() + job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + job.placement.cluster_name = "cluster_name_value" + + request = dataproc_v1.UpdateJobRequest( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + job=job, + ) + + # Make the request + response = client.update_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.UpdateJobRequest, dict]): + The request object. A request to update a job. 
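Because ``list_jobs`` wraps its response in a ``ListJobsPager``, plain iteration transparently resolves additional pages. A sketch with hypothetical values:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()
    pager = client.list_jobs(
        project_id="my-project",  # hypothetical values
        region="us-central1",
        filter="status.state = ACTIVE",
    )

    # Iterate job by job; the pager fetches further pages lazily.
    for job in pager:
        print(job.reference.job_id)

    # A fresh pager can instead be walked page-by-page via `pager.pages`,
    # which yields the raw ListJobsResponse objects.

The async client's ``ListJobsAsyncPager``, defined later in this diff, offers the same shape via ``async for``.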
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Job: + A Dataproc job resource. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a jobs.UpdateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.UpdateJobRequest): + request = jobs.UpdateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_job( + self, + request: Optional[Union[jobs.CancelJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Starts a job cancellation request. To access the job resource + after cancellation, call + `regions/{region}/jobs.list <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list>`__ + or + `regions/{region}/jobs.get <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get>`__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_cancel_job(): + # Create a client + client = dataproc_v1.JobControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.CancelJobRequest( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Make the request + response = client.cancel_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.CancelJobRequest, dict]): + The request object. A request to cancel a job. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_id (str): + Required. The job ID. + This corresponds to the ``job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.Job: + A Dataproc job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a jobs.CancelJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.CancelJobRequest): + request = jobs.CancelJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job_id is not None: + request.job_id = job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job( + self, + request: Optional[Union[jobs.DeleteJobRequest, dict]] = None, + *, + project_id: Optional[str] = None, + region: Optional[str] = None, + job_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the job from the project. If the job is active, the + delete fails, and the response returns ``FAILED_PRECONDITION``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_delete_job(): + # Create a client + client = dataproc_v1.JobControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.DeleteJobRequest( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Make the request + client.delete_job(request=request) + + Args: + request (Union[google.cloud.dataproc_v1.types.DeleteJobRequest, dict]): + The request object. A request to delete a job. + project_id (str): + Required. The ID of the Google Cloud + Platform project that the job belongs + to. + + This corresponds to the ``project_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Required. The Dataproc region in + which to handle the request. 
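The ``retry`` and ``timeout`` parameters threaded through ``rpc(...)`` above can be overridden per call in place of the transport defaults (those defaults are configured in the transport's ``_prep_wrapped_methods``, shown later in this diff). A hedged sketch of overriding them on ``cancel_job``, with illustrative values:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    # Retry only UNAVAILABLE, backing off from 0.25s toward 30s, and give
    # up after 120s overall; all numbers here are illustrative.
    custom_retry = retries.Retry(
        initial=0.25,
        maximum=30.0,
        multiplier=2.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=120.0,
    )

    client.cancel_job(
        project_id="my-project",  # hypothetical values
        region="us-central1",
        job_id="job-1234",
        retry=custom_retry,
        timeout=60.0,
    )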
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_id (str): + Required. The job ID. + This corresponds to the ``job_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project_id, region, job_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a jobs.DeleteJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.DeleteJobRequest): + request = jobs.DeleteJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project_id is not None: + request.project_id = project_id + if region is not None: + request.region = region + if job_id is not None: + request.job_id = job_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", request.project_id), + ("region", request.region), + ("job_id", request.job_id), + ) + ), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "JobControllerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
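The ``__enter__``/``__exit__`` pair above makes the client usable as a context manager; exiting the block closes the underlying transport, so, per the warning in the docstring, this is only safe when the transport is not shared. A brief sketch with hypothetical identifiers:

.. code-block:: python

    from google.cloud import dataproc_v1

    with dataproc_v1.JobControllerClient() as client:
        job = client.get_job(
            project_id="my-project", region="us-central1", job_id="job-1234"
        )

    # The transport channel is now closed; do not reuse `client` here.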
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). 
+ A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users).
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion.
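That keyword-expansion step means the IAM mixins also accept plain dicts in place of the raw protos. A sketch of ``test_iam_permissions`` (the resource name and permission string are illustrative, not taken from this diff):

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    response = client.test_iam_permissions(
        {
            # Hypothetical fully-qualified job resource name.
            "resource": "projects/my-project/regions/us-central1/jobs/job-1234",
            "permissions": ["dataproc.jobs.get"],
        }
    )
    print(list(response.permissions))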
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("JobControllerClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/pagers.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/pagers.py new file mode 100644 index 000000000000..64d92d931a12 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dataproc_v1.types import jobs + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., jobs.ListJobsResponse], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataproc_v1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[jobs.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[jobs.ListJobsResponse]], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataproc_v1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[jobs.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py new file mode 100644 index 000000000000..1f4f9601616b --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import JobControllerTransport +from .grpc import JobControllerGrpcTransport +from .grpc_asyncio import JobControllerGrpcAsyncIOTransport +from .rest import JobControllerRestInterceptor, JobControllerRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[JobControllerTransport]] +_transport_registry["grpc"] = JobControllerGrpcTransport +_transport_registry["grpc_asyncio"] = JobControllerGrpcAsyncIOTransport +_transport_registry["rest"] = JobControllerRestTransport + +__all__ = ( + "JobControllerTransport", + "JobControllerGrpcTransport", + "JobControllerGrpcAsyncIOTransport", + "JobControllerRestTransport", + "JobControllerRestInterceptor", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/base.py new file mode 100644 index 000000000000..e3835800274f --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/base.py @@ -0,0 +1,367 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
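The ``OrderedDict`` registry in the transports package above is what resolves a transport name to a class; the client exposes the same lookup through ``get_transport_class``, so the two forms in this sketch are equivalent (default credentials assumed):

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.cloud.dataproc_v1.services.job_controller import transports

    # Resolve a transport class by name, mirroring the registry lookup.
    transport_cls = dataproc_v1.JobControllerClient.get_transport_class("rest")
    assert transport_cls is transports.JobControllerRestTransport

    # Or simply name the transport when constructing the client.
    client = dataproc_v1.JobControllerClient(transport="rest")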
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version +from google.cloud.dataproc_v1.types import jobs + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class JobControllerTransport(abc.ABC): + """Abstract transport class for JobController.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "dataproc.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.submit_job: gapic_v1.method.wrap_method( + self.submit_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.submit_job_as_operation: gapic_v1.method.wrap_method( + self.submit_job_as_operation, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.update_job: gapic_v1.method.wrap_method( + self.update_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.cancel_job: gapic_v1.method.wrap_method( + self.cancel_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.delete_job: gapic_v1.method.wrap_method( + self.delete_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
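The wrapped-method table in ``_prep_wrapped_methods`` above encodes a deliberate split: ``get_job``, ``list_jobs``, and ``cancel_job`` retry on three transient codes, while ``submit_job``, ``submit_job_as_operation``, ``update_job``, and ``delete_job`` retry only on ``UNAVAILABLE``, all under a 900-second deadline. A condensed restatement of the two predicates:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Safe-to-repeat calls (get_job, list_jobs, cancel_job).
    READ_PREDICATE = retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.InternalServerError,
        core_exceptions.ServiceUnavailable,
    )

    # Mutations (submit_job, submit_job_as_operation, update_job, delete_job).
    MUTATION_PREDICATE = retries.if_exception_type(
        core_exceptions.ServiceUnavailable,
    )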
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def submit_job( + self, + ) -> Callable[[jobs.SubmitJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: + raise NotImplementedError() + + @property + def submit_job_as_operation( + self, + ) -> Callable[ + [jobs.SubmitJobRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_job( + self, + ) -> Callable[[jobs.GetJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: + raise NotImplementedError() + + @property + def list_jobs( + self, + ) -> Callable[ + [jobs.ListJobsRequest], + Union[jobs.ListJobsResponse, Awaitable[jobs.ListJobsResponse]], + ]: + raise NotImplementedError() + + @property + def update_job( + self, + ) -> Callable[[jobs.UpdateJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: + raise NotImplementedError() + + @property + def cancel_job( + self, + ) -> Callable[[jobs.CancelJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: + raise NotImplementedError() + + @property + def delete_job( + self, + ) -> Callable[ + [jobs.DeleteJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("JobControllerTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py new file mode 100644 index 000000000000..06e9484203a7 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py @@ -0,0 +1,584 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dataproc_v1.types import jobs + +from .base import DEFAULT_CLIENT_INFO, JobControllerTransport + + +class JobControllerGrpcTransport(JobControllerTransport): + """gRPC backend transport for JobController. + + The JobController provides methods to manage jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def submit_job(self) -> Callable[[jobs.SubmitJobRequest], jobs.Job]: + r"""Return a callable for the submit job method over gRPC. + + Submits a job to a cluster. + + Returns: + Callable[[~.SubmitJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_job" not in self._stubs: + self._stubs["submit_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/SubmitJob", + request_serializer=jobs.SubmitJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["submit_job"] + + @property + def submit_job_as_operation( + self, + ) -> Callable[[jobs.SubmitJobRequest], operations_pb2.Operation]: + r"""Return a callable for the submit job as operation method over gRPC. + + Submits job to a cluster. 
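# Editor's note: every RPC property in this transport repeats the
# lazily-created, cached-stub pattern visible in `submit_job` above. A
# distilled, hypothetical sketch of that pattern (the generated code inlines
# it per method rather than sharing a helper):
def cached_unary_unary(stubs, channel, name, full_method,
                       request_serializer, response_deserializer):
    # Build the stub on first access; afterwards return the cached callable.
    if name not in stubs:
        stubs[name] = channel.unary_unary(
            full_method,
            request_serializer=request_serializer,
            response_deserializer=response_deserializer,
        )
    return stubs[name]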
+ + Returns: + Callable[[~.SubmitJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_job_as_operation" not in self._stubs: + self._stubs["submit_job_as_operation"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/SubmitJobAsOperation", + request_serializer=jobs.SubmitJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["submit_job_as_operation"] + + @property + def get_job(self) -> Callable[[jobs.GetJobRequest], jobs.Job]: + r"""Return a callable for the get job method over gRPC. + + Gets the resource representation for a job in a + project. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job" not in self._stubs: + self._stubs["get_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/GetJob", + request_serializer=jobs.GetJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["get_job"] + + @property + def list_jobs(self) -> Callable[[jobs.ListJobsRequest], jobs.ListJobsResponse]: + r"""Return a callable for the list jobs method over gRPC. + + Lists regions/{region}/jobs in a project. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_jobs" not in self._stubs: + self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/ListJobs", + request_serializer=jobs.ListJobsRequest.serialize, + response_deserializer=jobs.ListJobsResponse.deserialize, + ) + return self._stubs["list_jobs"] + + @property + def update_job(self) -> Callable[[jobs.UpdateJobRequest], jobs.Job]: + r"""Return a callable for the update job method over gRPC. + + Updates a job in a project. + + Returns: + Callable[[~.UpdateJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job" not in self._stubs: + self._stubs["update_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/UpdateJob", + request_serializer=jobs.UpdateJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["update_job"] + + @property + def cancel_job(self) -> Callable[[jobs.CancelJobRequest], jobs.Job]: + r"""Return a callable for the cancel job method over gRPC. + + Starts a job cancellation request. To access the job resource + after cancellation, call + `regions/{region}/jobs.list `__ + or + `regions/{region}/jobs.get `__. 
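# Editor's note: a hedged usage sketch of the cancellation flow described
# above (project, region, and job identifiers are placeholders). Cancellation
# is asynchronous on the server, so callers usually re-fetch the job to
# observe its resulting state:
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient()
client.cancel_job(project_id="my-project", region="us-central1", job_id="my-job")
job = client.get_job(project_id="my-project", region="us-central1", job_id="my-job")
print(job.status.state)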
+ + Returns: + Callable[[~.CancelJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_job" not in self._stubs: + self._stubs["cancel_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/CancelJob", + request_serializer=jobs.CancelJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["cancel_job"] + + @property + def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty_pb2.Empty]: + r"""Return a callable for the delete job method over gRPC. + + Deletes the job from the project. If the job is active, the + delete fails, and the response returns ``FAILED_PRECONDITION``. + + Returns: + Callable[[~.DeleteJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job" not in self._stubs: + self._stubs["delete_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/DeleteJob", + request_serializer=jobs.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_job"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. 
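# Editor's note: a hedged sketch of exercising the IAM mixin above through a
# client (the resource name and permission string are illustrative):
from google.iam.v1 import iam_policy_pb2

request = iam_policy_pb2.TestIamPermissionsRequest(
    resource="projects/my-project/regions/us-central1/jobs/my-job",
    permissions=["dataproc.jobs.get"],
)
response = client.test_iam_permissions(request=request)
print(list(response.permissions))  # the subset of permissions the caller holds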
+ Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("JobControllerGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d9d9c66e475f --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py @@ -0,0 +1,589 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataproc_v1.types import jobs + +from .base import DEFAULT_CLIENT_INFO, JobControllerTransport +from .grpc import JobControllerGrpcTransport + + +class JobControllerGrpcAsyncIOTransport(JobControllerTransport): + """gRPC AsyncIO backend transport for JobController. + + The JobController provides methods to manage jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
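# Editor's note: the transport classes in this directory are selected by the
# `kind` strings they report; a hedged sketch of choosing one explicitly when
# constructing a client:
from google.cloud import dataproc_v1

sync_client = dataproc_v1.JobControllerClient(transport="grpc")
rest_client = dataproc_v1.JobControllerClient(transport="rest")
async_client = dataproc_v1.JobControllerAsyncClient(transport="grpc_asyncio")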
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. 
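# Editor's note: a hedged sketch of the "bring your own channel" path the
# docstring above describes; when `channel` is supplied, the credential
# arguments are ignored (`my_credentials` is a placeholder):
channel = JobControllerGrpcAsyncIOTransport.create_channel(
    "dataproc.googleapis.com",
    credentials=my_credentials,
)
transport = JobControllerGrpcAsyncIOTransport(channel=channel)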
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def submit_job(self) -> Callable[[jobs.SubmitJobRequest], Awaitable[jobs.Job]]: + r"""Return a callable for the submit job method over gRPC. + + Submits a job to a cluster. + + Returns: + Callable[[~.SubmitJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_job" not in self._stubs: + self._stubs["submit_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/SubmitJob", + request_serializer=jobs.SubmitJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["submit_job"] + + @property + def submit_job_as_operation( + self, + ) -> Callable[[jobs.SubmitJobRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the submit job as operation method over gRPC. + + Submits job to a cluster. + + Returns: + Callable[[~.SubmitJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "submit_job_as_operation" not in self._stubs: + self._stubs["submit_job_as_operation"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/SubmitJobAsOperation", + request_serializer=jobs.SubmitJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["submit_job_as_operation"] + + @property + def get_job(self) -> Callable[[jobs.GetJobRequest], Awaitable[jobs.Job]]: + r"""Return a callable for the get job method over gRPC. + + Gets the resource representation for a job in a + project. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job" not in self._stubs: + self._stubs["get_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/GetJob", + request_serializer=jobs.GetJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["get_job"] + + @property + def list_jobs( + self, + ) -> Callable[[jobs.ListJobsRequest], Awaitable[jobs.ListJobsResponse]]: + r"""Return a callable for the list jobs method over gRPC. + + Lists regions/{region}/jobs in a project. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
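# Editor's note: a hedged sketch of driving this async surface from the
# generated async client; the long-running submit returns an operation whose
# result is awaited through the operations client cached above:
async def run_job(client, project_id, region, job):
    operation = await client.submit_job_as_operation(
        project_id=project_id, region=region, job=job
    )
    return await operation.result()  # resolves to the finished jobs.Job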
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_jobs" not in self._stubs: + self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/ListJobs", + request_serializer=jobs.ListJobsRequest.serialize, + response_deserializer=jobs.ListJobsResponse.deserialize, + ) + return self._stubs["list_jobs"] + + @property + def update_job(self) -> Callable[[jobs.UpdateJobRequest], Awaitable[jobs.Job]]: + r"""Return a callable for the update job method over gRPC. + + Updates a job in a project. + + Returns: + Callable[[~.UpdateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job" not in self._stubs: + self._stubs["update_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/UpdateJob", + request_serializer=jobs.UpdateJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["update_job"] + + @property + def cancel_job(self) -> Callable[[jobs.CancelJobRequest], Awaitable[jobs.Job]]: + r"""Return a callable for the cancel job method over gRPC. + + Starts a job cancellation request. To access the job resource + after cancellation, call + `regions/{region}/jobs.list `__ + or + `regions/{region}/jobs.get `__. + + Returns: + Callable[[~.CancelJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_job" not in self._stubs: + self._stubs["cancel_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/CancelJob", + request_serializer=jobs.CancelJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs["cancel_job"] + + @property + def delete_job( + self, + ) -> Callable[[jobs.DeleteJobRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job method over gRPC. + + Deletes the job from the project. If the job is active, the + delete fails, and the response returns ``FAILED_PRECONDITION``. + + Returns: + Callable[[~.DeleteJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job" not in self._stubs: + self._stubs["delete_job"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.JobController/DeleteJob", + request_serializer=jobs.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_job"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
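# Editor's note: a hedged sketch of the long-running-operations mixin above as
# seen from the async client (the operations collection name is a placeholder):
from google.longrunning import operations_pb2

async def list_region_operations(client):
    response = await client.list_operations(
        operations_pb2.ListOperationsRequest(
            name="projects/my-project/regions/us-central1/operations"
        )
    )
    for op in response.operations:
        print(op.name, op.done)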
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("JobControllerGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/rest.py new file mode 100644 index 000000000000..115c56f9a86e --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/rest.py @@ -0,0 +1,1842 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataproc_v1.types import jobs + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import JobControllerTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class JobControllerRestInterceptor: + """Interceptor for JobController. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the JobControllerRestTransport. + + .. 
code-block:: python + class MyCustomJobControllerInterceptor(JobControllerRestInterceptor): + def pre_cancel_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_submit_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_submit_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_submit_job_as_operation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_submit_job_as_operation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job(self, response): + logging.log(f"Received response: {response}") + return response + + transport = JobControllerRestTransport(interceptor=MyCustomJobControllerInterceptor()) + client = JobControllerClient(transport=transport) + + + """ + + def pre_cancel_job( + self, request: jobs.CancelJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.CancelJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_cancel_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for cancel_job + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_delete_job( + self, request: jobs.DeleteJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.DeleteJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def pre_get_job( + self, request: jobs.GetJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.GetJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_get_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for get_job + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. 
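# Editor's note: a minimal working interceptor in the style sketched above; it
# injects one extra metadata header on get_job and leaves responses untouched
# (header name and value are illustrative):
class HeaderInjectingInterceptor(JobControllerRestInterceptor):
    def pre_get_job(self, request, metadata):
        # metadata is a sequence of (key, value) tuples sent with the request.
        return request, list(metadata) + [("x-example-trace", "1")]

transport = JobControllerRestTransport(interceptor=HeaderInjectingInterceptor())
client = JobControllerClient(transport=transport)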
+ """ + return response + + def pre_list_jobs( + self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: + """Post-rpc interceptor for list_jobs + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_submit_job( + self, request: jobs.SubmitJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.SubmitJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for submit_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_submit_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for submit_job + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_submit_job_as_operation( + self, request: jobs.SubmitJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.SubmitJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for submit_job_as_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_submit_job_as_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for submit_job_as_operation + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_update_job( + self, request: jobs.UpdateJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[jobs.UpdateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_update_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for update_job + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobController server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the JobController server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class JobControllerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: JobControllerRestInterceptor + + +class JobControllerRestTransport(JobControllerTransport): + """REST backend transport for JobController. + + The JobController provides methods to manage jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[JobControllerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
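The interceptor class defined above pairs with the transport constructor that follows: passing an instance routes every RPC through the pre/post hooks. A minimal sketch of that wiring, assuming the standard GAPIC module layout for this package and that Application Default Credentials are available; the `project_id` and `region` request fields are taken from this diff's URI templates:

from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.services.job_controller.transports.rest import (
    JobControllerRestInterceptor,
    JobControllerRestTransport,
)


class LoggingInterceptor(JobControllerRestInterceptor):
    def pre_list_jobs(self, request, metadata):
        # Inspect or adjust the request before it goes over the wire.
        print(f"Listing jobs in {request.project_id}/{request.region}")
        return request, metadata


transport = JobControllerRestTransport(interceptor=LoggingInterceptor())
client = dataproc_v1.JobControllerClient(transport=transport)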
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or JobControllerRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def operations_client(self) -> operations_v1.AbstractOperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Only create a new client if we do not already have one.
+ if self._operations_client is None:
+ http_options: Dict[str, List[Dict[str, str]]] = {
+ "google.longrunning.Operations.CancelOperation": [
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel",
+ },
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+ },
+ ],
+ "google.longrunning.Operations.DeleteOperation": [
+ {
+ "method": "delete",
+ "uri": "/v1/{name=projects/*/regions/*/operations/*}",
+ },
+ {
+ "method": "delete",
+ "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+ },
+ ],
+ "google.longrunning.Operations.GetOperation": [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/regions/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+ },
+ ],
+ "google.longrunning.Operations.ListOperations": [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/regions/*/operations}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/operations}",
+ },
+ ],
+ }
+
+ rest_transport = operations_v1.OperationsRestTransport(
+ host=self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ scopes=self._scopes,
+ http_options=http_options,
+ path_prefix="v1",
+ )
+
+ self._operations_client = operations_v1.AbstractOperationsClient(
+ transport=rest_transport
+ )
+
+ # Return the client from cache.
+ return self._operations_client
+
+ class _CancelJob(JobControllerRestStub):
+ def __hash__(self):
+ return hash("CancelJob")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: jobs.CancelJobRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> jobs.Job:
+ r"""Call the cancel job method over HTTP.
+
+ Args:
+ request (~.jobs.CancelJobRequest):
+ The request object. A request to cancel a job.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + A Dataproc job resource. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_job(request, metadata) + pb_request = jobs.CancelJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_job(resp) + return resp + + class _DeleteJob(JobControllerRestStub): + def __hash__(self): + return hash("DeleteJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: jobs.DeleteJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete job method over HTTP. + + Args: + request (~.jobs.DeleteJobRequest): + The request object. A request to delete a job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}", + }, + ] + request, metadata = self._interceptor.pre_delete_job(request, metadata) + pb_request = jobs.DeleteJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetJob(JobControllerRestStub): + def __hash__(self): + return hash("GetJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: jobs.GetJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the get job method over HTTP. + + Args: + request (~.jobs.GetJobRequest): + The request object. A request to get the resource + representation for a job in a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + A Dataproc job resource. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}", + }, + ] + request, metadata = self._interceptor.pre_get_job(request, metadata) + pb_request = jobs.GetJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
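The status-code check above converts HTTP errors into typed `google.api_core` exceptions, so callers can catch, say, a missing job as `NotFound`. A hedged sketch using the flattened parameters that mirror this diff's request fields (the project, region, and job values are illustrative):

from google.api_core import exceptions as core_exceptions
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient(transport="rest")
try:
    client.get_job(
        project_id="my-project",  # illustrative values
        region="us-central1",
        job_id="no-such-job",
    )
except core_exceptions.NotFound as exc:
    # A 404 from the service surfaces as NotFound via from_http_response.
    print(f"Job not found: {exc}")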
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job(resp) + return resp + + class _ListJobs(JobControllerRestStub): + def __hash__(self): + return hash("ListJobs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: jobs.ListJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.ListJobsResponse: + r"""Call the list jobs method over HTTP. + + Args: + request (~.jobs.ListJobsRequest): + The request object. A request to list jobs in a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.ListJobsResponse: + A list of jobs in a project. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/regions/{region}/jobs", + }, + ] + request, metadata = self._interceptor.pre_list_jobs(request, metadata) + pb_request = jobs.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.ListJobsResponse() + pb_resp = jobs.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_jobs(resp) + return resp + + class _SubmitJob(JobControllerRestStub): + def __hash__(self): + return hash("SubmitJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: jobs.SubmitJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the submit job method over HTTP. + + Args: + request (~.jobs.SubmitJobRequest): + The request object. A request to submit a job. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + A Dataproc job resource. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/regions/{region}/jobs:submit", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_submit_job(request, metadata) + pb_request = jobs.SubmitJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_submit_job(resp) + return resp + + class _SubmitJobAsOperation(JobControllerRestStub): + def __hash__(self): + return hash("SubmitJobAsOperation") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: jobs.SubmitJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the submit job as operation method over HTTP. + + Args: + request (~.jobs.SubmitJobRequest): + The request object. A request to submit a job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/regions/{region}/jobs:submitAsOperation", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_submit_job_as_operation( + request, metadata + ) + pb_request = jobs.SubmitJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_submit_job_as_operation(resp) + return resp + + class _UpdateJob(JobControllerRestStub): + def __hash__(self): + return hash("UpdateJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: jobs.UpdateJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Call the update job method over HTTP. + + Args: + request (~.jobs.UpdateJobRequest): + The request object. A request to update a job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + A Dataproc job resource. 
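The `updateMask` default above ensures the mask is always emitted as a query parameter even when the caller leaves it unset. A sketch of a typical update through the client surface, assuming the `UpdateJobRequest` field names used in this transport (the label value is illustrative):

from google.cloud import dataproc_v1
from google.protobuf import field_mask_pb2

client = dataproc_v1.JobControllerClient(transport="rest")
request = dataproc_v1.UpdateJobRequest(
    project_id="my-project",
    region="us-central1",
    job_id="job-123",
    job=dataproc_v1.Job(labels={"team": "data-eng"}),
    update_mask=field_mask_pb2.FieldMask(paths=["labels"]),  # update only labels
)
updated = client.update_job(request=request)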
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_update_job(request, metadata) + pb_request = jobs.UpdateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job(resp) + return resp + + @property + def cancel_job(self) -> Callable[[jobs.CancelJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job(self) -> Callable[[jobs.GetJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_jobs(self) -> Callable[[jobs.ListJobsRequest], jobs.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def submit_job(self) -> Callable[[jobs.SubmitJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SubmitJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def submit_job_as_operation( + self, + ) -> Callable[[jobs.SubmitJobRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SubmitJobAsOperation(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job(self) -> Callable[[jobs.UpdateJobRequest], jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(JobControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(JobControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(JobControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
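These IAM stubs share one request shape across Dataproc resources, which is why the URI list above covers clusters, jobs, operations, templates, and autoscaling policies alike. A sketch against the jobs resource (the resource path and permission string are illustrative):

from google.cloud import dataproc_v1
from google.iam.v1 import iam_policy_pb2

client = dataproc_v1.JobControllerClient(transport="rest")
response = client.test_iam_permissions(
    iam_policy_pb2.TestIamPermissionsRequest(
        resource="projects/my-project/regions/us-central1/jobs/job-123",
        permissions=["dataproc.jobs.get"],
    )
)
print(response.permissions)  # the subset of permissions the caller holds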
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(JobControllerRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(JobControllerRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
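The operations mixins let callers poll and clean up long-running operations by name, independent of the job-level methods. A brief sketch, with the name format taken from the URI templates above (all values illustrative):

from google.cloud import dataproc_v1
from google.longrunning import operations_pb2

client = dataproc_v1.JobControllerClient(transport="rest")
op = client.get_operation(
    operations_pb2.GetOperationRequest(
        name="projects/my-project/regions/us-central1/operations/op-123"
    )
)
if op.done:
    # Remove the finished operation's record; the server replies with Empty.
    client.delete_operation(operations_pb2.DeleteOperationRequest(name=op.name))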
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(JobControllerRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(JobControllerRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("JobControllerRestTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/__init__.py new file mode 100644 index 000000000000..2e2d69d0ea5e --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import NodeGroupControllerAsyncClient +from .client import NodeGroupControllerClient + +__all__ = ( + "NodeGroupControllerClient", + "NodeGroupControllerAsyncClient", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py new file mode 100644 index 000000000000..c2b55949e2ef --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/async_client.py @@ -0,0 +1,1144 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dataproc_v1.types import clusters, node_groups, operations + +from .client import NodeGroupControllerClient +from .transports.base import DEFAULT_CLIENT_INFO, NodeGroupControllerTransport +from .transports.grpc_asyncio import NodeGroupControllerGrpcAsyncIOTransport + + +class NodeGroupControllerAsyncClient: + """The ``NodeGroupControllerService`` provides methods to manage node + groups of Compute Engine managed instances. 
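Since this async client delegates to the synchronous `NodeGroupControllerClient` internally (see its `__init__` below), it accepts the same credential sources. A short sketch assuming Application Default Credentials:

import asyncio

from google.cloud import dataproc_v1


async def main():
    # Credentials are resolved from the environment when not passed in.
    client = dataproc_v1.NodeGroupControllerAsyncClient()
    # ...issue node-group RPCs here...


asyncio.run(main())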
+ """ + + _client: NodeGroupControllerClient + + DEFAULT_ENDPOINT = NodeGroupControllerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = NodeGroupControllerClient.DEFAULT_MTLS_ENDPOINT + + node_group_path = staticmethod(NodeGroupControllerClient.node_group_path) + parse_node_group_path = staticmethod( + NodeGroupControllerClient.parse_node_group_path + ) + common_billing_account_path = staticmethod( + NodeGroupControllerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + NodeGroupControllerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(NodeGroupControllerClient.common_folder_path) + parse_common_folder_path = staticmethod( + NodeGroupControllerClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + NodeGroupControllerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + NodeGroupControllerClient.parse_common_organization_path + ) + common_project_path = staticmethod(NodeGroupControllerClient.common_project_path) + parse_common_project_path = staticmethod( + NodeGroupControllerClient.parse_common_project_path + ) + common_location_path = staticmethod(NodeGroupControllerClient.common_location_path) + parse_common_location_path = staticmethod( + NodeGroupControllerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NodeGroupControllerAsyncClient: The constructed client. + """ + return NodeGroupControllerClient.from_service_account_info.__func__(NodeGroupControllerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NodeGroupControllerAsyncClient: The constructed client. + """ + return NodeGroupControllerClient.from_service_account_file.__func__(NodeGroupControllerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. 
+ + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return NodeGroupControllerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> NodeGroupControllerTransport: + """Returns the transport used by the client instance. + + Returns: + NodeGroupControllerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(NodeGroupControllerClient).get_transport_class, + type(NodeGroupControllerClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, NodeGroupControllerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the node group controller client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.NodeGroupControllerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = NodeGroupControllerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_node_group( + self, + request: Optional[Union[node_groups.CreateNodeGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + node_group: Optional[clusters.NodeGroup] = None, + node_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a node group in a cluster. 
The returned
+ [Operation.metadata][google.longrunning.Operation.metadata] is
+ `NodeGroupOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.NodeGroupOperationMetadata>`__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataproc_v1
+
+ async def sample_create_node_group():
+ # Create a client
+ client = dataproc_v1.NodeGroupControllerAsyncClient()
+
+ # Initialize request argument(s)
+ node_group = dataproc_v1.NodeGroup()
+ node_group.roles = ['DRIVER']
+
+ request = dataproc_v1.CreateNodeGroupRequest(
+ parent="parent_value",
+ node_group=node_group,
+ )
+
+ # Make the request
+ operation = client.create_node_group(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.dataproc_v1.types.CreateNodeGroupRequest, dict]]):
+ The request object. A request to create a node group.
+ parent (:class:`str`):
+ Required. The parent resource where this node group will
+ be created. Format:
+ ``projects/{project}/regions/{region}/clusters/{cluster}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ node_group (:class:`google.cloud.dataproc_v1.types.NodeGroup`):
+ Required. The node group to create.
+ This corresponds to the ``node_group`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ node_group_id (:class:`str`):
+ Optional. An optional node group ID. Generated if not
+ specified.
+
+ The ID must contain only letters (a-z, A-Z), numbers
+ (0-9), underscores (_), and hyphens (-). Cannot begin or
+ end with underscore or hyphen. Must consist of from 3 to
+ 33 characters.
+
+ This corresponds to the ``node_group_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.dataproc_v1.types.NodeGroup` Dataproc Node Group.
+ **The Dataproc NodeGroup resource is not related to
+ the Dataproc
+ [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity]
+ resource.**
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, node_group, node_group_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = node_groups.CreateNodeGroupRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
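As the mutual-exclusion check above enforces, flattened fields and a `request` object cannot be combined in one call. A sketch of the two valid call shapes (the cluster path is illustrative, and only one of the two calls would be made in practice):

from google.cloud import dataproc_v1


async def demo(client: dataproc_v1.NodeGroupControllerAsyncClient):
    node_group = dataproc_v1.NodeGroup()
    node_group.roles = ["DRIVER"]

    # Either pass a fully built request object...
    request = dataproc_v1.CreateNodeGroupRequest(
        parent="projects/my-project/regions/us-central1/clusters/my-cluster",
        node_group=node_group,
    )
    operation = await client.create_node_group(request=request)

    # ...or flattened fields, but never both; mixing them raises ValueError.
    operation = await client.create_node_group(
        parent="projects/my-project/regions/us-central1/clusters/my-cluster",
        node_group=node_group,
    )
    return await operation.result()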
+        if parent is not None:
+            request.parent = parent
+        if node_group is not None:
+            request.node_group = node_group
+        if node_group_id is not None:
+            request.node_group_id = node_group_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_node_group,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            clusters.NodeGroup,
+            metadata_type=operations.NodeGroupOperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def resize_node_group(
+        self,
+        request: Optional[Union[node_groups.ResizeNodeGroupRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        size: Optional[int] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Resizes a node group in a cluster. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] is
+        `NodeGroupOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#nodegroupoperationmetadata>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataproc_v1
+
+            async def sample_resize_node_group():
+                # Create a client
+                client = dataproc_v1.NodeGroupControllerAsyncClient()
+
+                # Initialize request argument(s)
+                request = dataproc_v1.ResizeNodeGroupRequest(
+                    name="name_value",
+                    size=443,
+                )
+
+                # Make the request
+                operation = client.resize_node_group(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataproc_v1.types.ResizeNodeGroupRequest, dict]]):
+                The request object. A request to resize a node group.
+            name (:class:`str`):
+                Required. The name of the node group to resize. Format:
+                ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            size (:class:`int`):
+                Required. The number of running
+                instances for the node group to
+                maintain. The group adds or removes
+                instances to maintain the number of
+                instances specified by this parameter.
+
+                This corresponds to the ``size`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.NodeGroup` Dataproc Node Group. + **The Dataproc NodeGroup resource is not related to + the Dataproc + [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] + resource.** + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, size]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = node_groups.ResizeNodeGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if size is not None: + request.size = size + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resize_node_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clusters.NodeGroup, + metadata_type=operations.NodeGroupOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_node_group( + self, + request: Optional[Union[node_groups.GetNodeGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clusters.NodeGroup: + r"""Gets the resource representation for a node group in + a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_get_node_group(): + # Create a client + client = dataproc_v1.NodeGroupControllerAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.GetNodeGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_node_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.GetNodeGroupRequest, dict]]): + The request object. A request to get a node group . + name (:class:`str`): + Required. The name of the node group to retrieve. + Format: + ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.NodeGroup: + Dataproc Node Group. + **The Dataproc NodeGroup resource is not related to + the Dataproc + [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] + resource.** + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = node_groups.GetNodeGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_node_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
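+        #
+        # Editor's note (illustrative sketch, hedged): `to_grpc_metadata` above
+        # folds the routing fields into a single "x-goog-request-params" header
+        # entry, roughly:
+        #
+        #   gapic_v1.routing_header.to_grpc_metadata((("name", request.name),))
+        #   # -> ("x-goog-request-params", "name=<url-encoded value of request.name>")
+        #
+        # which is how the backend routes the request to the right resource.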
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a function.
+
+        If the function does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for ``TestIamPermissions`` method.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "NodeGroupControllerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("NodeGroupControllerAsyncClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py new file mode 100644 index 000000000000..df0e7d9b3800 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py @@ -0,0 +1,1379 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
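+# Editor's note: an illustrative sketch (not generated code). The async client
+# defined above supports use as an async context manager, which closes its
+# transport on exit; `dataproc_v1` is assumed importable and credentials are
+# assumed available from the environment:
+#
+#   async def main():
+#       async with dataproc_v1.NodeGroupControllerAsyncClient() as client:
+#           ng = await client.get_node_group(
+#               name="projects/p/regions/r/clusters/c/nodeGroups/ng"
+#           )
+#
+# The synchronous client below offers the same via a plain `with` block, with
+# the caveat noted in its __exit__ docstring about shared transports.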
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.dataproc_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+from google.cloud.dataproc_v1.types import clusters, node_groups, operations
+
+from .transports.base import DEFAULT_CLIENT_INFO, NodeGroupControllerTransport
+from .transports.grpc import NodeGroupControllerGrpcTransport
+from .transports.grpc_asyncio import NodeGroupControllerGrpcAsyncIOTransport
+from .transports.rest import NodeGroupControllerRestTransport
+
+
+class NodeGroupControllerClientMeta(type):
+    """Metaclass for the NodeGroupController client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[NodeGroupControllerTransport]]
+    _transport_registry["grpc"] = NodeGroupControllerGrpcTransport
+    _transport_registry["grpc_asyncio"] = NodeGroupControllerGrpcAsyncIOTransport
+    _transport_registry["rest"] = NodeGroupControllerRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[NodeGroupControllerTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class NodeGroupControllerClient(metaclass=NodeGroupControllerClientMeta):
+    """The ``NodeGroupControllerService`` provides methods to manage node
+    groups of Compute Engine managed instances.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataproc.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NodeGroupControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + NodeGroupControllerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> NodeGroupControllerTransport: + """Returns the transport used by the client instance. + + Returns: + NodeGroupControllerTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def node_group_path(
+        project: str,
+        region: str,
+        cluster: str,
+        node_group: str,
+    ) -> str:
+        """Returns a fully-qualified node_group string."""
+        return "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(
+            project=project,
+            region=region,
+            cluster=cluster,
+            node_group=node_group,
+        )
+
+    @staticmethod
+    def parse_node_group_path(path: str) -> Dict[str, str]:
+        """Parses a node_group path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/clusters/(?P<cluster>.+?)/nodeGroups/(?P<node_group>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
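+
+        As an illustration (editor's sketch, not part of the generated
+        docstring), the resolution above can be exercised directly:
+
+        .. code-block:: python
+
+            from google.api_core.client_options import ClientOptions
+
+            options = ClientOptions(api_endpoint=None)  # no explicit override
+            endpoint, cert_source = NodeGroupControllerClient.get_mtls_endpoint_and_cert_source(
+                options
+            )
+            # With the default environment variables, endpoint is expected to be
+            # "dataproc.googleapis.com" and cert_source is expected to be None.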
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, NodeGroupControllerTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the node group controller client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, NodeGroupControllerTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, NodeGroupControllerTransport): + # transport is a NodeGroupControllerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_node_group( + self, + request: Optional[Union[node_groups.CreateNodeGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + node_group: Optional[clusters.NodeGroup] = None, + node_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a node group in a cluster. 
The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] is
+        `NodeGroupOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#nodegroupoperationmetadata>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataproc_v1
+
+            def sample_create_node_group():
+                # Create a client
+                client = dataproc_v1.NodeGroupControllerClient()
+
+                # Initialize request argument(s)
+                node_group = dataproc_v1.NodeGroup()
+                node_group.roles = ['DRIVER']
+
+                request = dataproc_v1.CreateNodeGroupRequest(
+                    parent="parent_value",
+                    node_group=node_group,
+                )
+
+                # Make the request
+                operation = client.create_node_group(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataproc_v1.types.CreateNodeGroupRequest, dict]):
+                The request object. A request to create a node group.
+            parent (str):
+                Required. The parent resource where this node group will
+                be created. Format:
+                ``projects/{project}/regions/{region}/clusters/{cluster}``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            node_group (google.cloud.dataproc_v1.types.NodeGroup):
+                Required. The node group to create.
+                This corresponds to the ``node_group`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            node_group_id (str):
+                Optional. An optional node group ID. Generated if not
+                specified.
+
+                The ID must contain only letters (a-z, A-Z), numbers
+                (0-9), underscores (_), and hyphens (-). Cannot begin or
+                end with underscore or hyphen. Must consist of 3 to
+                33 characters.
+
+                This corresponds to the ``node_group_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.dataproc_v1.types.NodeGroup` Dataproc Node Group.
+                   **The Dataproc NodeGroup resource is not related to
+                   the Dataproc
+                   [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity]
+                   resource.**
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, node_group, node_group_id])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a node_groups.CreateNodeGroupRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
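+        #
+        # Editor's note (illustrative): proto-plus request types accept a dict
+        # or an existing message; the isinstance check below skips a copy when
+        # the caller already passed a CreateNodeGroupRequest. For example:
+        #
+        #   request = {"parent": "projects/p/regions/r/clusters/c"}
+        #   request = node_groups.CreateNodeGroupRequest(request)  # dict -> message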
+        if not isinstance(request, node_groups.CreateNodeGroupRequest):
+            request = node_groups.CreateNodeGroupRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if node_group is not None:
+                request.node_group = node_group
+            if node_group_id is not None:
+                request.node_group_id = node_group_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_node_group]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            clusters.NodeGroup,
+            metadata_type=operations.NodeGroupOperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def resize_node_group(
+        self,
+        request: Optional[Union[node_groups.ResizeNodeGroupRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        size: Optional[int] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Resizes a node group in a cluster. The returned
+        [Operation.metadata][google.longrunning.Operation.metadata] is
+        `NodeGroupOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#nodegroupoperationmetadata>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataproc_v1
+
+            def sample_resize_node_group():
+                # Create a client
+                client = dataproc_v1.NodeGroupControllerClient()
+
+                # Initialize request argument(s)
+                request = dataproc_v1.ResizeNodeGroupRequest(
+                    name="name_value",
+                    size=443,
+                )
+
+                # Make the request
+                operation = client.resize_node_group(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataproc_v1.types.ResizeNodeGroupRequest, dict]):
+                The request object. A request to resize a node group.
+            name (str):
+                Required. The name of the node group to resize. Format:
+                ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            size (int):
+                Required. The number of running
+                instances for the node group to
+                maintain. The group adds or removes
+                instances to maintain the number of
+                instances specified by this parameter.
+
+                This corresponds to the ``size`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.NodeGroup` Dataproc Node Group. + **The Dataproc NodeGroup resource is not related to + the Dataproc + [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] + resource.** + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, size]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a node_groups.ResizeNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, node_groups.ResizeNodeGroupRequest): + request = node_groups.ResizeNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if size is not None: + request.size = size + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resize_node_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clusters.NodeGroup, + metadata_type=operations.NodeGroupOperationMetadata, + ) + + # Done; return the response. + return response + + def get_node_group( + self, + request: Optional[Union[node_groups.GetNodeGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clusters.NodeGroup: + r"""Gets the resource representation for a node group in + a cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_get_node_group(): + # Create a client + client = dataproc_v1.NodeGroupControllerClient() + + # Initialize request argument(s) + request = dataproc_v1.GetNodeGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_node_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.GetNodeGroupRequest, dict]): + The request object. A request to get a node group . + name (str): + Required. The name of the node group to retrieve. 
+ Format: + ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.NodeGroup: + Dataproc Node Group. + **The Dataproc NodeGroup resource is not related to + the Dataproc + [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] + resource.** + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a node_groups.GetNodeGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, node_groups.GetNodeGroupRequest): + request = node_groups.GetNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_node_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "NodeGroupControllerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
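+        #
+        # Editor's note (illustrative): operations_pb2 messages are raw
+        # protobuf classes, not proto-plus wrappers, so a dict must be expanded
+        # into keyword arguments rather than passed directly. For example, with
+        # a hypothetical operations collection name:
+        #
+        #   operations_pb2.ListOperationsRequest(**{"name": "projects/p/regions/r/operations"})
+        #   # is equivalent to:
+        #   operations_pb2.ListOperationsRequest(name="projects/p/regions/r/operations")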
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). 
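[Editor's note] For reference, a small sketch of what the routing-header helper used throughout these methods produces: `to_grpc_metadata` returns a single `x-goog-request-params` metadata pair whose value is the URL-encoded field assignment, which is how the server routes the request to the right resource.

    from google.api_core import gapic_v1

    key, value = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/my-project/regions/us-central1/operations/op-1"),)
    )
    print(key)    # x-goog-request-params
    print(value)  # name=<url-encoded resource name>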
+ A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
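[Editor's note] A minimal sketch of calling `set_iam_policy` through the dict path above, mirroring the JSON policy example in the docstring. The resource path is hypothetical, and whether a given Dataproc resource accepts a particular policy is decided server-side.

    from google.cloud import dataproc_v1
    from google.iam.v1 import policy_pb2

    client = dataproc_v1.NodeGroupControllerClient()
    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/viewer",
                members=["user:eve@example.com"],
            )
        ]
    )
    # The dict is expanded into iam_policy_pb2.SetIamPolicyRequest(**request).
    response = client.set_iam_policy(
        {
            "resource": "projects/my-project/regions/us-central1/clusters/my-cluster",
            "policy": policy,
        }
    )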
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
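[Editor's note] A matching sketch for `test_iam_permissions`: per the docstring above, the response echoes back only the subset of the supplied permissions the caller actually holds (the permission names here are illustrative).

    from google.cloud import dataproc_v1

    client = dataproc_v1.NodeGroupControllerClient()
    response = client.test_iam_permissions(
        {
            "resource": "projects/my-project/regions/us-central1/clusters/my-cluster",
            "permissions": ["dataproc.clusters.get", "dataproc.clusters.update"],
        }
    )
    print(list(response.permissions))  # the subset actually granted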
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("NodeGroupControllerClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/__init__.py new file mode 100644 index 000000000000..bc0643080a3a --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import NodeGroupControllerTransport +from .grpc import NodeGroupControllerGrpcTransport +from .grpc_asyncio import NodeGroupControllerGrpcAsyncIOTransport +from .rest import NodeGroupControllerRestInterceptor, NodeGroupControllerRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[NodeGroupControllerTransport]] +_transport_registry["grpc"] = NodeGroupControllerGrpcTransport +_transport_registry["grpc_asyncio"] = NodeGroupControllerGrpcAsyncIOTransport +_transport_registry["rest"] = NodeGroupControllerRestTransport + +__all__ = ( + "NodeGroupControllerTransport", + "NodeGroupControllerGrpcTransport", + "NodeGroupControllerGrpcAsyncIOTransport", + "NodeGroupControllerRestTransport", + "NodeGroupControllerRestInterceptor", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/base.py new file mode 100644 index 000000000000..6d9f85ca1ecb --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/base.py @@ -0,0 +1,254 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
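[Editor's note] The registry in transports/__init__.py is what lets callers select a transport implementation by name. A small sketch, assuming the client exposes the usual GAPIC `get_transport_class` hook for this lookup (that hook is not shown in this diff hunk, so treat it as an assumption):

    from google.cloud import dataproc_v1

    transport_cls = dataproc_v1.NodeGroupControllerClient.get_transport_class("rest")
    print(transport_cls.__name__)  # NodeGroupControllerRestTransport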
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version +from google.cloud.dataproc_v1.types import clusters, node_groups + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class NodeGroupControllerTransport(abc.ABC): + """Abstract transport class for NodeGroupController.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "dataproc.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_node_group: gapic_v1.method.wrap_method( + self.create_node_group, + default_timeout=None, + client_info=client_info, + ), + self.resize_node_group: gapic_v1.method.wrap_method( + self.resize_node_group, + default_timeout=None, + client_info=client_info, + ), + self.get_node_group: gapic_v1.method.wrap_method( + self.get_node_group, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
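[Editor's note] Two behaviors of the base-transport `__init__` above, sketched with anonymous credentials so nothing touches the network; the private `_host` attribute is inspected purely for illustration.

    from google.api_core import exceptions as core_exceptions
    from google.auth import credentials as ga_credentials

    from google.cloud.dataproc_v1.services.node_group_controller.transports.grpc import (
        NodeGroupControllerGrpcTransport,
    )

    creds = ga_credentials.AnonymousCredentials()

    # 1. `credentials` and `credentials_file` are mutually exclusive.
    try:
        NodeGroupControllerGrpcTransport(
            credentials=creds, credentials_file="key.json"
        )
    except core_exceptions.DuplicateCredentialArgs as exc:
        print(exc)

    # 2. A bare hostname is normalized to port 443.
    transport = NodeGroupControllerGrpcTransport(credentials=creds)
    print(transport._host)  # dataproc.googleapis.com:443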
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_node_group( + self, + ) -> Callable[ + [node_groups.CreateNodeGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def resize_node_group( + self, + ) -> Callable[ + [node_groups.ResizeNodeGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_node_group( + self, + ) -> Callable[ + [node_groups.GetNodeGroupRequest], + Union[clusters.NodeGroup, Awaitable[clusters.NodeGroup]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("NodeGroupControllerTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc.py new file mode 100644 index 000000000000..7b989599d906 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc.py @@ -0,0 +1,491 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dataproc_v1.types import clusters, node_groups + +from .base import DEFAULT_CLIENT_INFO, NodeGroupControllerTransport + + +class NodeGroupControllerGrpcTransport(NodeGroupControllerTransport): + """gRPC backend transport for NodeGroupController. + + The ``NodeGroupControllerService`` provides methods to manage node + groups of Compute Engine managed instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. 
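[Editor's note] A sketch of the `channel` escape hatch handled above: when a ready-made channel is supplied, credentials are deliberately ignored and the transport simply adopts it, which is handy for pointing at a local emulator. The endpoint is hypothetical.

    import grpc

    from google.cloud.dataproc_v1.services.node_group_controller.transports.grpc import (
        NodeGroupControllerGrpcTransport,
    )

    # Credential and mTLS handling is skipped entirely for an explicit channel.
    channel = grpc.insecure_channel("localhost:8080")
    transport = NodeGroupControllerGrpcTransport(channel=channel)
    assert transport.grpc_channel is channel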
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_node_group( + self, + ) -> Callable[[node_groups.CreateNodeGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the create node group method over gRPC. + + Creates a node group in a cluster. The returned + [Operation.metadata][google.longrunning.Operation.metadata] is + `NodeGroupOperationMetadata `__. + + Returns: + Callable[[~.CreateNodeGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_node_group" not in self._stubs: + self._stubs["create_node_group"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.NodeGroupController/CreateNodeGroup", + request_serializer=node_groups.CreateNodeGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_node_group"] + + @property + def resize_node_group( + self, + ) -> Callable[[node_groups.ResizeNodeGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the resize node group method over gRPC. + + Resizes a node group in a cluster. The returned + [Operation.metadata][google.longrunning.Operation.metadata] is + `NodeGroupOperationMetadata `__. + + Returns: + Callable[[~.ResizeNodeGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
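[Editor's note] Since `create_node_group` (and `resize_node_group`) return long-running operations, a typical call blocks on the LRO result. A minimal sketch with hypothetical IDs, assuming Application Default Credentials and that the client-side wrapper returns a `google.api_core.operation.Operation`:

    from google.cloud import dataproc_v1

    client = dataproc_v1.NodeGroupControllerClient()
    operation = client.create_node_group(
        request=dataproc_v1.CreateNodeGroupRequest(
            parent="projects/my-project/regions/us-central1/clusters/my-cluster",
            node_group=dataproc_v1.NodeGroup(
                roles=[dataproc_v1.NodeGroup.Role.DRIVER],
            ),
        )
    )
    node_group = operation.result()  # blocks until the node group is created
    print(node_group.name)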
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resize_node_group" not in self._stubs: + self._stubs["resize_node_group"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.NodeGroupController/ResizeNodeGroup", + request_serializer=node_groups.ResizeNodeGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["resize_node_group"] + + @property + def get_node_group( + self, + ) -> Callable[[node_groups.GetNodeGroupRequest], clusters.NodeGroup]: + r"""Return a callable for the get node group method over gRPC. + + Gets the resource representation for a node group in + a cluster. + + Returns: + Callable[[~.GetNodeGroupRequest], + ~.NodeGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_node_group" not in self._stubs: + self._stubs["get_node_group"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.NodeGroupController/GetNodeGroup", + request_serializer=node_groups.GetNodeGroupRequest.serialize, + response_deserializer=clusters.NodeGroup.deserialize, + ) + return self._stubs["get_node_group"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. 
+ Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("NodeGroupControllerGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc_asyncio.py new file mode 100644 index 000000000000..42a037ccc268 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc_asyncio.py @@ -0,0 +1,496 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataproc_v1.types import clusters, node_groups + +from .base import DEFAULT_CLIENT_INFO, NodeGroupControllerTransport +from .grpc import NodeGroupControllerGrpcTransport + + +class NodeGroupControllerGrpcAsyncIOTransport(NodeGroupControllerTransport): + """gRPC AsyncIO backend transport for NodeGroupController. + + The ``NodeGroupControllerService`` provides methods to manage node + groups of Compute Engine managed instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_node_group( + self, + ) -> Callable[ + [node_groups.CreateNodeGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create node group method over gRPC. + + Creates a node group in a cluster. The returned + [Operation.metadata][google.longrunning.Operation.metadata] is + `NodeGroupOperationMetadata `__. + + Returns: + Callable[[~.CreateNodeGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_node_group" not in self._stubs: + self._stubs["create_node_group"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.NodeGroupController/CreateNodeGroup", + request_serializer=node_groups.CreateNodeGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_node_group"] + + @property + def resize_node_group( + self, + ) -> Callable[ + [node_groups.ResizeNodeGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the resize node group method over gRPC. + + Resizes a node group in a cluster. The returned + [Operation.metadata][google.longrunning.Operation.metadata] is + `NodeGroupOperationMetadata `__. + + Returns: + Callable[[~.ResizeNodeGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resize_node_group" not in self._stubs: + self._stubs["resize_node_group"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.NodeGroupController/ResizeNodeGroup", + request_serializer=node_groups.ResizeNodeGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["resize_node_group"] + + @property + def get_node_group( + self, + ) -> Callable[[node_groups.GetNodeGroupRequest], Awaitable[clusters.NodeGroup]]: + r"""Return a callable for the get node group method over gRPC. + + Gets the resource representation for a node group in + a cluster. + + Returns: + Callable[[~.GetNodeGroupRequest], + Awaitable[~.NodeGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
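[Editor's note] The AsyncIO transport is consumed through the async client rather than directly. A minimal sketch, assuming Application Default Credentials and hypothetical resource IDs:

    import asyncio

    from google.cloud import dataproc_v1


    async def main() -> None:
        # Backed by NodeGroupControllerGrpcAsyncIOTransport by default.
        client = dataproc_v1.NodeGroupControllerAsyncClient()
        node_group = await client.get_node_group(
            name=(
                "projects/my-project/regions/us-central1"
                "/clusters/my-cluster/nodeGroups/my-node-group"
            )
        )
        print(node_group.name)


    asyncio.run(main())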
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_node_group" not in self._stubs: + self._stubs["get_node_group"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.NodeGroupController/GetNodeGroup", + request_serializer=node_groups.GetNodeGroupRequest.serialize, + response_deserializer=clusters.NodeGroup.deserialize, + ) + return self._stubs["get_node_group"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. 
+ Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("NodeGroupControllerGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py new file mode 100644 index 000000000000..62dc0839ca6d --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py @@ -0,0 +1,1389 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.dataproc_v1.types import clusters, node_groups + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import NodeGroupControllerTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class NodeGroupControllerRestInterceptor: + """Interceptor for NodeGroupController. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the NodeGroupControllerRestTransport. + + .. 
code-block:: python + class MyCustomNodeGroupControllerInterceptor(NodeGroupControllerRestInterceptor): + def pre_create_node_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_node_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_node_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_node_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resize_node_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_resize_node_group(self, response): + logging.log(f"Received response: {response}") + return response + + transport = NodeGroupControllerRestTransport(interceptor=MyCustomNodeGroupControllerInterceptor()) + client = NodeGroupControllerClient(transport=transport) + + + """ + + def pre_create_node_group( + self, + request: node_groups.CreateNodeGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[node_groups.CreateNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_node_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_create_node_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_node_group + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + def pre_get_node_group( + self, + request: node_groups.GetNodeGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[node_groups.GetNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_node_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_get_node_group(self, response: clusters.NodeGroup) -> clusters.NodeGroup: + """Post-rpc interceptor for get_node_group + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + def pre_resize_node_group( + self, + request: node_groups.ResizeNodeGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[node_groups.ResizeNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resize_node_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_resize_node_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for resize_node_group + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. 
+ """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroupController server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the NodeGroupController server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class NodeGroupControllerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: NodeGroupControllerRestInterceptor + + +class NodeGroupControllerRestTransport(NodeGroupControllerTransport): + """REST backend transport for NodeGroupController. + + The ``NodeGroupControllerService`` provides methods to manage node + groups of Compute Engine managed instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[NodeGroupControllerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or NodeGroupControllerRestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def operations_client(self) -> operations_v1.AbstractOperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Only create a new client if we do not already have one.
+ if self._operations_client is None:
+ http_options: Dict[str, List[Dict[str, str]]] = {
+ "google.longrunning.Operations.CancelOperation": [
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel",
+ },
+ {
+ "method": "post",
+ "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+ },
+ ],
+ "google.longrunning.Operations.DeleteOperation": [
+ {
+ "method": "delete",
+ "uri": "/v1/{name=projects/*/regions/*/operations/*}",
+ },
+ {
+ "method": "delete",
+ "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+ },
+ ],
+ "google.longrunning.Operations.GetOperation": [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/regions/*/operations/*}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+ },
+ ],
+ "google.longrunning.Operations.ListOperations": [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/regions/*/operations}",
+ },
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/operations}",
+ },
+ ],
+ }
+
+ rest_transport = operations_v1.OperationsRestTransport(
+ host=self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ scopes=self._scopes,
+ http_options=http_options,
+ path_prefix="v1",
+ )
+
+ self._operations_client = operations_v1.AbstractOperationsClient(
+ transport=rest_transport
+ )
+
+ # Return the client from cache.
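+ # Caching the operations client on the transport avoids rebuilding the
+ # REST transport and its session each time a long-running operation is polled.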
+ return self._operations_client + + class _CreateNodeGroup(NodeGroupControllerRestStub): + def __hash__(self): + return hash("CreateNodeGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: node_groups.CreateNodeGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create node group method over HTTP. + + Args: + request (~.node_groups.CreateNodeGroupRequest): + The request object. A request to create a node group. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/regions/*/clusters/*}/nodeGroups", + "body": "node_group", + }, + ] + request, metadata = self._interceptor.pre_create_node_group( + request, metadata + ) + pb_request = node_groups.CreateNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_node_group(resp) + return resp + + class _GetNodeGroup(NodeGroupControllerRestStub): + def __hash__(self): + return hash("GetNodeGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: node_groups.GetNodeGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clusters.NodeGroup: + r"""Call the get node group method over HTTP. 
+
+ Args:
+ request (~.node_groups.GetNodeGroupRequest):
+ The request object. A request to get a node group.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.clusters.NodeGroup:
+ Dataproc Node Group. **The Dataproc ``NodeGroup``
+ resource is not related to the Dataproc
+ [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity]
+ resource.**
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/regions/*/clusters/*/nodeGroups/*}",
+ },
+ ]
+ request, metadata = self._interceptor.pre_get_node_group(request, metadata)
+ pb_request = node_groups.GetNodeGroupRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = clusters.NodeGroup()
+ pb_resp = clusters.NodeGroup.pb(resp)
+
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_get_node_group(resp)
+ return resp
+
+ class _ResizeNodeGroup(NodeGroupControllerRestStub):
+ def __hash__(self):
+ return hash("ResizeNodeGroup")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: node_groups.ResizeNodeGroupRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Call the resize node group method over HTTP.
+
+ Args:
+ request (~.node_groups.ResizeNodeGroupRequest):
+ The request object. A request to resize a node group.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+ This resource represents a
+ long-running operation that is the
+ result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/clusters/*/nodeGroups/*}:resize", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resize_node_group( + request, metadata + ) + pb_request = node_groups.ResizeNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resize_node_group(resp) + return resp + + @property + def create_node_group( + self, + ) -> Callable[[node_groups.CreateNodeGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateNodeGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_node_group( + self, + ) -> Callable[[node_groups.GetNodeGroupRequest], clusters.NodeGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetNodeGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def resize_node_group( + self, + ) -> Callable[[node_groups.ResizeNodeGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResizeNodeGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(NodeGroupControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(NodeGroupControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(NodeGroupControllerRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(NodeGroupControllerRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(NodeGroupControllerRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(NodeGroupControllerRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(NodeGroupControllerRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("NodeGroupControllerRestTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py new file mode 100644 index 000000000000..0e9ed4969eb9 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import WorkflowTemplateServiceAsyncClient +from .client import WorkflowTemplateServiceClient + +__all__ = ( + "WorkflowTemplateServiceClient", + "WorkflowTemplateServiceAsyncClient", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py new file mode 100644 index 000000000000..47ff01da3b9b --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -0,0 +1,1791 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.workflow_template_service import pagers +from google.cloud.dataproc_v1.types import workflow_templates + +from .client import WorkflowTemplateServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, WorkflowTemplateServiceTransport +from .transports.grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport + + +class WorkflowTemplateServiceAsyncClient: + """The API interface for managing Workflow Templates in the + Dataproc API. 
+ """ + + _client: WorkflowTemplateServiceClient + + DEFAULT_ENDPOINT = WorkflowTemplateServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = WorkflowTemplateServiceClient.DEFAULT_MTLS_ENDPOINT + + node_group_path = staticmethod(WorkflowTemplateServiceClient.node_group_path) + parse_node_group_path = staticmethod( + WorkflowTemplateServiceClient.parse_node_group_path + ) + service_path = staticmethod(WorkflowTemplateServiceClient.service_path) + parse_service_path = staticmethod(WorkflowTemplateServiceClient.parse_service_path) + workflow_template_path = staticmethod( + WorkflowTemplateServiceClient.workflow_template_path + ) + parse_workflow_template_path = staticmethod( + WorkflowTemplateServiceClient.parse_workflow_template_path + ) + common_billing_account_path = staticmethod( + WorkflowTemplateServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + WorkflowTemplateServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(WorkflowTemplateServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + WorkflowTemplateServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + WorkflowTemplateServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + WorkflowTemplateServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + WorkflowTemplateServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + WorkflowTemplateServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + WorkflowTemplateServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + WorkflowTemplateServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowTemplateServiceAsyncClient: The constructed client. + """ + return WorkflowTemplateServiceClient.from_service_account_info.__func__(WorkflowTemplateServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + WorkflowTemplateServiceAsyncClient: The constructed client. + """ + return WorkflowTemplateServiceClient.from_service_account_file.__func__(WorkflowTemplateServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ return WorkflowTemplateServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+
+ @property
+ def transport(self) -> WorkflowTemplateServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ WorkflowTemplateServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(WorkflowTemplateServiceClient).get_transport_class,
+ type(WorkflowTemplateServiceClient),
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, WorkflowTemplateServiceTransport] = "grpc_asyncio",
+ client_options: Optional[ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the workflow template service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.WorkflowTemplateServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """ + self._client = WorkflowTemplateServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_workflow_template( + self, + request: Optional[ + Union[workflow_templates.CreateWorkflowTemplateRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + template: Optional[workflow_templates.WorkflowTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Creates new workflow template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_create_workflow_template(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() + + # Initialize request argument(s) + template = dataproc_v1.WorkflowTemplate() + template.id = "id_value" + template.placement.managed_cluster.cluster_name = "cluster_name_value" + template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + template.jobs.step_id = "step_id_value" + + request = dataproc_v1.CreateWorkflowTemplateRequest( + parent="parent_value", + template=template, + ) + + # Make the request + response = await client.create_workflow_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest, dict]]): + The request object. A request to create a workflow + template. + parent (:class:`str`): + Required. The resource name of the region or location, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.workflowTemplates.create``, + the resource name of the region has the following + format: ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.create``, + the resource name of the location has the following + format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`): + Required. The Dataproc workflow + template to create. + + This corresponds to the ``template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.WorkflowTemplate: + A Dataproc workflow template + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workflow_templates.CreateWorkflowTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if template is not None: + request.template = template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_workflow_template( + self, + request: Optional[ + Union[workflow_templates.GetWorkflowTemplateRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Retrieves the latest workflow template. + + Can retrieve previously instantiated template by + specifying optional version parameter. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_get_workflow_template(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.GetWorkflowTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_workflow_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest, dict]]): + The request object. A request to fetch a workflow + template. + name (:class:`str`): + Required. The resource name of the workflow template, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.workflowTemplates.get``, the + resource name of the template has the following + format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.get``, the + resource name of the template has the following + format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dataproc_v1.types.WorkflowTemplate:
+ A Dataproc workflow template
+ resource.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = workflow_templates.GetWorkflowTemplateRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_workflow_template,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def instantiate_workflow_template(
+ self,
+ request: Optional[
+ Union[workflow_templates.InstantiateWorkflowTemplateRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ parameters: Optional[MutableMapping[str, str]] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Instantiates a template and begins execution.
+
+ The returned Operation can be used to track execution of
+ the workflow by polling
+ [operations.get][google.longrunning.Operations.GetOperation].
+ The Operation will complete when the entire workflow is finished.
+
+ The running workflow can be aborted via
+ [operations.cancel][google.longrunning.Operations.CancelOperation].
+ This will cause any inflight jobs to be cancelled and
+ workflow-owned clusters to be deleted.
+
+ The [Operation.metadata][google.longrunning.Operation.metadata]
+ will be
+ `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
+ Also see `Using
+ WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
+
+ On successful completion,
+ [Operation.response][google.longrunning.Operation.response] will
+ be [Empty][google.protobuf.Empty].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_instantiate_workflow_template(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.InstantiateWorkflowTemplateRequest( + name="name_value", + ) + + # Make the request + operation = client.instantiate_workflow_template(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest, dict]]): + The request object. A request to instantiate a workflow + template. + name (:class:`str`): + Required. The resource name of the workflow template, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For + ``projects.regions.workflowTemplates.instantiate``, + the resource name of the template has the following + format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For + ``projects.locations.workflowTemplates.instantiate``, + the resource name of the template has the following + format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parameters (:class:`MutableMapping[str, str]`): + Optional. Map from parameter names to + values that should be used for those + parameters. Values may not exceed 1000 + characters. + + This corresponds to the ``parameters`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, parameters]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workflow_templates.InstantiateWorkflowTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + if parameters: + request.parameters.update(parameters) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
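+ # (With the settings below -- initial=0.1, multiplier=1.3, maximum=60.0 --
+ # retryable errors are retried with exponential backoff: delays of roughly
+ # 0.1s, 0.13s, 0.17s, ... capped at 60s, until the 600s deadline expires.)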
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.instantiate_workflow_template,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ empty_pb2.Empty,
+ metadata_type=workflow_templates.WorkflowMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def instantiate_inline_workflow_template(
+ self,
+ request: Optional[
+ Union[workflow_templates.InstantiateInlineWorkflowTemplateRequest, dict]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ template: Optional[workflow_templates.WorkflowTemplate] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Instantiates a template and begins execution.
+
+ This method is equivalent to executing the sequence
+ [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate],
+ [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+ [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
+
+ The returned Operation can be used to track execution of
+ the workflow by polling
+ [operations.get][google.longrunning.Operations.GetOperation].
+ The Operation will complete when the entire workflow is finished.
+
+ The running workflow can be aborted via
+ [operations.cancel][google.longrunning.Operations.CancelOperation].
+ This will cause any inflight jobs to be cancelled and
+ workflow-owned clusters to be deleted.
+
+ The [Operation.metadata][google.longrunning.Operation.metadata]
+ will be
+ `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
+ Also see `Using
+ WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
+
+ On successful completion,
+ [Operation.response][google.longrunning.Operation.response] will
+ be [Empty][google.protobuf.Empty].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataproc_v1
+
+ async def sample_instantiate_inline_workflow_template():
+ # Create a client
+ client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
+
+ # Initialize request argument(s)
+ template = dataproc_v1.WorkflowTemplate()
+ template.id = "id_value"
+ template.placement.managed_cluster.cluster_name = "cluster_name_value"
+ template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
+ template.jobs.step_id = "step_id_value"
+
+ request = dataproc_v1.InstantiateInlineWorkflowTemplateRequest(
+ parent="parent_value",
+ template=template,
+ )
+
+ # Make the request
+ operation = client.instantiate_inline_workflow_template(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest, dict]]):
+ The request object. A request to instantiate an inline
+ workflow template.
+ parent (:class:`str`):
+ Required. The resource name of the region or location,
+ as described in
+ https://cloud.google.com/apis/design/resource_names.
+
+ - For
+ ``projects.regions.workflowTemplates.instantiateinline``,
+ the resource name of the region has the following
+ format: ``projects/{project_id}/regions/{region}``
+
+ - For
+ ``projects.locations.workflowTemplates.instantiateinline``,
+ the resource name of the location has the following
+ format:
+ ``projects/{project_id}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`):
+ Required. The workflow template to
+ instantiate.
+
+ This corresponds to the ``template`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
+
+ }
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, template])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = workflow_templates.InstantiateInlineWorkflowTemplateRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
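+ # (Assigning a message such as `template` below copies it into the request
+ # object, following proto-plus assignment semantics; later changes to the
+ # caller's object are not reflected in the request.)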
+ if parent is not None: + request.parent = parent + if template is not None: + request.template = template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.instantiate_inline_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=workflow_templates.WorkflowMetadata, + ) + + # Done; return the response. + return response + + async def update_workflow_template( + self, + request: Optional[ + Union[workflow_templates.UpdateWorkflowTemplateRequest, dict] + ] = None, + *, + template: Optional[workflow_templates.WorkflowTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Updates (replaces) workflow template. The updated + template must contain version that matches the current + server version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_update_workflow_template(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() + + # Initialize request argument(s) + template = dataproc_v1.WorkflowTemplate() + template.id = "id_value" + template.placement.managed_cluster.cluster_name = "cluster_name_value" + template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + template.jobs.step_id = "step_id_value" + + request = dataproc_v1.UpdateWorkflowTemplateRequest( + template=template, + ) + + # Make the request + response = await client.update_workflow_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest, dict]]): + The request object. A request to update a workflow + template. + template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`): + Required. The updated workflow template. + + The ``template.version`` field must match the current + version. + + This corresponds to the ``template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataproc_v1.types.WorkflowTemplate: + A Dataproc workflow template + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = workflow_templates.UpdateWorkflowTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if template is not None: + request.template = template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("template.name", request.template.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_workflow_templates( + self, + request: Optional[ + Union[workflow_templates.ListWorkflowTemplatesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkflowTemplatesAsyncPager: + r"""Lists workflows that match the specified filter in + the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + async def sample_list_workflow_templates(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceAsyncClient() + + # Initialize request argument(s) + request = dataproc_v1.ListWorkflowTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workflow_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest, dict]]): + The request object. A request to list workflow templates + in a project. + parent (:class:`str`): + Required. The resource name of the region or location, + as described in + https://cloud.google.com/apis/design/resource_names. 
+
+ - For ``projects.regions.workflowTemplates.list``, the
+ resource name of the region has the following format:
+ ``projects/{project_id}/regions/{region}``
+
+ - For ``projects.locations.workflowTemplates.list``,
+ the resource name of the location has the following
+ format:
+ ``projects/{project_id}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesAsyncPager:
+ A response to a request to list
+ workflow templates in a project.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = workflow_templates.ListWorkflowTemplatesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_workflow_templates,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListWorkflowTemplatesAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_workflow_template(
+ self,
+ request: Optional[
+ Union[workflow_templates.DeleteWorkflowTemplateRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a workflow template. It does not cancel
+ in-progress workflows.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataproc_v1
+
+ async def sample_delete_workflow_template():
+ # Create a client
+ client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = dataproc_v1.DeleteWorkflowTemplateRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ await client.delete_workflow_template(request=request)
+
+ Args:
+ request (Optional[Union[google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest, dict]]):
+ The request object. A request to delete a workflow
+ template.
+ Currently started workflows will remain
+ running.
+ name (:class:`str`):
+ Required. The resource name of the workflow template, as
+ described in
+ https://cloud.google.com/apis/design/resource_names.
+
+ - For ``projects.regions.workflowTemplates.delete``,
+ the resource name of the template has the following
+ format:
+ ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
+
+ - For
+ ``projects.locations.workflowTemplates.delete``,
+ the resource name of the template has the following
+ format:
+ ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = workflow_templates.DeleteWorkflowTemplateRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_workflow_template,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ async def list_operations(
+ self,
+ request: Optional[operations_pb2.ListOperationsRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Lists operations that match the specified filter in the request.
+
+ Args:
+ request (:class:`~.operations_pb2.ListOperationsRequest`):
+ The request object.
Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. 
Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members`` to a single
+ ``role``. Members can be user accounts, service
+ accounts, Google groups, and domains (such as G Suite).
+ A ``role`` is a named list of permissions (defined by
+ IAM or configured by users). A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method(
+ self._client._transport.set_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_iam_policy(
+ self,
+ request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the IAM access control policy for a function.
+
+ Returns an empty policy if the function exists and does not have a
+ policy set.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+ The request object. Request message for `GetIamPolicy`
+ method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if
+ any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members`` to a single
+ ``role``. Members can be user accounts, service
+ accounts, Google groups, and domains (such as G Suite).
+ A ``role`` is a named list of permissions (defined by
+ IAM or configured by users). A ``binding`` can
+ optionally specify a ``condition``, which is a logic
+ expression that further constrains the role binding
+ based on attributes about the request and/or target
+ resource.
+
+ **JSON Example**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+
+ **YAML Example**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method(
+ self._client._transport.get_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def test_iam_permissions(
+ self,
+ request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
+ r"""Tests the specified IAM permissions against the IAM access control
+ policy for a function.
+
+ If the function does not exist, this will return an empty set
+ of permissions, not a NOT_FOUND error.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+ The request object. Request message for
+ `TestIamPermissions` method.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "WorkflowTemplateServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("WorkflowTemplateServiceAsyncClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py new file mode 100644 index 000000000000..ef4f66df8c45 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -0,0 +1,2000 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dataproc_v1.services.workflow_template_service import pagers +from google.cloud.dataproc_v1.types import workflow_templates + +from .transports.base import DEFAULT_CLIENT_INFO, WorkflowTemplateServiceTransport +from .transports.grpc import WorkflowTemplateServiceGrpcTransport +from .transports.grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport +from .transports.rest import WorkflowTemplateServiceRestTransport + + +class WorkflowTemplateServiceClientMeta(type): + """Metaclass for the WorkflowTemplateService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[WorkflowTemplateServiceTransport]] + _transport_registry["grpc"] = WorkflowTemplateServiceGrpcTransport + _transport_registry["grpc_asyncio"] = WorkflowTemplateServiceGrpcAsyncIOTransport + _transport_registry["rest"] = WorkflowTemplateServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[WorkflowTemplateServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class WorkflowTemplateServiceClient(metaclass=WorkflowTemplateServiceClientMeta): + """The API interface for managing Workflow Templates in the + Dataproc API. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
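+
+ For example (illustrative), "dataproc.googleapis.com" becomes
+ "dataproc.mtls.googleapis.com", "foo.sandbox.googleapis.com" becomes
+ "foo.mtls.sandbox.googleapis.com", and an endpoint already containing
+ ".mtls." is returned unchanged.
+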
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "dataproc.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ WorkflowTemplateServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ WorkflowTemplateServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> WorkflowTemplateServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ WorkflowTemplateServiceTransport: The transport used by the client
+ instance.
+ """ + return self._transport + + @staticmethod + def node_group_path( + project: str, + region: str, + cluster: str, + node_group: str, + ) -> str: + """Returns a fully-qualified node_group string.""" + return "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( + project=project, + region=region, + cluster=cluster, + node_group=node_group, + ) + + @staticmethod + def parse_node_group_path(path: str) -> Dict[str, str]: + """Parses a node_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/clusters/(?P.+?)/nodeGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def service_path( + project: str, + location: str, + service: str, + ) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/services/{service}".format( + project=project, + location=location, + service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def workflow_template_path( + project: str, + region: str, + workflow_template: str, + ) -> str: + """Returns a fully-qualified workflow_template string.""" + return "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( + project=project, + region=region, + workflow_template=workflow_template, + ) + + @staticmethod + def parse_workflow_template_path(path: str) -> Dict[str, str]: + """Parses a workflow_template path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/workflowTemplates/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() 
if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
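+ # (Resolution order, as documented above: an explicit
+ # client_options.api_endpoint wins; otherwise "always" -- or "auto" with a
+ # client cert source present -- selects the mTLS endpoint; anything else
+ # falls back to the default endpoint.)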
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, WorkflowTemplateServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the workflow template service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, WorkflowTemplateServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, WorkflowTemplateServiceTransport): + # transport is a WorkflowTemplateServiceTransport instance. 
+ if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_workflow_template( + self, + request: Optional[ + Union[workflow_templates.CreateWorkflowTemplateRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + template: Optional[workflow_templates.WorkflowTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Creates new workflow template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_create_workflow_template(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceClient() + + # Initialize request argument(s) + template = dataproc_v1.WorkflowTemplate() + template.id = "id_value" + template.placement.managed_cluster.cluster_name = "cluster_name_value" + template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + template.jobs.step_id = "step_id_value" + + request = dataproc_v1.CreateWorkflowTemplateRequest( + parent="parent_value", + template=template, + ) + + # Make the request + response = client.create_workflow_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest, dict]): + The request object. A request to create a workflow + template. + parent (str): + Required. The resource name of the region or location, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.workflowTemplates.create``, + the resource name of the region has the following + format: ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.create``, + the resource name of the location has the following + format: + ``projects/{project_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + template (google.cloud.dataproc_v1.types.WorkflowTemplate): + Required. The Dataproc workflow + template to create. 
+ + This corresponds to the ``template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataproc_v1.types.WorkflowTemplate: + A Dataproc workflow template + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workflow_templates.CreateWorkflowTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workflow_templates.CreateWorkflowTemplateRequest): + request = workflow_templates.CreateWorkflowTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if template is not None: + request.template = template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_workflow_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_workflow_template( + self, + request: Optional[ + Union[workflow_templates.GetWorkflowTemplateRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Retrieves the latest workflow template. + + Can retrieve previously instantiated template by + specifying optional version parameter. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_get_workflow_template(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceClient() + + # Initialize request argument(s) + request = dataproc_v1.GetWorkflowTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_workflow_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest, dict]): + The request object. A request to fetch a workflow + template. + name (str): + Required. 
The resource name of the workflow template, as
+                described in
+                https://cloud.google.com/apis/design/resource_names.
+
+                -  For ``projects.regions.workflowTemplates.get``, the
+                   resource name of the template has the following
+                   format:
+                   ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
+
+                -  For ``projects.locations.workflowTemplates.get``, the
+                   resource name of the template has the following
+                   format:
+                   ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataproc_v1.types.WorkflowTemplate:
+                A Dataproc workflow template
+                resource.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a workflow_templates.GetWorkflowTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, workflow_templates.GetWorkflowTemplateRequest):
+            request = workflow_templates.GetWorkflowTemplateRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_workflow_template]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def instantiate_workflow_template(
+        self,
+        request: Optional[
+            Union[workflow_templates.InstantiateWorkflowTemplateRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        parameters: Optional[MutableMapping[str, str]] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Instantiates a template and begins execution.
+
+        The returned Operation can be used to track execution of
+        workflow by polling
+        [operations.get][google.longrunning.Operations.GetOperation].
+        The Operation will complete when entire workflow is finished.
+
+        The running workflow can be aborted via
+        [operations.cancel][google.longrunning.Operations.CancelOperation].
+        This will cause any inflight jobs to be cancelled and
+        workflow-owned clusters to be deleted.
+
+        The [Operation.metadata][google.longrunning.Operation.metadata]
+        will be
+        `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
+        Also see `Using
+        WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
+ + On successful completion, + [Operation.response][google.longrunning.Operation.response] will + be [Empty][google.protobuf.Empty]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_instantiate_workflow_template(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceClient() + + # Initialize request argument(s) + request = dataproc_v1.InstantiateWorkflowTemplateRequest( + name="name_value", + ) + + # Make the request + operation = client.instantiate_workflow_template(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest, dict]): + The request object. A request to instantiate a workflow + template. + name (str): + Required. The resource name of the workflow template, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For + ``projects.regions.workflowTemplates.instantiate``, + the resource name of the template has the following + format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For + ``projects.locations.workflowTemplates.instantiate``, + the resource name of the template has the following + format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + parameters (MutableMapping[str, str]): + Optional. Map from parameter names to + values that should be used for those + parameters. Values may not exceed 1000 + characters. + + This corresponds to the ``parameters`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, parameters]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workflow_templates.InstantiateWorkflowTemplateRequest. 
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(
+            request, workflow_templates.InstantiateWorkflowTemplateRequest
+        ):
+            request = workflow_templates.InstantiateWorkflowTemplateRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+        if parameters is not None:
+            request.parameters = parameters
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.instantiate_workflow_template
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            empty_pb2.Empty,
+            metadata_type=workflow_templates.WorkflowMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def instantiate_inline_workflow_template(
+        self,
+        request: Optional[
+            Union[workflow_templates.InstantiateInlineWorkflowTemplateRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        template: Optional[workflow_templates.WorkflowTemplate] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Instantiates a template and begins execution.
+
+        This method is equivalent to executing the sequence
+        [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate],
+        [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+        [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
+
+        The returned Operation can be used to track execution of
+        workflow by polling
+        [operations.get][google.longrunning.Operations.GetOperation].
+        The Operation will complete when entire workflow is finished.
+
+        The running workflow can be aborted via
+        [operations.cancel][google.longrunning.Operations.CancelOperation].
+        This will cause any inflight jobs to be cancelled and
+        workflow-owned clusters to be deleted.
+
+        The [Operation.metadata][google.longrunning.Operation.metadata]
+        will be
+        `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
+        Also see `Using
+        WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
+
+        On successful completion,
+        [Operation.response][google.longrunning.Operation.response] will
+        be [Empty][google.protobuf.Empty].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataproc_v1
+
+            def sample_instantiate_inline_workflow_template():
+                # Create a client
+                client = dataproc_v1.WorkflowTemplateServiceClient()
+
+                # Initialize request argument(s)
+                template = dataproc_v1.WorkflowTemplate()
+                template.id = "id_value"
+                template.placement.managed_cluster.cluster_name = "cluster_name_value"
+                template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
+                template.jobs.step_id = "step_id_value"
+
+                request = dataproc_v1.InstantiateInlineWorkflowTemplateRequest(
+                    parent="parent_value",
+                    template=template,
+                )
+
+                # Make the request
+                operation = client.instantiate_inline_workflow_template(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest, dict]):
+                The request object. A request to instantiate an inline
+                workflow template.
+            parent (str):
+                Required. The resource name of the region or location,
+                as described in
+                https://cloud.google.com/apis/design/resource_names.
+
+                -  For
+                   ``projects.regions.workflowTemplates.instantiateinline``,
+                   the resource name of the region has the following
+                   format: ``projects/{project_id}/regions/{region}``
+
+                -  For
+                   ``projects.locations.workflowTemplates.instantiateinline``,
+                   the resource name of the location has the following
+                   format:
+                   ``projects/{project_id}/locations/{location}``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            template (google.cloud.dataproc_v1.types.WorkflowTemplate):
+                Required. The workflow template to
+                instantiate.
+
+                This corresponds to the ``template`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                empty messages in your APIs. A typical example is to
+                use it as the request or the response type of an API
+                method. For instance:
+
+                service Foo {
+                    rpc Bar(google.protobuf.Empty) returns
+                    (google.protobuf.Empty);
+
+                }
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, template])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a workflow_templates.InstantiateInlineWorkflowTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+ if not isinstance( + request, workflow_templates.InstantiateInlineWorkflowTemplateRequest + ): + request = workflow_templates.InstantiateInlineWorkflowTemplateRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if template is not None: + request.template = template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.instantiate_inline_workflow_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=workflow_templates.WorkflowMetadata, + ) + + # Done; return the response. + return response + + def update_workflow_template( + self, + request: Optional[ + Union[workflow_templates.UpdateWorkflowTemplateRequest, dict] + ] = None, + *, + template: Optional[workflow_templates.WorkflowTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Updates (replaces) workflow template. The updated + template must contain version that matches the current + server version. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_update_workflow_template(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceClient() + + # Initialize request argument(s) + template = dataproc_v1.WorkflowTemplate() + template.id = "id_value" + template.placement.managed_cluster.cluster_name = "cluster_name_value" + template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" + template.jobs.step_id = "step_id_value" + + request = dataproc_v1.UpdateWorkflowTemplateRequest( + template=template, + ) + + # Make the request + response = client.update_workflow_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest, dict]): + The request object. A request to update a workflow + template. + template (google.cloud.dataproc_v1.types.WorkflowTemplate): + Required. The updated workflow template. + + The ``template.version`` field must match the current + version. + + This corresponds to the ``template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataproc_v1.types.WorkflowTemplate: + A Dataproc workflow template + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a workflow_templates.UpdateWorkflowTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, workflow_templates.UpdateWorkflowTemplateRequest): + request = workflow_templates.UpdateWorkflowTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if template is not None: + request.template = template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_workflow_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("template.name", request.template.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_workflow_templates( + self, + request: Optional[ + Union[workflow_templates.ListWorkflowTemplatesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListWorkflowTemplatesPager: + r"""Lists workflows that match the specified filter in + the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataproc_v1 + + def sample_list_workflow_templates(): + # Create a client + client = dataproc_v1.WorkflowTemplateServiceClient() + + # Initialize request argument(s) + request = dataproc_v1.ListWorkflowTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_workflow_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest, dict]): + The request object. A request to list workflow templates + in a project. + parent (str): + Required. The resource name of the region or location, + as described in + https://cloud.google.com/apis/design/resource_names. 
+
+                -  For ``projects.regions.workflowTemplates.list``, the
+                   resource name of the region has the following format:
+                   ``projects/{project_id}/regions/{region}``
+
+                -  For ``projects.locations.workflowTemplates.list``,
+                   the resource name of the location has the following
+                   format:
+                   ``projects/{project_id}/locations/{location}``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesPager:
+                A response to a request to list
+                workflow templates in a project.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a workflow_templates.ListWorkflowTemplatesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, workflow_templates.ListWorkflowTemplatesRequest):
+            request = workflow_templates.ListWorkflowTemplatesRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_workflow_templates]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListWorkflowTemplatesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
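``list_workflow_templates`` returns the ``ListWorkflowTemplatesPager`` defined in ``pagers.py`` later in this diff. A minimal sketch of page-level iteration (the parent value is a hypothetical example):

```python
from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient()

# Dict requests are coerced into a ListWorkflowTemplatesRequest by the client.
pager = client.list_workflow_templates(
    request={"parent": "projects/my-project/regions/us-central1", "page_size": 50}
)

# Iterate page by page via the ``pages`` property; each page is a
# ListWorkflowTemplatesResponse, and the next page is fetched lazily.
for page in pager.pages:
    for template in page.templates:
        print(template.id)
```

Iterating the pager directly (``for template in pager:``) flattens the same traversal into individual templates.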
+
+    def delete_workflow_template(
+        self,
+        request: Optional[
+            Union[workflow_templates.DeleteWorkflowTemplateRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Deletes a workflow template. It does not cancel
+        in-progress workflows.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataproc_v1
+
+            def sample_delete_workflow_template():
+                # Create a client
+                client = dataproc_v1.WorkflowTemplateServiceClient()
+
+                # Initialize request argument(s)
+                request = dataproc_v1.DeleteWorkflowTemplateRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                client.delete_workflow_template(request=request)
+
+        Args:
+            request (Union[google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest, dict]):
+                The request object. A request to delete a workflow
+                template.
+                Currently started workflows will remain
+                running.
+            name (str):
+                Required. The resource name of the workflow template, as
+                described in
+                https://cloud.google.com/apis/design/resource_names.
+
+                -  For ``projects.regions.workflowTemplates.delete``,
+                   the resource name of the template has the following
+                   format:
+                   ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
+
+                -  For
+                   ``projects.locations.workflowTemplates.delete``,
+                   the resource name of the template has the following
+                   format:
+                   ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a workflow_templates.DeleteWorkflowTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, workflow_templates.DeleteWorkflowTemplateRequest):
+            request = workflow_templates.DeleteWorkflowTemplateRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_workflow_template]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def __enter__(self) -> "WorkflowTemplateServiceClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def set_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the IAM access control policy on the specified function.
+
+        Replaces any existing policy.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
+                The request object. Request message for `SetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
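The docstring above shows the policy shape but no client call; a minimal sketch of replacing a template's policy follows (the resource name and binding are hypothetical, and note that ``SetIamPolicy`` overwrites whatever policy already exists):

```python
from google.cloud import dataproc_v1
from google.iam.v1 import iam_policy_pb2, policy_pb2

client = dataproc_v1.WorkflowTemplateServiceClient()
resource = client.workflow_template_path("my-project", "us-central1", "my-template")

# A single viewer binding; a "condition" block could be added here as well.
policy = policy_pb2.Policy(
    bindings=[
        policy_pb2.Binding(role="roles/viewer", members=["user:eve@example.com"])
    ]
)

response = client.set_iam_policy(
    request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
)
print(response.etag)
```

When other writers may exist, a read-modify-write loop (``get_iam_policy``, mutate the bindings, then ``set_iam_policy`` carrying the returned ``etag``) is the safer pattern.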
+
+    def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+ return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("WorkflowTemplateServiceClient",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py new file mode 100644 index 000000000000..f8a9e8a1483a --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dataproc_v1.types import workflow_templates + + +class ListWorkflowTemplatesPager: + """A pager for iterating through ``list_workflow_templates`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListWorkflowTemplates`` requests and continue to iterate + through the ``templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., workflow_templates.ListWorkflowTemplatesResponse], + request: workflow_templates.ListWorkflowTemplatesRequest, + response: workflow_templates.ListWorkflowTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest): + The initial request object. + response (google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workflow_templates.ListWorkflowTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[workflow_templates.ListWorkflowTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[workflow_templates.WorkflowTemplate]: + for page in self.pages: + yield from page.templates + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListWorkflowTemplatesAsyncPager: + """A pager for iterating through ``list_workflow_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListWorkflowTemplates`` requests and continue to iterate + through the ``templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[workflow_templates.ListWorkflowTemplatesResponse] + ], + request: workflow_templates.ListWorkflowTemplatesRequest, + response: workflow_templates.ListWorkflowTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest): + The initial request object. 
+ response (google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = workflow_templates.ListWorkflowTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[workflow_templates.ListWorkflowTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[workflow_templates.WorkflowTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py new file mode 100644 index 000000000000..7bfc57b2d485 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import WorkflowTemplateServiceTransport +from .grpc import WorkflowTemplateServiceGrpcTransport +from .grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport +from .rest import ( + WorkflowTemplateServiceRestInterceptor, + WorkflowTemplateServiceRestTransport, +) + +# Compile a registry of transports. 
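Note: the async pager mirrors the sync one but drives the `pages` coroutine with `async for`. A sketch under the same illustrative names:

```python
import asyncio

from google.cloud import dataproc_v1


async def list_templates() -> None:
    client = dataproc_v1.WorkflowTemplateServiceAsyncClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    parent = "projects/my-project/regions/us-central1"  # illustrative

    # The async call returns a ListWorkflowTemplatesAsyncPager; `async for`
    # awaits each page fetch produced by the `pages` generator above.
    pager = await client.list_workflow_templates(parent=parent)
    async for template in pager:
        print(template.id)


asyncio.run(list_templates())
```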
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[WorkflowTemplateServiceTransport]] +_transport_registry["grpc"] = WorkflowTemplateServiceGrpcTransport +_transport_registry["grpc_asyncio"] = WorkflowTemplateServiceGrpcAsyncIOTransport +_transport_registry["rest"] = WorkflowTemplateServiceRestTransport + +__all__ = ( + "WorkflowTemplateServiceTransport", + "WorkflowTemplateServiceGrpcTransport", + "WorkflowTemplateServiceGrpcAsyncIOTransport", + "WorkflowTemplateServiceRestTransport", + "WorkflowTemplateServiceRestInterceptor", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py new file mode 100644 index 000000000000..4c767cedb145 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataproc_v1 import gapic_version as package_version +from google.cloud.dataproc_v1.types import workflow_templates + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class WorkflowTemplateServiceTransport(abc.ABC): + """Abstract transport class for WorkflowTemplateService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "dataproc.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
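Note: the registry above maps transport names to classes; the client's `transport` argument accepts either one of these names or a transport instance, and `get_transport_class` resolves names through this mapping. A small sketch:

```python
from google.cloud.dataproc_v1.services.workflow_template_service import (
    WorkflowTemplateServiceClient,
)
from google.cloud.dataproc_v1.services.workflow_template_service.transports import (
    WorkflowTemplateServiceGrpcTransport,
)

# The registry keys mirror the client's transport="grpc" | "grpc_asyncio" | "rest".
cls = WorkflowTemplateServiceClient.get_transport_class("grpc")
assert cls is WorkflowTemplateServiceGrpcTransport
```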
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
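Note: the constructor above resolves credentials with the precedence explicit `credentials` > `credentials_file` > `google.auth.default()`, raising `DuplicateCredentialArgs` if the first two are combined, and enables self-signed JWTs for service-account credentials. A sketch of the two common entry points; the key path is illustrative:

```python
import google.auth
from google.cloud import dataproc_v1
from google.oauth2 import service_account

# 1. Explicit credentials object (takes precedence over everything else).
creds = service_account.Credentials.from_service_account_file(
    "/path/to/key.json"  # illustrative path
)
client = dataproc_v1.WorkflowTemplateServiceClient(credentials=creds)

# 2. No credentials: the transport falls back to Application Default
#    Credentials, as in the `elif credentials is None` branch above.
adc, _project = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
client = dataproc_v1.WorkflowTemplateServiceClient(credentials=adc)
```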
+ self._wrapped_methods = { + self.create_workflow_template: gapic_v1.method.wrap_method( + self.create_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_workflow_template: gapic_v1.method.wrap_method( + self.get_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.instantiate_workflow_template: gapic_v1.method.wrap_method( + self.instantiate_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.instantiate_inline_workflow_template: gapic_v1.method.wrap_method( + self.instantiate_inline_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.update_workflow_template: gapic_v1.method.wrap_method( + self.update_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.list_workflow_templates: gapic_v1.method.wrap_method( + self.list_workflow_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.delete_workflow_template: gapic_v1.method.wrap_method( + self.delete_workflow_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
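Note: `_prep_wrapped_messages` above gives every RPC a default exponential retry (0.1 s initial delay, multiplier 1.3, capped at 60 s, 600 s overall deadline), retrying only `UNAVAILABLE` for mutating calls and also `DEADLINE_EXCEEDED`/`INTERNAL` for the read-only ones. Callers can override per call; a sketch with illustrative names:

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import dataproc_v1

# Same shape as the defaults above, with a tighter overall deadline.
custom_retry = retries.Retry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=300.0,
)

client = dataproc_v1.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
template = client.get_workflow_template(
    name="projects/my-project/regions/us-central1/workflowTemplates/my-template",
    retry=custom_retry,
    timeout=120.0,
)
```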
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_workflow_template( + self, + ) -> Callable[ + [workflow_templates.CreateWorkflowTemplateRequest], + Union[ + workflow_templates.WorkflowTemplate, + Awaitable[workflow_templates.WorkflowTemplate], + ], + ]: + raise NotImplementedError() + + @property + def get_workflow_template( + self, + ) -> Callable[ + [workflow_templates.GetWorkflowTemplateRequest], + Union[ + workflow_templates.WorkflowTemplate, + Awaitable[workflow_templates.WorkflowTemplate], + ], + ]: + raise NotImplementedError() + + @property + def instantiate_workflow_template( + self, + ) -> Callable[ + [workflow_templates.InstantiateWorkflowTemplateRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def instantiate_inline_workflow_template( + self, + ) -> Callable[ + [workflow_templates.InstantiateInlineWorkflowTemplateRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_workflow_template( + self, + ) -> Callable[ + [workflow_templates.UpdateWorkflowTemplateRequest], + Union[ + workflow_templates.WorkflowTemplate, + Awaitable[workflow_templates.WorkflowTemplate], + ], + ]: + raise NotImplementedError() + + @property + def list_workflow_templates( + self, + ) -> Callable[ + [workflow_templates.ListWorkflowTemplatesRequest], + Union[ + workflow_templates.ListWorkflowTemplatesResponse, + Awaitable[workflow_templates.ListWorkflowTemplatesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_workflow_template( + self, + ) -> Callable[ + [workflow_templates.DeleteWorkflowTemplateRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("WorkflowTemplateServiceTransport",) diff --git 
a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py new file mode 100644 index 000000000000..fc20d845cb6e --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py @@ -0,0 +1,666 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dataproc_v1.types import workflow_templates + +from .base import DEFAULT_CLIENT_INFO, WorkflowTemplateServiceTransport + + +class WorkflowTemplateServiceGrpcTransport(WorkflowTemplateServiceTransport): + """gRPC backend transport for WorkflowTemplateService. + + The API interface for managing Workflow Templates in the + Dataproc API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
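Note: the gRPC transport can be selected by name or constructed directly, e.g. to pin a regional endpoint; Application Default Credentials are assumed in this sketch:

```python
from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.services.workflow_template_service.transports import (
    WorkflowTemplateServiceGrpcTransport,
)

# Select the transport by registry name...
client = dataproc_v1.WorkflowTemplateServiceClient(transport="grpc")

# ...or construct it explicitly with a regional host.
transport = WorkflowTemplateServiceGrpcTransport(
    host="us-central1-dataproc.googleapis.com:443"
)
client = dataproc_v1.WorkflowTemplateServiceClient(transport=transport)
```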
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
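Note: for mutual TLS, `client_cert_source_for_mtls` is only consulted when neither `channel` nor `ssl_channel_credentials` is supplied, per the branches above. A sketch; the certificate paths and the `.mtls.` endpoint name are assumptions:

```python
from typing import Tuple

from google.cloud.dataproc_v1.services.workflow_template_service.transports import (
    WorkflowTemplateServiceGrpcTransport,
)


def my_cert_source() -> Tuple[bytes, bytes]:
    # Illustrative callback: return (certificate_chain, private_key) in PEM.
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()


transport = WorkflowTemplateServiceGrpcTransport(
    host="dataproc.mtls.googleapis.com:443",  # assumed mTLS endpoint
    client_cert_source_for_mtls=my_cert_source,
)
```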
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. 
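Note: `create_channel` above delegates to `grpc_helpers.create_channel` with the service's default scopes and host; alternatively a pre-built channel can be injected, in which case the `if channel:` branch ignores all credential arguments. A sketch:

```python
from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.services.workflow_template_service.transports import (
    WorkflowTemplateServiceGrpcTransport,
)

# Build a channel with the transport's defaults (scopes and default host
# are applied inside create_channel).
channel = WorkflowTemplateServiceGrpcTransport.create_channel(
    "us-central1-dataproc.googleapis.com:443"
)

# Injecting the channel bypasses the transport's credential handling.
transport = WorkflowTemplateServiceGrpcTransport(channel=channel)
client = dataproc_v1.WorkflowTemplateServiceClient(transport=transport)
```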
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_workflow_template( + self, + ) -> Callable[ + [workflow_templates.CreateWorkflowTemplateRequest], + workflow_templates.WorkflowTemplate, + ]: + r"""Return a callable for the create workflow template method over gRPC. + + Creates new workflow template. + + Returns: + Callable[[~.CreateWorkflowTemplateRequest], + ~.WorkflowTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workflow_template" not in self._stubs: + self._stubs["create_workflow_template"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate", + request_serializer=workflow_templates.CreateWorkflowTemplateRequest.serialize, + response_deserializer=workflow_templates.WorkflowTemplate.deserialize, + ) + return self._stubs["create_workflow_template"] + + @property + def get_workflow_template( + self, + ) -> Callable[ + [workflow_templates.GetWorkflowTemplateRequest], + workflow_templates.WorkflowTemplate, + ]: + r"""Return a callable for the get workflow template method over gRPC. + + Retrieves the latest workflow template. + + Can retrieve previously instantiated template by + specifying optional version parameter. + + Returns: + Callable[[~.GetWorkflowTemplateRequest], + ~.WorkflowTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workflow_template" not in self._stubs: + self._stubs["get_workflow_template"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate", + request_serializer=workflow_templates.GetWorkflowTemplateRequest.serialize, + response_deserializer=workflow_templates.WorkflowTemplate.deserialize, + ) + return self._stubs["get_workflow_template"] + + @property + def instantiate_workflow_template( + self, + ) -> Callable[ + [workflow_templates.InstantiateWorkflowTemplateRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the instantiate workflow template method over gRPC. + + Instantiates a template and begins execution. + + The returned Operation can be used to track execution of + workflow by polling + [operations.get][google.longrunning.Operations.GetOperation]. + The Operation will complete when entire workflow is finished. + + The running workflow can be aborted via + [operations.cancel][google.longrunning.Operations.CancelOperation]. + This will cause any inflight jobs to be cancelled and + workflow-owned clusters to be deleted. + + The [Operation.metadata][google.longrunning.Operation.metadata] + will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. + + On successful completion, + [Operation.response][google.longrunning.Operation.response] will + be [Empty][google.protobuf.Empty]. + + Returns: + Callable[[~.InstantiateWorkflowTemplateRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
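Note: each property above builds its stub on first access and caches it in `_stubs`; the stubs are also reachable through `client.transport` for advanced use, which bypasses the retry/timeout wrapping from `_prep_wrapped_messages`. A sketch with an illustrative template name:

```python
from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.types import workflow_templates

client = dataproc_v1.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

# Raw stub access: no default retry or timeout is applied at this layer.
request = workflow_templates.GetWorkflowTemplateRequest(
    name="projects/my-project/regions/us-central1/workflowTemplates/my-template"
)
template = client.transport.get_workflow_template(request)
```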
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "instantiate_workflow_template" not in self._stubs: + self._stubs[ + "instantiate_workflow_template" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate", + request_serializer=workflow_templates.InstantiateWorkflowTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["instantiate_workflow_template"] + + @property + def instantiate_inline_workflow_template( + self, + ) -> Callable[ + [workflow_templates.InstantiateInlineWorkflowTemplateRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the instantiate inline workflow + template method over gRPC. + + Instantiates a template and begins execution. + + This method is equivalent to executing the sequence + [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], + [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. + + The returned Operation can be used to track execution of + workflow by polling + [operations.get][google.longrunning.Operations.GetOperation]. + The Operation will complete when entire workflow is finished. + + The running workflow can be aborted via + [operations.cancel][google.longrunning.Operations.CancelOperation]. + This will cause any inflight jobs to be cancelled and + workflow-owned clusters to be deleted. + + The [Operation.metadata][google.longrunning.Operation.metadata] + will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. + + On successful completion, + [Operation.response][google.longrunning.Operation.response] will + be [Empty][google.protobuf.Empty]. + + Returns: + Callable[[~.InstantiateInlineWorkflowTemplateRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "instantiate_inline_workflow_template" not in self._stubs: + self._stubs[ + "instantiate_inline_workflow_template" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", + request_serializer=workflow_templates.InstantiateInlineWorkflowTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["instantiate_inline_workflow_template"] + + @property + def update_workflow_template( + self, + ) -> Callable[ + [workflow_templates.UpdateWorkflowTemplateRequest], + workflow_templates.WorkflowTemplate, + ]: + r"""Return a callable for the update workflow template method over gRPC. + + Updates (replaces) workflow template. The updated + template must contain version that matches the current + server version. + + Returns: + Callable[[~.UpdateWorkflowTemplateRequest], + ~.WorkflowTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_workflow_template" not in self._stubs: + self._stubs["update_workflow_template"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/UpdateWorkflowTemplate", + request_serializer=workflow_templates.UpdateWorkflowTemplateRequest.serialize, + response_deserializer=workflow_templates.WorkflowTemplate.deserialize, + ) + return self._stubs["update_workflow_template"] + + @property + def list_workflow_templates( + self, + ) -> Callable[ + [workflow_templates.ListWorkflowTemplatesRequest], + workflow_templates.ListWorkflowTemplatesResponse, + ]: + r"""Return a callable for the list workflow templates method over gRPC. + + Lists workflows that match the specified filter in + the request. + + Returns: + Callable[[~.ListWorkflowTemplatesRequest], + ~.ListWorkflowTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workflow_templates" not in self._stubs: + self._stubs["list_workflow_templates"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/ListWorkflowTemplates", + request_serializer=workflow_templates.ListWorkflowTemplatesRequest.serialize, + response_deserializer=workflow_templates.ListWorkflowTemplatesResponse.deserialize, + ) + return self._stubs["list_workflow_templates"] + + @property + def delete_workflow_template( + self, + ) -> Callable[[workflow_templates.DeleteWorkflowTemplateRequest], empty_pb2.Empty]: + r"""Return a callable for the delete workflow template method over gRPC. + + Deletes a workflow template. It does not cancel + in-progress workflows. + + Returns: + Callable[[~.DeleteWorkflowTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workflow_template" not in self._stubs: + self._stubs["delete_workflow_template"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate", + request_serializer=workflow_templates.DeleteWorkflowTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_workflow_template"] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
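Note: `close()` above tears down the gRPC channel; the generated client also works as a context manager, so teardown happens deterministically. A sketch:

```python
from google.cloud import dataproc_v1

# __exit__ calls transport.close(), which closes the channel as shown above.
with dataproc_v1.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
) as client:
    for template in client.list_workflow_templates(
        parent="projects/my-project/regions/us-central1"  # illustrative
    ):
        print(template.id)
```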
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("WorkflowTemplateServiceGrpcTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..b9d762fa2205 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py @@ -0,0 +1,669 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataproc_v1.types import workflow_templates + +from .base import DEFAULT_CLIENT_INFO, WorkflowTemplateServiceTransport +from .grpc import WorkflowTemplateServiceGrpcTransport + + +class WorkflowTemplateServiceGrpcAsyncIOTransport(WorkflowTemplateServiceTransport): + """gRPC AsyncIO backend transport for WorkflowTemplateService. + + The API interface for managing Workflow Templates in the + Dataproc API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataproc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_workflow_template( + self, + ) -> Callable[ + [workflow_templates.CreateWorkflowTemplateRequest], + Awaitable[workflow_templates.WorkflowTemplate], + ]: + r"""Return a callable for the create workflow template method over gRPC. + + Creates new workflow template. + + Returns: + Callable[[~.CreateWorkflowTemplateRequest], + Awaitable[~.WorkflowTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_workflow_template" not in self._stubs: + self._stubs["create_workflow_template"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate", + request_serializer=workflow_templates.CreateWorkflowTemplateRequest.serialize, + response_deserializer=workflow_templates.WorkflowTemplate.deserialize, + ) + return self._stubs["create_workflow_template"] + + @property + def get_workflow_template( + self, + ) -> Callable[ + [workflow_templates.GetWorkflowTemplateRequest], + Awaitable[workflow_templates.WorkflowTemplate], + ]: + r"""Return a callable for the get workflow template method over gRPC. + + Retrieves the latest workflow template. + + Can retrieve previously instantiated template by + specifying optional version parameter. + + Returns: + Callable[[~.GetWorkflowTemplateRequest], + Awaitable[~.WorkflowTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_workflow_template" not in self._stubs: + self._stubs["get_workflow_template"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate", + request_serializer=workflow_templates.GetWorkflowTemplateRequest.serialize, + response_deserializer=workflow_templates.WorkflowTemplate.deserialize, + ) + return self._stubs["get_workflow_template"] + + @property + def instantiate_workflow_template( + self, + ) -> Callable[ + [workflow_templates.InstantiateWorkflowTemplateRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the instantiate workflow template method over gRPC. + + Instantiates a template and begins execution. + + The returned Operation can be used to track execution of + workflow by polling + [operations.get][google.longrunning.Operations.GetOperation]. + The Operation will complete when entire workflow is finished. + + The running workflow can be aborted via + [operations.cancel][google.longrunning.Operations.CancelOperation]. + This will cause any inflight jobs to be cancelled and + workflow-owned clusters to be deleted. + + The [Operation.metadata][google.longrunning.Operation.metadata] + will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. + + On successful completion, + [Operation.response][google.longrunning.Operation.response] will + be [Empty][google.protobuf.Empty]. + + Returns: + Callable[[~.InstantiateWorkflowTemplateRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "instantiate_workflow_template" not in self._stubs: + self._stubs[ + "instantiate_workflow_template" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate", + request_serializer=workflow_templates.InstantiateWorkflowTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["instantiate_workflow_template"] + + @property + def instantiate_inline_workflow_template( + self, + ) -> Callable[ + [workflow_templates.InstantiateInlineWorkflowTemplateRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the instantiate inline workflow + template method over gRPC. + + Instantiates a template and begins execution. + + This method is equivalent to executing the sequence + [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], + [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. + + The returned Operation can be used to track execution of + workflow by polling + [operations.get][google.longrunning.Operations.GetOperation]. + The Operation will complete when entire workflow is finished. + + The running workflow can be aborted via + [operations.cancel][google.longrunning.Operations.CancelOperation]. + This will cause any inflight jobs to be cancelled and + workflow-owned clusters to be deleted. + + The [Operation.metadata][google.longrunning.Operation.metadata] + will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. + + On successful completion, + [Operation.response][google.longrunning.Operation.response] will + be [Empty][google.protobuf.Empty]. + + Returns: + Callable[[~.InstantiateInlineWorkflowTemplateRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "instantiate_inline_workflow_template" not in self._stubs: + self._stubs[ + "instantiate_inline_workflow_template" + ] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", + request_serializer=workflow_templates.InstantiateInlineWorkflowTemplateRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["instantiate_inline_workflow_template"] + + @property + def update_workflow_template( + self, + ) -> Callable[ + [workflow_templates.UpdateWorkflowTemplateRequest], + Awaitable[workflow_templates.WorkflowTemplate], + ]: + r"""Return a callable for the update workflow template method over gRPC. + + Updates (replaces) workflow template. The updated + template must contain version that matches the current + server version. + + Returns: + Callable[[~.UpdateWorkflowTemplateRequest], + Awaitable[~.WorkflowTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_workflow_template" not in self._stubs: + self._stubs["update_workflow_template"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/UpdateWorkflowTemplate", + request_serializer=workflow_templates.UpdateWorkflowTemplateRequest.serialize, + response_deserializer=workflow_templates.WorkflowTemplate.deserialize, + ) + return self._stubs["update_workflow_template"] + + @property + def list_workflow_templates( + self, + ) -> Callable[ + [workflow_templates.ListWorkflowTemplatesRequest], + Awaitable[workflow_templates.ListWorkflowTemplatesResponse], + ]: + r"""Return a callable for the list workflow templates method over gRPC. + + Lists workflows that match the specified filter in + the request. + + Returns: + Callable[[~.ListWorkflowTemplatesRequest], + Awaitable[~.ListWorkflowTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_workflow_templates" not in self._stubs: + self._stubs["list_workflow_templates"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/ListWorkflowTemplates", + request_serializer=workflow_templates.ListWorkflowTemplatesRequest.serialize, + response_deserializer=workflow_templates.ListWorkflowTemplatesResponse.deserialize, + ) + return self._stubs["list_workflow_templates"] + + @property + def delete_workflow_template( + self, + ) -> Callable[ + [workflow_templates.DeleteWorkflowTemplateRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete workflow template method over gRPC. + + Deletes a workflow template. It does not cancel + in-progress workflows. + + Returns: + Callable[[~.DeleteWorkflowTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_workflow_template" not in self._stubs: + self._stubs["delete_workflow_template"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate", + request_serializer=workflow_templates.DeleteWorkflowTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_workflow_template"] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
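The list and delete callables above pair naturally: a minimal sketch of iterating the pager and then removing a template, with placeholder resource paths.

from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
parent = "projects/my-project/regions/us-central1"
# The pager transparently issues follow-up ListWorkflowTemplates requests.
for template in client.list_workflow_templates(parent=parent):
    print(template.name, template.version)
# DeleteWorkflowTemplate returns Empty; already running workflows continue.
client.delete_workflow_template(
    name=f"{parent}/workflowTemplates/obsolete-template"  # hypothetical name
)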
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
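A short sketch of the google.longrunning mixin stubs wired up above, reached through the client's operation helpers; the operation name is a hypothetical placeholder.

from google.cloud import dataproc_v1
from google.longrunning import operations_pb2

client = dataproc_v1.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
# get_operation polls a workflow started by an instantiate RPC.
op = client.get_operation(
    request=operations_pb2.GetOperationRequest(
        name="projects/my-project/regions/us-central1/operations/op-1234"
    )
)
print(op.done)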
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("WorkflowTemplateServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py new file mode 100644 index 000000000000..7fbfc0c7f23c --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py @@ -0,0 +1,1980 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataproc_v1.types import workflow_templates + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import WorkflowTemplateServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class WorkflowTemplateServiceRestInterceptor: + """Interceptor for WorkflowTemplateService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the WorkflowTemplateServiceRestTransport. + + .. 
code-block:: python + class MyCustomWorkflowTemplateServiceInterceptor(WorkflowTemplateServiceRestInterceptor): + def pre_create_workflow_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_workflow_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_workflow_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_workflow_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_workflow_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_instantiate_inline_workflow_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_instantiate_inline_workflow_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_instantiate_workflow_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_instantiate_workflow_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_workflow_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_workflow_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_workflow_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_workflow_template(self, response): + logging.log(f"Received response: {response}") + return response + + transport = WorkflowTemplateServiceRestTransport(interceptor=MyCustomWorkflowTemplateServiceInterceptor()) + client = WorkflowTemplateServiceClient(transport=transport) + + + """ + + def pre_create_workflow_template( + self, + request: workflow_templates.CreateWorkflowTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workflow_templates.CreateWorkflowTemplateRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_workflow_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_create_workflow_template( + self, response: workflow_templates.WorkflowTemplate + ) -> workflow_templates.WorkflowTemplate: + """Post-rpc interceptor for create_workflow_template + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_delete_workflow_template( + self, + request: workflow_templates.DeleteWorkflowTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workflow_templates.DeleteWorkflowTemplateRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_workflow_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. 
+ """ + return request, metadata + + def pre_get_workflow_template( + self, + request: workflow_templates.GetWorkflowTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workflow_templates.GetWorkflowTemplateRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_workflow_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_get_workflow_template( + self, response: workflow_templates.WorkflowTemplate + ) -> workflow_templates.WorkflowTemplate: + """Post-rpc interceptor for get_workflow_template + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_instantiate_inline_workflow_template( + self, + request: workflow_templates.InstantiateInlineWorkflowTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workflow_templates.InstantiateInlineWorkflowTemplateRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for instantiate_inline_workflow_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_instantiate_inline_workflow_template( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for instantiate_inline_workflow_template + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_instantiate_workflow_template( + self, + request: workflow_templates.InstantiateWorkflowTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workflow_templates.InstantiateWorkflowTemplateRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for instantiate_workflow_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_instantiate_workflow_template( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for instantiate_workflow_template + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_list_workflow_templates( + self, + request: workflow_templates.ListWorkflowTemplatesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workflow_templates.ListWorkflowTemplatesRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_workflow_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_list_workflow_templates( + self, response: workflow_templates.ListWorkflowTemplatesResponse + ) -> workflow_templates.ListWorkflowTemplatesResponse: + """Post-rpc interceptor for list_workflow_templates + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. 
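A hedged example of exercising the pre/post hooks defined above: a subclass that logs instantiate requests and scrubs a field from list responses before user code sees them. The logging policy and label scrubbing are illustrative choices, not part of the generated surface.

import logging


class AuditingInterceptor(WorkflowTemplateServiceRestInterceptor):
    def pre_instantiate_workflow_template(self, request, metadata):
        logging.info("instantiating %s", request.name)
        return request, metadata

    def post_list_workflow_templates(self, response):
        # Hypothetical policy: hide template labels from downstream code.
        for template in response.templates:
            template.labels.clear()
        return response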
+ """ + return response + + def pre_update_workflow_template( + self, + request: workflow_templates.UpdateWorkflowTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + workflow_templates.UpdateWorkflowTemplateRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_workflow_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_update_workflow_template( + self, response: workflow_templates.WorkflowTemplate + ) -> workflow_templates.WorkflowTemplate: + """Post-rpc interceptor for update_workflow_template + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. 
+ """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the WorkflowTemplateService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the WorkflowTemplateService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class WorkflowTemplateServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: WorkflowTemplateServiceRestInterceptor + + +class WorkflowTemplateServiceRestTransport(WorkflowTemplateServiceTransport): + """REST backend transport for WorkflowTemplateService. + + The API interface for managing Workflow Templates in the + Dataproc API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
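Following the interceptor docstring earlier in this file, a minimal wiring sketch; the regional host is an illustrative assumption and AuditingInterceptor is the hypothetical subclass shown above.

from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient
from google.cloud.dataproc_v1.services.workflow_template_service.transports.rest import (
    WorkflowTemplateServiceRestInterceptor,
    WorkflowTemplateServiceRestTransport,
)

transport = WorkflowTemplateServiceRestTransport(
    host="us-central1-dataproc.googleapis.com",
    interceptor=WorkflowTemplateServiceRestInterceptor(),  # or AuditingInterceptor()
)
client = WorkflowTemplateServiceClient(transport=transport)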
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataproc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[WorkflowTemplateServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or WorkflowTemplateServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateWorkflowTemplate(WorkflowTemplateServiceRestStub): + def __hash__(self): + return hash("CreateWorkflowTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workflow_templates.CreateWorkflowTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Call the create workflow template method over HTTP. + + Args: + request (~.workflow_templates.CreateWorkflowTemplateRequest): + The request object. A request to create a workflow + template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workflow_templates.WorkflowTemplate: + A Dataproc workflow template + resource. 
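Each REST stub below starts by matching the request against its http_options via path_template.transcode. A small illustrative sketch of that step, using a placeholder parent path:

from google.api_core import path_template
from google.cloud.dataproc_v1.types import workflow_templates

http_options = [
    {
        "method": "post",
        "uri": "/v1/{parent=projects/*/regions/*}/workflowTemplates",
        "body": "template",
    },
]
request = workflow_templates.CreateWorkflowTemplateRequest(
    parent="projects/my-project/regions/us-central1"
)
transcoded = path_template.transcode(
    http_options, workflow_templates.CreateWorkflowTemplateRequest.pb(request)
)
# The parent field is bound into the URI; the template field becomes the body.
print(transcoded["method"], transcoded["uri"])
# -> post /v1/projects/my-project/regions/us-central1/workflowTemplates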
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/workflowTemplates", + "body": "template", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/regions/*}/workflowTemplates", + "body": "template", + }, + ] + request, metadata = self._interceptor.pre_create_workflow_template( + request, metadata + ) + pb_request = workflow_templates.CreateWorkflowTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workflow_templates.WorkflowTemplate() + pb_resp = workflow_templates.WorkflowTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_workflow_template(resp) + return resp + + class _DeleteWorkflowTemplate(WorkflowTemplateServiceRestStub): + def __hash__(self): + return hash("DeleteWorkflowTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workflow_templates.DeleteWorkflowTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete workflow template method over HTTP. + + Args: + request (~.workflow_templates.DeleteWorkflowTemplateRequest): + The request object. A request to delete a workflow + template. + Currently started workflows will remain + running. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/workflowTemplates/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/workflowTemplates/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_workflow_template( + request, metadata + ) + pb_request = workflow_templates.DeleteWorkflowTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetWorkflowTemplate(WorkflowTemplateServiceRestStub): + def __hash__(self): + return hash("GetWorkflowTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workflow_templates.GetWorkflowTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Call the get workflow template method over HTTP. + + Args: + request (~.workflow_templates.GetWorkflowTemplateRequest): + The request object. A request to fetch a workflow + template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workflow_templates.WorkflowTemplate: + A Dataproc workflow template + resource. 
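The stubs also serialize any fields not bound into the URI as query parameters, as a sketch of the JSONification step shows; the template name and version are placeholders.

import json

from google.api_core import path_template
from google.protobuf import json_format

from google.cloud.dataproc_v1.types import workflow_templates

request = workflow_templates.GetWorkflowTemplateRequest(
    name="projects/my-project/regions/us-central1/workflowTemplates/tpl",
    version=3,
)
transcoded = path_template.transcode(
    [{"method": "get", "uri": "/v1/{name=projects/*/regions/*/workflowTemplates/*}"}],
    workflow_templates.GetWorkflowTemplateRequest.pb(request),
)
# name was consumed by the URI template; version remains as a query param.
query_params = json.loads(
    json_format.MessageToJson(transcoded["query_params"], use_integers_for_enums=True)
)
print(query_params)  # {'version': 3}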
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/workflowTemplates/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/workflowTemplates/*}", + }, + ] + request, metadata = self._interceptor.pre_get_workflow_template( + request, metadata + ) + pb_request = workflow_templates.GetWorkflowTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workflow_templates.WorkflowTemplate() + pb_resp = workflow_templates.WorkflowTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_workflow_template(resp) + return resp + + class _InstantiateInlineWorkflowTemplate(WorkflowTemplateServiceRestStub): + def __hash__(self): + return hash("InstantiateInlineWorkflowTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workflow_templates.InstantiateInlineWorkflowTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the instantiate inline + workflow template method over HTTP. + + Args: + request (~.workflow_templates.InstantiateInlineWorkflowTemplateRequest): + The request object. A request to instantiate an inline + workflow template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline", + "body": "template", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline", + "body": "template", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_instantiate_inline_workflow_template( + request, metadata + ) + pb_request = workflow_templates.InstantiateInlineWorkflowTemplateRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_instantiate_inline_workflow_template(resp) + return resp + + class _InstantiateWorkflowTemplate(WorkflowTemplateServiceRestStub): + def __hash__(self): + return hash("InstantiateWorkflowTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workflow_templates.InstantiateWorkflowTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the instantiate workflow + template method over HTTP. + + Args: + request (~.workflow_templates.InstantiateWorkflowTemplateRequest): + The request object. A request to instantiate a workflow + template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
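A hedged sketch of consuming the raw Operation these stubs return, going through the REST client surface; the endpoint and resource names are illustrative assumptions.

from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient(
    transport="rest",
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com"},
)
operation = client.instantiate_workflow_template(
    name="projects/my-project/regions/us-central1/workflowTemplates/my-template"
)
# The client wraps operations_pb2.Operation and polls it through the
# operations_client property defined earlier in this transport.
metadata = operation.metadata  # WorkflowMetadata while the workflow runs
operation.result()  # Empty on success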
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_instantiate_workflow_template( + request, metadata + ) + pb_request = workflow_templates.InstantiateWorkflowTemplateRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_instantiate_workflow_template(resp) + return resp + + class _ListWorkflowTemplates(WorkflowTemplateServiceRestStub): + def __hash__(self): + return hash("ListWorkflowTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workflow_templates.ListWorkflowTemplatesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.ListWorkflowTemplatesResponse: + r"""Call the list workflow templates method over HTTP. + + Args: + request (~.workflow_templates.ListWorkflowTemplatesRequest): + The request object. A request to list workflow templates + in a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workflow_templates.ListWorkflowTemplatesResponse: + A response to a request to list + workflow templates in a project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/workflowTemplates", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/regions/*}/workflowTemplates", + }, + ] + request, metadata = self._interceptor.pre_list_workflow_templates( + request, metadata + ) + pb_request = workflow_templates.ListWorkflowTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workflow_templates.ListWorkflowTemplatesResponse() + pb_resp = workflow_templates.ListWorkflowTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_workflow_templates(resp) + return resp + + class _UpdateWorkflowTemplate(WorkflowTemplateServiceRestStub): + def __hash__(self): + return hash("UpdateWorkflowTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: workflow_templates.UpdateWorkflowTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> workflow_templates.WorkflowTemplate: + r"""Call the update workflow template method over HTTP. + + Args: + request (~.workflow_templates.UpdateWorkflowTemplateRequest): + The request object. A request to update a workflow + template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.workflow_templates.WorkflowTemplate: + A Dataproc workflow template + resource. 
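Because the update stub above uses HTTP PUT and the server rejects writes whose version is stale, a read-modify-write loop is the natural usage pattern; paths and the label edit are placeholders.

from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
template = client.get_workflow_template(
    name="projects/my-project/regions/us-central1/workflowTemplates/tpl"
)
template.labels["owner"] = "data-eng"  # hypothetical edit
# The write fails if template.version no longer matches the server's copy.
updated = client.update_workflow_template(template=template)
print(updated.version)  # incremented on success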
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{template.name=projects/*/locations/*/workflowTemplates/*}", + "body": "template", + }, + { + "method": "put", + "uri": "/v1/{template.name=projects/*/regions/*/workflowTemplates/*}", + "body": "template", + }, + ] + request, metadata = self._interceptor.pre_update_workflow_template( + request, metadata + ) + pb_request = workflow_templates.UpdateWorkflowTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = workflow_templates.WorkflowTemplate() + pb_resp = workflow_templates.WorkflowTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_workflow_template(resp) + return resp + + @property + def create_workflow_template( + self, + ) -> Callable[ + [workflow_templates.CreateWorkflowTemplateRequest], + workflow_templates.WorkflowTemplate, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_workflow_template( + self, + ) -> Callable[[workflow_templates.DeleteWorkflowTemplateRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_workflow_template( + self, + ) -> Callable[ + [workflow_templates.GetWorkflowTemplateRequest], + workflow_templates.WorkflowTemplate, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def instantiate_inline_workflow_template( + self, + ) -> Callable[ + [workflow_templates.InstantiateInlineWorkflowTemplateRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
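The properties above hand back the per-method stub instances directly, so a request/response callable can also be invoked without the client-level helpers; a minimal sketch, with the endpoint configuration omitted and the resource name a placeholder.

from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.types import workflow_templates

client = dataproc_v1.WorkflowTemplateServiceClient(transport="rest")
rpc = client.transport.get_workflow_template  # resolves to _GetWorkflowTemplate(...)
template = rpc(
    workflow_templates.GetWorkflowTemplateRequest(
        name="projects/my-project/regions/us-central1/workflowTemplates/tpl"
    )
)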
+ # In C++ this would require a dynamic_cast + return self._InstantiateInlineWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def instantiate_workflow_template( + self, + ) -> Callable[ + [workflow_templates.InstantiateWorkflowTemplateRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InstantiateWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_workflow_templates( + self, + ) -> Callable[ + [workflow_templates.ListWorkflowTemplatesRequest], + workflow_templates.ListWorkflowTemplatesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkflowTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_workflow_template( + self, + ) -> Callable[ + [workflow_templates.UpdateWorkflowTemplateRequest], + workflow_templates.WorkflowTemplate, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(WorkflowTemplateServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(WorkflowTemplateServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
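A hedged sketch of the IAM mixins these stubs back, as surfaced on the client: read the policy, add a binding, and write it back. The template path, role, and member are illustrative assumptions.

from google.cloud import dataproc_v1
from google.iam.v1 import iam_policy_pb2, policy_pb2

client = dataproc_v1.WorkflowTemplateServiceClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
resource = "projects/my-project/regions/us-central1/workflowTemplates/tpl"
policy = client.get_iam_policy(
    request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
)
policy.bindings.append(
    policy_pb2.Binding(role="roles/viewer", members=["user:alice@example.com"])
)
# The returned policy carries an etag, so this write is safely conditional.
client.set_iam_policy(
    request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
)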
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(WorkflowTemplateServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(WorkflowTemplateServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/regions/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(WorkflowTemplateServiceRestStub): + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(WorkflowTemplateServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(WorkflowTemplateServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
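+
+            Example (an illustrative sketch; the parent name is a
+            placeholder, and ``client`` is assumed to expose the
+            operations mixin that this stub backs):
+
+            .. code-block:: python
+
+                from google.longrunning import operations_pb2
+
+                request = operations_pb2.ListOperationsRequest(
+                    name="projects/my-project/regions/us-central1/operations",
+                    page_size=10,
+                )
+                response = client.list_operations(request=request)
+                for operation in response.operations:
+                    print(operation.name, operation.done)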
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/regions/*/operations}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations}", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("WorkflowTemplateServiceRestTransport",) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py new file mode 100644 index 000000000000..3f427bd9ab40 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/__init__.py @@ -0,0 +1,292 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .autoscaling_policies import ( + AutoscalingPolicy, + BasicAutoscalingAlgorithm, + BasicYarnAutoscalingConfig, + CreateAutoscalingPolicyRequest, + DeleteAutoscalingPolicyRequest, + GetAutoscalingPolicyRequest, + InstanceGroupAutoscalingPolicyConfig, + ListAutoscalingPoliciesRequest, + ListAutoscalingPoliciesResponse, + UpdateAutoscalingPolicyRequest, +) +from .batches import ( + Batch, + CreateBatchRequest, + DeleteBatchRequest, + GetBatchRequest, + ListBatchesRequest, + ListBatchesResponse, + PySparkBatch, + SparkBatch, + SparkRBatch, + SparkSqlBatch, +) +from .clusters import ( + AcceleratorConfig, + AutoscalingConfig, + AuxiliaryNodeGroup, + AuxiliaryServicesConfig, + Cluster, + ClusterConfig, + ClusterMetrics, + ClusterStatus, + ConfidentialInstanceConfig, + CreateClusterRequest, + DataprocMetricConfig, + DeleteClusterRequest, + DiagnoseClusterRequest, + DiagnoseClusterResults, + DiskConfig, + EncryptionConfig, + EndpointConfig, + GceClusterConfig, + GetClusterRequest, + IdentityConfig, + InstanceFlexibilityPolicy, + InstanceGroupConfig, + InstanceReference, + KerberosConfig, + LifecycleConfig, + ListClustersRequest, + ListClustersResponse, + ManagedGroupConfig, + MetastoreConfig, + NodeGroup, + NodeGroupAffinity, + NodeInitializationAction, + ReservationAffinity, + SecurityConfig, + ShieldedInstanceConfig, + SoftwareConfig, + StartClusterRequest, + StopClusterRequest, + UpdateClusterRequest, + VirtualClusterConfig, +) +from .jobs import ( + CancelJobRequest, + DeleteJobRequest, + DriverSchedulingConfig, + GetJobRequest, + HadoopJob, + HiveJob, + Job, + JobMetadata, + JobPlacement, + JobReference, + JobScheduling, + JobStatus, + ListJobsRequest, + ListJobsResponse, + LoggingConfig, + PigJob, + PrestoJob, + PySparkJob, + QueryList, + SparkJob, + SparkRJob, + SparkSqlJob, + SubmitJobRequest, + TrinoJob, + UpdateJobRequest, + YarnApplication, +) +from .node_groups import ( + CreateNodeGroupRequest, + GetNodeGroupRequest, + ResizeNodeGroupRequest, +) +from .operations import ( + BatchOperationMetadata, + ClusterOperationMetadata, + ClusterOperationStatus, + NodeGroupOperationMetadata, +) +from .shared import ( + Component, + EnvironmentConfig, + ExecutionConfig, + FailureAction, + GkeClusterConfig, + GkeNodePoolConfig, + GkeNodePoolTarget, + KubernetesClusterConfig, + KubernetesSoftwareConfig, + PeripheralsConfig, + RuntimeConfig, + RuntimeInfo, + SparkHistoryServerConfig, + UsageMetrics, + UsageSnapshot, +) +from .workflow_templates import ( + ClusterOperation, + ClusterSelector, + CreateWorkflowTemplateRequest, + DeleteWorkflowTemplateRequest, + GetWorkflowTemplateRequest, + InstantiateInlineWorkflowTemplateRequest, + InstantiateWorkflowTemplateRequest, + ListWorkflowTemplatesRequest, + ListWorkflowTemplatesResponse, + ManagedCluster, + OrderedJob, + ParameterValidation, + RegexValidation, + TemplateParameter, + UpdateWorkflowTemplateRequest, + ValueValidation, + WorkflowGraph, + WorkflowMetadata, + WorkflowNode, + WorkflowTemplate, + WorkflowTemplatePlacement, +) + +__all__ = ( + "AutoscalingPolicy", + "BasicAutoscalingAlgorithm", + "BasicYarnAutoscalingConfig", + "CreateAutoscalingPolicyRequest", + "DeleteAutoscalingPolicyRequest", + "GetAutoscalingPolicyRequest", + "InstanceGroupAutoscalingPolicyConfig", + "ListAutoscalingPoliciesRequest", + "ListAutoscalingPoliciesResponse", + "UpdateAutoscalingPolicyRequest", + "Batch", + "CreateBatchRequest", + "DeleteBatchRequest", + "GetBatchRequest", + "ListBatchesRequest", + "ListBatchesResponse", + "PySparkBatch", + "SparkBatch", + 
"SparkRBatch", + "SparkSqlBatch", + "AcceleratorConfig", + "AutoscalingConfig", + "AuxiliaryNodeGroup", + "AuxiliaryServicesConfig", + "Cluster", + "ClusterConfig", + "ClusterMetrics", + "ClusterStatus", + "ConfidentialInstanceConfig", + "CreateClusterRequest", + "DataprocMetricConfig", + "DeleteClusterRequest", + "DiagnoseClusterRequest", + "DiagnoseClusterResults", + "DiskConfig", + "EncryptionConfig", + "EndpointConfig", + "GceClusterConfig", + "GetClusterRequest", + "IdentityConfig", + "InstanceFlexibilityPolicy", + "InstanceGroupConfig", + "InstanceReference", + "KerberosConfig", + "LifecycleConfig", + "ListClustersRequest", + "ListClustersResponse", + "ManagedGroupConfig", + "MetastoreConfig", + "NodeGroup", + "NodeGroupAffinity", + "NodeInitializationAction", + "ReservationAffinity", + "SecurityConfig", + "ShieldedInstanceConfig", + "SoftwareConfig", + "StartClusterRequest", + "StopClusterRequest", + "UpdateClusterRequest", + "VirtualClusterConfig", + "CancelJobRequest", + "DeleteJobRequest", + "DriverSchedulingConfig", + "GetJobRequest", + "HadoopJob", + "HiveJob", + "Job", + "JobMetadata", + "JobPlacement", + "JobReference", + "JobScheduling", + "JobStatus", + "ListJobsRequest", + "ListJobsResponse", + "LoggingConfig", + "PigJob", + "PrestoJob", + "PySparkJob", + "QueryList", + "SparkJob", + "SparkRJob", + "SparkSqlJob", + "SubmitJobRequest", + "TrinoJob", + "UpdateJobRequest", + "YarnApplication", + "CreateNodeGroupRequest", + "GetNodeGroupRequest", + "ResizeNodeGroupRequest", + "BatchOperationMetadata", + "ClusterOperationMetadata", + "ClusterOperationStatus", + "NodeGroupOperationMetadata", + "EnvironmentConfig", + "ExecutionConfig", + "GkeClusterConfig", + "GkeNodePoolConfig", + "GkeNodePoolTarget", + "KubernetesClusterConfig", + "KubernetesSoftwareConfig", + "PeripheralsConfig", + "RuntimeConfig", + "RuntimeInfo", + "SparkHistoryServerConfig", + "UsageMetrics", + "UsageSnapshot", + "Component", + "FailureAction", + "ClusterOperation", + "ClusterSelector", + "CreateWorkflowTemplateRequest", + "DeleteWorkflowTemplateRequest", + "GetWorkflowTemplateRequest", + "InstantiateInlineWorkflowTemplateRequest", + "InstantiateWorkflowTemplateRequest", + "ListWorkflowTemplatesRequest", + "ListWorkflowTemplatesResponse", + "ManagedCluster", + "OrderedJob", + "ParameterValidation", + "RegexValidation", + "TemplateParameter", + "UpdateWorkflowTemplateRequest", + "ValueValidation", + "WorkflowGraph", + "WorkflowMetadata", + "WorkflowNode", + "WorkflowTemplate", + "WorkflowTemplatePlacement", +) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py new file mode 100644 index 000000000000..7704fb7689d3 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/autoscaling_policies.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dataproc.v1", + manifest={ + "AutoscalingPolicy", + "BasicAutoscalingAlgorithm", + "BasicYarnAutoscalingConfig", + "InstanceGroupAutoscalingPolicyConfig", + "CreateAutoscalingPolicyRequest", + "GetAutoscalingPolicyRequest", + "UpdateAutoscalingPolicyRequest", + "DeleteAutoscalingPolicyRequest", + "ListAutoscalingPoliciesRequest", + "ListAutoscalingPoliciesResponse", + }, +) + + +class AutoscalingPolicy(proto.Message): + r"""Describes an autoscaling policy for Dataproc cluster + autoscaler. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + Required. The policy id. + + The id must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). Cannot begin or end with + underscore or hyphen. Must consist of between 3 and 50 + characters. + name (str): + Output only. The "resource name" of the autoscaling policy, + as described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies``, the + resource name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies``, the + resource name of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + basic_algorithm (google.cloud.dataproc_v1.types.BasicAutoscalingAlgorithm): + + This field is a member of `oneof`_ ``algorithm``. + worker_config (google.cloud.dataproc_v1.types.InstanceGroupAutoscalingPolicyConfig): + Required. Describes how the autoscaler will + operate for primary workers. + secondary_worker_config (google.cloud.dataproc_v1.types.InstanceGroupAutoscalingPolicyConfig): + Optional. Describes how the autoscaler will + operate for secondary workers. + labels (MutableMapping[str, str]): + Optional. The labels to associate with this autoscaling + policy. Label **keys** must contain 1 to 63 characters, and + must conform to `RFC + 1035 `__. Label + **values** may be empty, but, if present, must contain 1 to + 63 characters, and must conform to `RFC + 1035 `__. No more than + 32 labels can be associated with an autoscaling policy. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + basic_algorithm: "BasicAutoscalingAlgorithm" = proto.Field( + proto.MESSAGE, + number=3, + oneof="algorithm", + message="BasicAutoscalingAlgorithm", + ) + worker_config: "InstanceGroupAutoscalingPolicyConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="InstanceGroupAutoscalingPolicyConfig", + ) + secondary_worker_config: "InstanceGroupAutoscalingPolicyConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="InstanceGroupAutoscalingPolicyConfig", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + + +class BasicAutoscalingAlgorithm(proto.Message): + r"""Basic algorithm for autoscaling. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + yarn_config (google.cloud.dataproc_v1.types.BasicYarnAutoscalingConfig): + Required. YARN autoscaling configuration. 
+ + This field is a member of `oneof`_ ``config``. + cooldown_period (google.protobuf.duration_pb2.Duration): + Optional. Duration between scaling events. A scaling period + starts after the update operation from the previous event + has completed. + + Bounds: [2m, 1d]. Default: 2m. + """ + + yarn_config: "BasicYarnAutoscalingConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="config", + message="BasicYarnAutoscalingConfig", + ) + cooldown_period: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class BasicYarnAutoscalingConfig(proto.Message): + r"""Basic autoscaling configurations for YARN. + + Attributes: + graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): + Required. Timeout for YARN graceful decommissioning of Node + Managers. Specifies the duration to wait for jobs to + complete before forcefully removing workers (and potentially + interrupting jobs). Only applicable to downscaling + operations. + + Bounds: [0s, 1d]. + scale_up_factor (float): + Required. Fraction of average YARN pending memory in the + last cooldown period for which to add workers. A scale-up + factor of 1.0 will result in scaling up so that there is no + pending memory remaining after the update (more aggressive + scaling). A scale-up factor closer to 0 will result in a + smaller magnitude of scaling up (less aggressive scaling). + See `How autoscaling + works `__ + for more information. + + Bounds: [0.0, 1.0]. + scale_down_factor (float): + Required. Fraction of average YARN pending memory in the + last cooldown period for which to remove workers. A + scale-down factor of 1 will result in scaling down so that + there is no available memory remaining after the update + (more aggressive scaling). A scale-down factor of 0 disables + removing workers, which can be beneficial for autoscaling a + single job. See `How autoscaling + works `__ + for more information. + + Bounds: [0.0, 1.0]. + scale_up_min_worker_fraction (float): + Optional. Minimum scale-up threshold as a fraction of total + cluster size before scaling occurs. For example, in a + 20-worker cluster, a threshold of 0.1 means the autoscaler + must recommend at least a 2-worker scale-up for the cluster + to scale. A threshold of 0 means the autoscaler will scale + up on any recommended change. + + Bounds: [0.0, 1.0]. Default: 0.0. + scale_down_min_worker_fraction (float): + Optional. Minimum scale-down threshold as a fraction of + total cluster size before scaling occurs. For example, in a + 20-worker cluster, a threshold of 0.1 means the autoscaler + must recommend at least a 2 worker scale-down for the + cluster to scale. A threshold of 0 means the autoscaler will + scale down on any recommended change. + + Bounds: [0.0, 1.0]. Default: 0.0. + """ + + graceful_decommission_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=5, + message=duration_pb2.Duration, + ) + scale_up_factor: float = proto.Field( + proto.DOUBLE, + number=1, + ) + scale_down_factor: float = proto.Field( + proto.DOUBLE, + number=2, + ) + scale_up_min_worker_fraction: float = proto.Field( + proto.DOUBLE, + number=3, + ) + scale_down_min_worker_fraction: float = proto.Field( + proto.DOUBLE, + number=4, + ) + + +class InstanceGroupAutoscalingPolicyConfig(proto.Message): + r"""Configuration for the size bounds of an instance group, + including its proportional size to other groups. + + Attributes: + min_instances (int): + Optional. Minimum number of instances for this group. 
+ + Primary workers - Bounds: [2, max_instances]. Default: 2. + Secondary workers - Bounds: [0, max_instances]. Default: 0. + max_instances (int): + Required. Maximum number of instances for this group. + Required for primary workers. Note that by default, clusters + will not use secondary workers. Required for secondary + workers if the minimum secondary instances is set. + + Primary workers - Bounds: [min_instances, ). Secondary + workers - Bounds: [min_instances, ). Default: 0. + weight (int): + Optional. Weight for the instance group, which is used to + determine the fraction of total workers in the cluster from + this instance group. For example, if primary workers have + weight 2, and secondary workers have weight 1, the cluster + will have approximately 2 primary workers for each secondary + worker. + + The cluster may not reach the specified balance if + constrained by min/max bounds or other autoscaling settings. + For example, if ``max_instances`` for secondary workers is + 0, then only primary workers will be added. The cluster can + also be out of balance when created. + + If weight is not set on any instance group, the cluster will + default to equal weight for all groups: the cluster will + attempt to maintain an equal number of workers in each group + within the configured size bounds for each group. If weight + is set for one group only, the cluster will default to zero + weight on the unset group. For example if weight is set only + on primary workers, the cluster will use primary workers + only and no secondary workers. + """ + + min_instances: int = proto.Field( + proto.INT32, + number=1, + ) + max_instances: int = proto.Field( + proto.INT32, + number=2, + ) + weight: int = proto.Field( + proto.INT32, + number=3, + ) + + +class CreateAutoscalingPolicyRequest(proto.Message): + r"""A request to create an autoscaling policy. + + Attributes: + parent (str): + Required. The "resource name" of the region or location, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.create``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.create``, + the resource name of the location has the following + format: ``projects/{project_id}/locations/{location}`` + policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): + Required. The autoscaling policy to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + policy: "AutoscalingPolicy" = proto.Field( + proto.MESSAGE, + number=2, + message="AutoscalingPolicy", + ) + + +class GetAutoscalingPolicyRequest(proto.Message): + r"""A request to fetch an autoscaling policy. + + Attributes: + name (str): + Required. The "resource name" of the autoscaling policy, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.get``, the + resource name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.get``, the + resource name of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAutoscalingPolicyRequest(proto.Message): + r"""A request to update an autoscaling policy. 
+ + Attributes: + policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): + Required. The updated autoscaling policy. + """ + + policy: "AutoscalingPolicy" = proto.Field( + proto.MESSAGE, + number=1, + message="AutoscalingPolicy", + ) + + +class DeleteAutoscalingPolicyRequest(proto.Message): + r"""A request to delete an autoscaling policy. + + Autoscaling policies in use by one or more clusters will not be + deleted. + + Attributes: + name (str): + Required. The "resource name" of the autoscaling policy, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.delete``, the + resource name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.delete``, + the resource name of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListAutoscalingPoliciesRequest(proto.Message): + r"""A request to list autoscaling policies in a project. + + Attributes: + parent (str): + Required. The "resource name" of the region or location, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.autoscalingPolicies.list``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.list``, the + resource name of the location has the following format: + ``projects/{project_id}/locations/{location}`` + page_size (int): + Optional. The maximum number of results to + return in each response. Must be less than or + equal to 1000. Defaults to 100. + page_token (str): + Optional. The page token, returned by a + previous call, to request the next page of + results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAutoscalingPoliciesResponse(proto.Message): + r"""A response to a request to list autoscaling policies in a + project. + + Attributes: + policies (MutableSequence[google.cloud.dataproc_v1.types.AutoscalingPolicy]): + Output only. Autoscaling policies list. + next_page_token (str): + Output only. This token is included in the + response if there are more results to fetch. + """ + + @property + def raw_page(self): + return self + + policies: MutableSequence["AutoscalingPolicy"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AutoscalingPolicy", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py new file mode 100644 index 000000000000..e32a069b2e1b --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/batches.py @@ -0,0 +1,633 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.dataproc_v1.types import shared
+
+__protobuf__ = proto.module(
+    package="google.cloud.dataproc.v1",
+    manifest={
+        "CreateBatchRequest",
+        "GetBatchRequest",
+        "ListBatchesRequest",
+        "ListBatchesResponse",
+        "DeleteBatchRequest",
+        "Batch",
+        "PySparkBatch",
+        "SparkBatch",
+        "SparkRBatch",
+        "SparkSqlBatch",
+    },
+)
+
+
+class CreateBatchRequest(proto.Message):
+    r"""A request to create a batch workload.
+
+    Attributes:
+        parent (str):
+            Required. The parent resource where this
+            batch will be created.
+        batch (google.cloud.dataproc_v1.types.Batch):
+            Required. The batch to create.
+        batch_id (str):
+            Optional. The ID to use for the batch, which will become the
+            final component of the batch's resource name.
+
+            This value must be 4-63 characters. Valid characters are
+            ``/[a-z][0-9]-/``.
+        request_id (str):
+            Optional. A unique ID used to identify the request. If the
+            service receives two
+            `CreateBatchRequest <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.locations.batches/create>`__\ s
+            with the same request_id, the second request is ignored and
+            the Operation that corresponds to the first Batch created
+            and stored in the backend is returned.
+
+            Recommendation: Set this value to a
+            `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__.
+
+            The value must contain only letters (a-z, A-Z), numbers
+            (0-9), underscores (_), and hyphens (-). The maximum length
+            is 40 characters.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    batch: "Batch" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="Batch",
+    )
+    batch_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class GetBatchRequest(proto.Message):
+    r"""A request to get the resource representation for a batch
+    workload.
+
+    Attributes:
+        name (str):
+            Required. The fully qualified name of the batch to retrieve
+            in the format
+            "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID".
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListBatchesRequest(proto.Message):
+    r"""A request to list batch workloads in a project.
+
+    Attributes:
+        parent (str):
+            Required. The parent, which owns this
+            collection of batches.
+        page_size (int):
+            Optional. The maximum number of batches to
+            return in each response. The service may return
+            fewer than this value. The default page size is
+            20; the maximum page size is 1000.
+        page_token (str):
+            Optional. A page token received from a previous
+            ``ListBatches`` call. Provide this token to retrieve the
+            subsequent page.
+        filter (str):
+            Optional. A filter for the batches to return in the
+            response.
+
+            A filter is a logical expression constraining the values of
+            various fields in each batch resource. Filters are case
+            sensitive, and may contain multiple clauses combined with
+            logical operators (AND/OR). Supported fields are
+            ``batch_id``, ``batch_uuid``, ``state``, and
+            ``create_time``.
+
+            e.g.
+ ``state = RUNNING and create_time < "2023-01-01T00:00:00Z"`` + filters for batches in state RUNNING that were created + before 2023-01-01 + + See https://google.aip.dev/assets/misc/ebnf-filtering.txt + for a detailed description of the filter syntax and a list + of supported comparisons. + order_by (str): + Optional. Field(s) on which to sort the list of batches. + + Currently the only supported sort orders are unspecified + (empty) and ``create_time desc`` to sort by most recently + created batches first. + + See https://google.aip.dev/132#ordering for more details. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListBatchesResponse(proto.Message): + r"""A list of batch workloads. + + Attributes: + batches (MutableSequence[google.cloud.dataproc_v1.types.Batch]): + The batches from the specified collection. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + batches: MutableSequence["Batch"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Batch", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteBatchRequest(proto.Message): + r"""A request to delete a batch workload. + + Attributes: + name (str): + Required. The fully qualified name of the batch to retrieve + in the format + "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Batch(proto.Message): + r"""A representation of a batch workload in the service. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The resource name of the batch. + uuid (str): + Output only. A batch UUID (Unique Universal + Identifier). The service generates this value + when it creates the batch. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the batch was + created. + pyspark_batch (google.cloud.dataproc_v1.types.PySparkBatch): + Optional. PySpark batch config. + + This field is a member of `oneof`_ ``batch_config``. + spark_batch (google.cloud.dataproc_v1.types.SparkBatch): + Optional. Spark batch config. + + This field is a member of `oneof`_ ``batch_config``. + spark_r_batch (google.cloud.dataproc_v1.types.SparkRBatch): + Optional. SparkR batch config. + + This field is a member of `oneof`_ ``batch_config``. + spark_sql_batch (google.cloud.dataproc_v1.types.SparkSqlBatch): + Optional. SparkSql batch config. + + This field is a member of `oneof`_ ``batch_config``. + runtime_info (google.cloud.dataproc_v1.types.RuntimeInfo): + Output only. Runtime information about batch + execution. + state (google.cloud.dataproc_v1.types.Batch.State): + Output only. The state of the batch. + state_message (str): + Output only. Batch state details, such as a failure + description if the state is ``FAILED``. 
+ state_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the batch entered + a current state. + creator (str): + Output only. The email address of the user + who created the batch. + labels (MutableMapping[str, str]): + Optional. The labels to associate with this batch. Label + **keys** must contain 1 to 63 characters, and must conform + to `RFC 1035 `__. + Label **values** may be empty, but, if present, must contain + 1 to 63 characters, and must conform to `RFC + 1035 `__. No more than + 32 labels can be associated with a batch. + runtime_config (google.cloud.dataproc_v1.types.RuntimeConfig): + Optional. Runtime configuration for the batch + execution. + environment_config (google.cloud.dataproc_v1.types.EnvironmentConfig): + Optional. Environment configuration for the + batch execution. + operation (str): + Output only. The resource name of the + operation associated with this batch. + state_history (MutableSequence[google.cloud.dataproc_v1.types.Batch.StateHistory]): + Output only. Historical state information for + the batch. + """ + + class State(proto.Enum): + r"""The batch state. + + Values: + STATE_UNSPECIFIED (0): + The batch state is unknown. + PENDING (1): + The batch is created before running. + RUNNING (2): + The batch is running. + CANCELLING (3): + The batch is cancelling. + CANCELLED (4): + The batch cancellation was successful. + SUCCEEDED (5): + The batch completed successfully. + FAILED (6): + The batch is no longer running due to an + error. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + CANCELLING = 3 + CANCELLED = 4 + SUCCEEDED = 5 + FAILED = 6 + + class StateHistory(proto.Message): + r"""Historical state information. + + Attributes: + state (google.cloud.dataproc_v1.types.Batch.State): + Output only. The state of the batch at this + point in history. + state_message (str): + Output only. Details about the state at this + point in history. + state_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the batch entered + the historical state. 
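+
+        Example (an illustrative sketch; ``batch`` is assumed to be a
+        previously fetched :class:`Batch`):
+
+        .. code-block:: python
+
+            for entry in batch.state_history:
+                print(entry.state.name, entry.state_start_time, entry.state_message)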
+ """ + + state: "Batch.State" = proto.Field( + proto.ENUM, + number=1, + enum="Batch.State", + ) + state_message: str = proto.Field( + proto.STRING, + number=2, + ) + state_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uuid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + pyspark_batch: "PySparkBatch" = proto.Field( + proto.MESSAGE, + number=4, + oneof="batch_config", + message="PySparkBatch", + ) + spark_batch: "SparkBatch" = proto.Field( + proto.MESSAGE, + number=5, + oneof="batch_config", + message="SparkBatch", + ) + spark_r_batch: "SparkRBatch" = proto.Field( + proto.MESSAGE, + number=6, + oneof="batch_config", + message="SparkRBatch", + ) + spark_sql_batch: "SparkSqlBatch" = proto.Field( + proto.MESSAGE, + number=7, + oneof="batch_config", + message="SparkSqlBatch", + ) + runtime_info: shared.RuntimeInfo = proto.Field( + proto.MESSAGE, + number=8, + message=shared.RuntimeInfo, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + state_message: str = proto.Field( + proto.STRING, + number=10, + ) + state_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + creator: str = proto.Field( + proto.STRING, + number=12, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + runtime_config: shared.RuntimeConfig = proto.Field( + proto.MESSAGE, + number=14, + message=shared.RuntimeConfig, + ) + environment_config: shared.EnvironmentConfig = proto.Field( + proto.MESSAGE, + number=15, + message=shared.EnvironmentConfig, + ) + operation: str = proto.Field( + proto.STRING, + number=16, + ) + state_history: MutableSequence[StateHistory] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message=StateHistory, + ) + + +class PySparkBatch(proto.Message): + r"""A configuration for running an `Apache + PySpark `__ + batch workload. + + Attributes: + main_python_file_uri (str): + Required. The HCFS URI of the main Python + file to use as the Spark driver. Must be a .py + file. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments that can be set as batch properties, such + as ``--conf``, since a collision can occur that causes an + incorrect batch submission. + python_file_uris (MutableSequence[str]): + Optional. HCFS file URIs of Python files to pass to the + PySpark framework. Supported file types: ``.py``, ``.egg``, + and ``.zip``. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the classpath of the Spark driver and tasks. + file_uris (MutableSequence[str]): + Optional. HCFS URIs of files to be placed in + the working directory of each executor. + archive_uris (MutableSequence[str]): + Optional. HCFS URIs of archives to be extracted into the + working directory of each executor. Supported file types: + ``.jar``, ``.tar``, ``.tar.gz``, ``.tgz``, and ``.zip``. 
+ """ + + main_python_file_uri: str = proto.Field( + proto.STRING, + number=1, + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + python_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + +class SparkBatch(proto.Message): + r"""A configuration for running an `Apache + Spark `__ batch workload. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + main_jar_file_uri (str): + Optional. The HCFS URI of the jar file that + contains the main class. + + This field is a member of `oneof`_ ``driver``. + main_class (str): + Optional. The name of the driver main class. The jar file + that contains the class must be in the classpath or + specified in ``jar_file_uris``. + + This field is a member of `oneof`_ ``driver``. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments that can be set as batch properties, such + as ``--conf``, since a collision can occur that causes an + incorrect batch submission. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the classpath of the Spark driver and tasks. + file_uris (MutableSequence[str]): + Optional. HCFS URIs of files to be placed in + the working directory of each executor. + archive_uris (MutableSequence[str]): + Optional. HCFS URIs of archives to be extracted into the + working directory of each executor. Supported file types: + ``.jar``, ``.tar``, ``.tar.gz``, ``.tgz``, and ``.zip``. + """ + + main_jar_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="driver", + ) + main_class: str = proto.Field( + proto.STRING, + number=2, + oneof="driver", + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + +class SparkRBatch(proto.Message): + r"""A configuration for running an `Apache + SparkR `__ batch + workload. + + Attributes: + main_r_file_uri (str): + Required. The HCFS URI of the main R file to use as the + driver. Must be a ``.R`` or ``.r`` file. + args (MutableSequence[str]): + Optional. The arguments to pass to the Spark driver. Do not + include arguments that can be set as batch properties, such + as ``--conf``, since a collision can occur that causes an + incorrect batch submission. + file_uris (MutableSequence[str]): + Optional. HCFS URIs of files to be placed in + the working directory of each executor. + archive_uris (MutableSequence[str]): + Optional. HCFS URIs of archives to be extracted into the + working directory of each executor. Supported file types: + ``.jar``, ``.tar``, ``.tar.gz``, ``.tgz``, and ``.zip``. 
+ """ + + main_r_file_uri: str = proto.Field( + proto.STRING, + number=1, + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class SparkSqlBatch(proto.Message): + r"""A configuration for running `Apache Spark + SQL `__ queries as a batch workload. + + Attributes: + query_file_uri (str): + Required. The HCFS URI of the script that + contains Spark SQL queries to execute. + query_variables (MutableMapping[str, str]): + Optional. Mapping of query variable names to values + (equivalent to the Spark SQL command: + ``SET name="value";``). + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to be added + to the Spark CLASSPATH. + """ + + query_file_uri: str = proto.Field( + proto.STRING, + number=1, + ) + query_variables: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py new file mode 100644 index 000000000000..18008b007b32 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/clusters.py @@ -0,0 +1,2473 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import interval_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dataproc_v1.types import shared + +__protobuf__ = proto.module( + package="google.cloud.dataproc.v1", + manifest={ + "Cluster", + "ClusterConfig", + "VirtualClusterConfig", + "AuxiliaryServicesConfig", + "EndpointConfig", + "AutoscalingConfig", + "EncryptionConfig", + "GceClusterConfig", + "NodeGroupAffinity", + "ShieldedInstanceConfig", + "ConfidentialInstanceConfig", + "InstanceGroupConfig", + "InstanceReference", + "ManagedGroupConfig", + "InstanceFlexibilityPolicy", + "AcceleratorConfig", + "DiskConfig", + "AuxiliaryNodeGroup", + "NodeGroup", + "NodeInitializationAction", + "ClusterStatus", + "SecurityConfig", + "KerberosConfig", + "IdentityConfig", + "SoftwareConfig", + "LifecycleConfig", + "MetastoreConfig", + "ClusterMetrics", + "DataprocMetricConfig", + "CreateClusterRequest", + "UpdateClusterRequest", + "StopClusterRequest", + "StartClusterRequest", + "DeleteClusterRequest", + "GetClusterRequest", + "ListClustersRequest", + "ListClustersResponse", + "DiagnoseClusterRequest", + "DiagnoseClusterResults", + "ReservationAffinity", + }, +) + + +class Cluster(proto.Message): + r"""Describes the identifying information, config, and status of + a Dataproc cluster + + Attributes: + project_id (str): + Required. The Google Cloud Platform project + ID that the cluster belongs to. + cluster_name (str): + Required. The cluster name, which must be + unique within a project. The name must start + with a lowercase letter, and can contain up to + 51 lowercase letters, numbers, and hyphens. It + cannot end with a hyphen. The name of a deleted + cluster can be reused. + config (google.cloud.dataproc_v1.types.ClusterConfig): + Optional. The cluster config for a cluster of + Compute Engine Instances. Note that Dataproc may + set default values, and values may change when + clusters are updated. + + Exactly one of ClusterConfig or + VirtualClusterConfig must be specified. + virtual_cluster_config (google.cloud.dataproc_v1.types.VirtualClusterConfig): + Optional. The virtual cluster config is used when creating a + Dataproc cluster that does not directly control the + underlying compute resources, for example, when creating a + `Dataproc-on-GKE + cluster `__. + Dataproc may set default values, and values may change when + clusters are updated. Exactly one of + [config][google.cloud.dataproc.v1.Cluster.config] or + [virtual_cluster_config][google.cloud.dataproc.v1.Cluster.virtual_cluster_config] + must be specified. + labels (MutableMapping[str, str]): + Optional. The labels to associate with this cluster. Label + **keys** must contain 1 to 63 characters, and must conform + to `RFC 1035 `__. + Label **values** may be empty, but, if present, must contain + 1 to 63 characters, and must conform to `RFC + 1035 `__. No more than + 32 labels can be associated with a cluster. + status (google.cloud.dataproc_v1.types.ClusterStatus): + Output only. Cluster status. + status_history (MutableSequence[google.cloud.dataproc_v1.types.ClusterStatus]): + Output only. The previous cluster status. + cluster_uuid (str): + Output only. A cluster UUID (Unique Universal + Identifier). Dataproc generates this value when + it creates the cluster. 
+ metrics (google.cloud.dataproc_v1.types.ClusterMetrics): + Output only. Contains cluster daemon metrics such as HDFS + and YARN stats. + + **Beta Feature**: This report is available for testing + purposes only. It may be changed before final release. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=2, + ) + config: "ClusterConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="ClusterConfig", + ) + virtual_cluster_config: "VirtualClusterConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="VirtualClusterConfig", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + status: "ClusterStatus" = proto.Field( + proto.MESSAGE, + number=4, + message="ClusterStatus", + ) + status_history: MutableSequence["ClusterStatus"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="ClusterStatus", + ) + cluster_uuid: str = proto.Field( + proto.STRING, + number=6, + ) + metrics: "ClusterMetrics" = proto.Field( + proto.MESSAGE, + number=9, + message="ClusterMetrics", + ) + + +class ClusterConfig(proto.Message): + r"""The cluster config. + + Attributes: + config_bucket (str): + Optional. A Cloud Storage bucket used to stage job + dependencies, config files, and job driver console output. + If you do not specify a staging bucket, Cloud Dataproc will + determine a Cloud Storage location (US, ASIA, or EU) for + your cluster's staging bucket according to the Compute + Engine zone where your cluster is deployed, and then create + and manage this project-level, per-location bucket (see + `Dataproc staging and temp + buckets `__). + **This field requires a Cloud Storage bucket name, not a + ``gs://...`` URI to a Cloud Storage bucket.** + temp_bucket (str): + Optional. A Cloud Storage bucket used to store ephemeral + cluster and jobs data, such as Spark and MapReduce history + files. If you do not specify a temp bucket, Dataproc will + determine a Cloud Storage location (US, ASIA, or EU) for + your cluster's temp bucket according to the Compute Engine + zone where your cluster is deployed, and then create and + manage this project-level, per-location bucket. The default + bucket has a TTL of 90 days, but you can use any TTL (or + none) if you specify a bucket (see `Dataproc staging and + temp + buckets `__). + **This field requires a Cloud Storage bucket name, not a + ``gs://...`` URI to a Cloud Storage bucket.** + gce_cluster_config (google.cloud.dataproc_v1.types.GceClusterConfig): + Optional. The shared Compute Engine config + settings for all instances in a cluster. + master_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): + Optional. The Compute Engine config settings + for the cluster's master instance. + worker_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): + Optional. The Compute Engine config settings + for the cluster's worker instances. + secondary_worker_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): + Optional. The Compute Engine config settings + for a cluster's secondary worker instances + software_config (google.cloud.dataproc_v1.types.SoftwareConfig): + Optional. The config settings for cluster + software. + initialization_actions (MutableSequence[google.cloud.dataproc_v1.types.NodeInitializationAction]): + Optional. Commands to execute on each node after config is + completed. By default, executables are run on master and all + worker nodes. 
You can test a node's ``role`` metadata to run + an executable on a master or worker node, as shown below + using ``curl`` (you can also use ``wget``): + + :: + + ROLE=$(curl -H Metadata-Flavor:Google + http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) + if [[ "${ROLE}" == 'Master' ]]; then + ... master specific actions ... + else + ... worker specific actions ... + fi + encryption_config (google.cloud.dataproc_v1.types.EncryptionConfig): + Optional. Encryption settings for the + cluster. + autoscaling_config (google.cloud.dataproc_v1.types.AutoscalingConfig): + Optional. Autoscaling config for the policy + associated with the cluster. Cluster does not + autoscale if this field is unset. + security_config (google.cloud.dataproc_v1.types.SecurityConfig): + Optional. Security settings for the cluster. + lifecycle_config (google.cloud.dataproc_v1.types.LifecycleConfig): + Optional. Lifecycle setting for the cluster. + endpoint_config (google.cloud.dataproc_v1.types.EndpointConfig): + Optional. Port/endpoint configuration for + this cluster + metastore_config (google.cloud.dataproc_v1.types.MetastoreConfig): + Optional. Metastore configuration. + dataproc_metric_config (google.cloud.dataproc_v1.types.DataprocMetricConfig): + Optional. The config for Dataproc metrics. + auxiliary_node_groups (MutableSequence[google.cloud.dataproc_v1.types.AuxiliaryNodeGroup]): + Optional. The node group settings. + """ + + config_bucket: str = proto.Field( + proto.STRING, + number=1, + ) + temp_bucket: str = proto.Field( + proto.STRING, + number=2, + ) + gce_cluster_config: "GceClusterConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="GceClusterConfig", + ) + master_config: "InstanceGroupConfig" = proto.Field( + proto.MESSAGE, + number=9, + message="InstanceGroupConfig", + ) + worker_config: "InstanceGroupConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="InstanceGroupConfig", + ) + secondary_worker_config: "InstanceGroupConfig" = proto.Field( + proto.MESSAGE, + number=12, + message="InstanceGroupConfig", + ) + software_config: "SoftwareConfig" = proto.Field( + proto.MESSAGE, + number=13, + message="SoftwareConfig", + ) + initialization_actions: MutableSequence[ + "NodeInitializationAction" + ] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="NodeInitializationAction", + ) + encryption_config: "EncryptionConfig" = proto.Field( + proto.MESSAGE, + number=15, + message="EncryptionConfig", + ) + autoscaling_config: "AutoscalingConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="AutoscalingConfig", + ) + security_config: "SecurityConfig" = proto.Field( + proto.MESSAGE, + number=16, + message="SecurityConfig", + ) + lifecycle_config: "LifecycleConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="LifecycleConfig", + ) + endpoint_config: "EndpointConfig" = proto.Field( + proto.MESSAGE, + number=19, + message="EndpointConfig", + ) + metastore_config: "MetastoreConfig" = proto.Field( + proto.MESSAGE, + number=20, + message="MetastoreConfig", + ) + dataproc_metric_config: "DataprocMetricConfig" = proto.Field( + proto.MESSAGE, + number=23, + message="DataprocMetricConfig", + ) + auxiliary_node_groups: MutableSequence["AuxiliaryNodeGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message="AuxiliaryNodeGroup", + ) + + +class VirtualClusterConfig(proto.Message): + r"""The Dataproc cluster config for a cluster that does not directly + control the underlying compute resources, such as a `Dataproc-on-GKE + cluster `__. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + staging_bucket (str): + Optional. A Cloud Storage bucket used to stage job + dependencies, config files, and job driver console output. + If you do not specify a staging bucket, Cloud Dataproc will + determine a Cloud Storage location (US, ASIA, or EU) for + your cluster's staging bucket according to the Compute + Engine zone where your cluster is deployed, and then create + and manage this project-level, per-location bucket (see + `Dataproc staging and temp + buckets `__). + **This field requires a Cloud Storage bucket name, not a + ``gs://...`` URI to a Cloud Storage bucket.** + kubernetes_cluster_config (google.cloud.dataproc_v1.types.KubernetesClusterConfig): + Required. The configuration for running the + Dataproc cluster on Kubernetes. + + This field is a member of `oneof`_ ``infrastructure_config``. + auxiliary_services_config (google.cloud.dataproc_v1.types.AuxiliaryServicesConfig): + Optional. Configuration of auxiliary services + used by this cluster. + """ + + staging_bucket: str = proto.Field( + proto.STRING, + number=1, + ) + kubernetes_cluster_config: shared.KubernetesClusterConfig = proto.Field( + proto.MESSAGE, + number=6, + oneof="infrastructure_config", + message=shared.KubernetesClusterConfig, + ) + auxiliary_services_config: "AuxiliaryServicesConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="AuxiliaryServicesConfig", + ) + + +class AuxiliaryServicesConfig(proto.Message): + r"""Auxiliary services configuration for a Cluster. + + Attributes: + metastore_config (google.cloud.dataproc_v1.types.MetastoreConfig): + Optional. The Hive Metastore configuration + for this workload. + spark_history_server_config (google.cloud.dataproc_v1.types.SparkHistoryServerConfig): + Optional. The Spark History Server + configuration for the workload. + """ + + metastore_config: "MetastoreConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="MetastoreConfig", + ) + spark_history_server_config: shared.SparkHistoryServerConfig = proto.Field( + proto.MESSAGE, + number=2, + message=shared.SparkHistoryServerConfig, + ) + + +class EndpointConfig(proto.Message): + r"""Endpoint config for this cluster + + Attributes: + http_ports (MutableMapping[str, str]): + Output only. The map of port descriptions to URLs. Will only + be populated if enable_http_port_access is true. + enable_http_port_access (bool): + Optional. If true, enable http access to + specific ports on the cluster from external + sources. Defaults to false. + """ + + http_ports: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + enable_http_port_access: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class AutoscalingConfig(proto.Message): + r"""Autoscaling Policy config associated with the cluster. + + Attributes: + policy_uri (str): + Optional. The autoscaling policy used by the cluster. + + Only resource names including projectid and location + (region) are valid. Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]`` + - ``projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]`` + + Note that the policy must be in the same project and + Dataproc region. + """ + + policy_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EncryptionConfig(proto.Message): + r"""Encryption settings for the cluster. 
+ + Attributes: + gce_pd_kms_key_name (str): + Optional. The Cloud KMS key name to use for + PD disk encryption for all instances in the + cluster. + """ + + gce_pd_kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GceClusterConfig(proto.Message): + r"""Common config settings for resources of Compute Engine + cluster instances, applicable to all instances in the cluster. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + zone_uri (str): + Optional. The Compute Engine zone where the Dataproc cluster + will be located. If omitted, the service will pick a zone in + the cluster's Compute Engine region. On a get request, zone + will always be present. + + A full URL, partial URI, or short name are valid. Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]`` + - ``projects/[project_id]/zones/[zone]`` + - ``[zone]`` + network_uri (str): + Optional. The Compute Engine network to be used for machine + communications. Cannot be specified with subnetwork_uri. If + neither ``network_uri`` nor ``subnetwork_uri`` is specified, + the "default" network of the project is used, if it exists. + Cannot be a "Custom Subnet Network" (see `Using + Subnetworks `__ + for more information). + + A full URL, partial URI, or short name are valid. Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/global/networks/default`` + - ``projects/[project_id]/global/networks/default`` + - ``default`` + subnetwork_uri (str): + Optional. The Compute Engine subnetwork to be used for + machine communications. Cannot be specified with + network_uri. + + A full URL, partial URI, or short name are valid. Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/regions/[region]/subnetworks/sub0`` + - ``projects/[project_id]/regions/[region]/subnetworks/sub0`` + - ``sub0`` + internal_ip_only (bool): + Optional. If true, all instances in the cluster will only + have internal IP addresses. By default, clusters are not + restricted to internal IP addresses, and will have ephemeral + external IP addresses assigned to each instance. This + ``internal_ip_only`` restriction can only be enabled for + subnetwork enabled networks, and all off-cluster + dependencies must be configured to be accessible without + external IP addresses. + + This field is a member of `oneof`_ ``_internal_ip_only``. + private_ipv6_google_access (google.cloud.dataproc_v1.types.GceClusterConfig.PrivateIpv6GoogleAccess): + Optional. The type of IPv6 access for a + cluster. + service_account (str): + Optional. The `Dataproc service + account `__ + (also see `VM Data Plane + identity `__) + used by Dataproc cluster VM instances to access Google Cloud + Platform services. + + If not specified, the `Compute Engine default service + account `__ + is used. + service_account_scopes (MutableSequence[str]): + Optional. The URIs of service account scopes to be included + in Compute Engine instances. 
The following base set of + scopes is always included: + + - https://www.googleapis.com/auth/cloud.useraccounts.readonly + - https://www.googleapis.com/auth/devstorage.read_write + - https://www.googleapis.com/auth/logging.write + + If no scopes are specified, the following defaults are also + provided: + + - https://www.googleapis.com/auth/bigquery + - https://www.googleapis.com/auth/bigtable.admin.table + - https://www.googleapis.com/auth/bigtable.data + - https://www.googleapis.com/auth/devstorage.full_control + tags (MutableSequence[str]): + The Compute Engine tags to add to all instances (see + `Tagging + instances `__). + metadata (MutableMapping[str, str]): + Optional. The Compute Engine metadata entries to add to all + instances (see `Project and instance + metadata `__). + reservation_affinity (google.cloud.dataproc_v1.types.ReservationAffinity): + Optional. Reservation Affinity for consuming + Zonal reservation. + node_group_affinity (google.cloud.dataproc_v1.types.NodeGroupAffinity): + Optional. Node Group Affinity for sole-tenant + clusters. + shielded_instance_config (google.cloud.dataproc_v1.types.ShieldedInstanceConfig): + Optional. Shielded Instance Config for clusters using + `Compute Engine Shielded + VMs `__. + confidential_instance_config (google.cloud.dataproc_v1.types.ConfidentialInstanceConfig): + Optional. Confidential Instance Config for clusters using + `Confidential + VMs `__. + """ + + class PrivateIpv6GoogleAccess(proto.Enum): + r"""``PrivateIpv6GoogleAccess`` controls whether and how Dataproc + cluster nodes can communicate with Google Services through gRPC over + IPv6. These values are directly mapped to corresponding values in + the `Compute Engine Instance + fields `__. + + Values: + PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): + If unspecified, Compute Engine default behavior will apply, + which is the same as + [INHERIT_FROM_SUBNETWORK][google.cloud.dataproc.v1.GceClusterConfig.PrivateIpv6GoogleAccess.INHERIT_FROM_SUBNETWORK]. + INHERIT_FROM_SUBNETWORK (1): + Private access to and from Google Services + configuration inherited from the subnetwork + configuration. This is the default Compute + Engine behavior. + OUTBOUND (2): + Enables outbound private IPv6 access to + Google Services from the Dataproc cluster. + BIDIRECTIONAL (3): + Enables bidirectional private IPv6 access + between Google Services and the Dataproc + cluster. 
+ """ + PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 + INHERIT_FROM_SUBNETWORK = 1 + OUTBOUND = 2 + BIDIRECTIONAL = 3 + + zone_uri: str = proto.Field( + proto.STRING, + number=1, + ) + network_uri: str = proto.Field( + proto.STRING, + number=2, + ) + subnetwork_uri: str = proto.Field( + proto.STRING, + number=6, + ) + internal_ip_only: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) + private_ipv6_google_access: PrivateIpv6GoogleAccess = proto.Field( + proto.ENUM, + number=12, + enum=PrivateIpv6GoogleAccess, + ) + service_account: str = proto.Field( + proto.STRING, + number=8, + ) + service_account_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + metadata: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + reservation_affinity: "ReservationAffinity" = proto.Field( + proto.MESSAGE, + number=11, + message="ReservationAffinity", + ) + node_group_affinity: "NodeGroupAffinity" = proto.Field( + proto.MESSAGE, + number=13, + message="NodeGroupAffinity", + ) + shielded_instance_config: "ShieldedInstanceConfig" = proto.Field( + proto.MESSAGE, + number=14, + message="ShieldedInstanceConfig", + ) + confidential_instance_config: "ConfidentialInstanceConfig" = proto.Field( + proto.MESSAGE, + number=15, + message="ConfidentialInstanceConfig", + ) + + +class NodeGroupAffinity(proto.Message): + r"""Node Group Affinity for clusters using sole-tenant node groups. + **The Dataproc ``NodeGroupAffinity`` resource is not related to the + Dataproc [NodeGroup][google.cloud.dataproc.v1.NodeGroup] resource.** + + Attributes: + node_group_uri (str): + Required. The URI of a sole-tenant `node group + resource `__ + that the cluster will be created on. + + A full URL, partial URI, or node group name are valid. + Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/nodeGroups/node-group-1`` + - ``projects/[project_id]/zones/[zone]/nodeGroups/node-group-1`` + - ``node-group-1`` + """ + + node_group_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ShieldedInstanceConfig(proto.Message): + r"""Shielded Instance Config for clusters using `Compute Engine Shielded + VMs `__. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enable_secure_boot (bool): + Optional. Defines whether instances have + Secure Boot enabled. + + This field is a member of `oneof`_ ``_enable_secure_boot``. + enable_vtpm (bool): + Optional. Defines whether instances have the + vTPM enabled. + + This field is a member of `oneof`_ ``_enable_vtpm``. + enable_integrity_monitoring (bool): + Optional. Defines whether instances have + integrity monitoring enabled. + + This field is a member of `oneof`_ ``_enable_integrity_monitoring``. + """ + + enable_secure_boot: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + enable_vtpm: bool = proto.Field( + proto.BOOL, + number=2, + optional=True, + ) + enable_integrity_monitoring: bool = proto.Field( + proto.BOOL, + number=3, + optional=True, + ) + + +class ConfidentialInstanceConfig(proto.Message): + r"""Confidential Instance Config for clusters using `Confidential + VMs `__ + + Attributes: + enable_confidential_compute (bool): + Optional. Defines whether the instance should + have confidential compute enabled. 
+ """ + + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class InstanceGroupConfig(proto.Message): + r"""The config settings for Compute Engine resources in + an instance group, such as a master or worker group. + + Attributes: + num_instances (int): + Optional. The number of VM instances in the instance group. + For `HA + cluster `__ + `master_config <#FIELDS.master_config>`__ groups, **must be + set to 3**. For standard cluster + `master_config <#FIELDS.master_config>`__ groups, **must be + set to 1**. + instance_names (MutableSequence[str]): + Output only. The list of instance names. Dataproc derives + the names from ``cluster_name``, ``num_instances``, and the + instance group. + instance_references (MutableSequence[google.cloud.dataproc_v1.types.InstanceReference]): + Output only. List of references to Compute + Engine instances. + image_uri (str): + Optional. The Compute Engine image resource used for cluster + instances. + + The URI can represent an image or image family. + + Image examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/global/images/[image-id]`` + - ``projects/[project_id]/global/images/[image-id]`` + - ``image-id`` + + Image family examples. Dataproc will use the most recent + image from the family: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/global/images/family/[custom-image-family-name]`` + - ``projects/[project_id]/global/images/family/[custom-image-family-name]`` + + If the URI is unspecified, it will be inferred from + ``SoftwareConfig.image_version`` or the system default. + machine_type_uri (str): + Optional. The Compute Engine machine type used for cluster + instances. + + A full URL, partial URI, or short name are valid. Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/machineTypes/n1-standard-2`` + - ``projects/[project_id]/zones/[zone]/machineTypes/n1-standard-2`` + - ``n1-standard-2`` + + **Auto Zone Exception**: If you are using the Dataproc `Auto + Zone + Placement `__ + feature, you must use the short name of the machine type + resource, for example, ``n1-standard-2``. + disk_config (google.cloud.dataproc_v1.types.DiskConfig): + Optional. Disk option config settings. + is_preemptible (bool): + Output only. Specifies that this instance + group contains preemptible instances. + preemptibility (google.cloud.dataproc_v1.types.InstanceGroupConfig.Preemptibility): + Optional. Specifies the preemptibility of the instance + group. + + The default value for master and worker groups is + ``NON_PREEMPTIBLE``. This default cannot be changed. + + The default value for secondary instances is + ``PREEMPTIBLE``. + managed_group_config (google.cloud.dataproc_v1.types.ManagedGroupConfig): + Output only. The config for Compute Engine + Instance Group Manager that manages this group. + This is only used for preemptible instance + groups. + accelerators (MutableSequence[google.cloud.dataproc_v1.types.AcceleratorConfig]): + Optional. The Compute Engine accelerator + configuration for these instances. + min_cpu_platform (str): + Optional. Specifies the minimum cpu platform for the + Instance Group. See `Dataproc -> Minimum CPU + Platform `__. + min_num_instances (int): + Optional. The minimum number of primary worker instances to + create. If ``min_num_instances`` is set, cluster creation + will succeed if the number of primary workers created is at + least equal to the ``min_num_instances`` number. 
+
+            Example: Cluster creation request with ``num_instances`` =
+            ``5`` and ``min_num_instances`` = ``3``:
+
+            -  If 4 VMs are created and 1 instance fails, the failed VM
+               is deleted. The cluster is resized to 4 instances and
+               placed in a ``RUNNING`` state.
+            -  If 2 instances are created and 3 instances fail, the
+               cluster is placed in an ``ERROR`` state. The failed VMs
+               are not deleted.
+        instance_flexibility_policy (google.cloud.dataproc_v1.types.InstanceFlexibilityPolicy):
+            Optional. Instance flexibility policy
+            allowing a mixture of VM shapes and provisioning
+            models.
+    """
+
+    class Preemptibility(proto.Enum):
+        r"""Controls the use of preemptible instances within the group.
+
+        Values:
+            PREEMPTIBILITY_UNSPECIFIED (0):
+                Preemptibility is unspecified, the system
+                will choose the appropriate setting for each
+                instance group.
+            NON_PREEMPTIBLE (1):
+                Instances are non-preemptible.
+
+                This option is allowed for all instance groups
+                and is the only valid value for Master and
+                Worker instance groups.
+            PREEMPTIBLE (2):
+                Instances are [preemptible]
+                (https://cloud.google.com/compute/docs/instances/preemptible).
+
+                This option is allowed only for [secondary worker]
+                (https://cloud.google.com/dataproc/docs/concepts/compute/secondary-vms)
+                groups.
+            SPOT (3):
+                Instances are [Spot VMs]
+                (https://cloud.google.com/compute/docs/instances/spot).
+
+                This option is allowed only for [secondary worker]
+                (https://cloud.google.com/dataproc/docs/concepts/compute/secondary-vms)
+                groups. Spot VMs are the latest version of [preemptible VMs]
+                (https://cloud.google.com/compute/docs/instances/preemptible),
+                and provide additional features.
+        """
+        PREEMPTIBILITY_UNSPECIFIED = 0
+        NON_PREEMPTIBLE = 1
+        PREEMPTIBLE = 2
+        SPOT = 3
+
+    num_instances: int = proto.Field(
+        proto.INT32,
+        number=1,
+    )
+    instance_names: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+    instance_references: MutableSequence["InstanceReference"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=11,
+        message="InstanceReference",
+    )
+    image_uri: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    machine_type_uri: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    disk_config: "DiskConfig" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message="DiskConfig",
+    )
+    is_preemptible: bool = proto.Field(
+        proto.BOOL,
+        number=6,
+    )
+    preemptibility: Preemptibility = proto.Field(
+        proto.ENUM,
+        number=10,
+        enum=Preemptibility,
+    )
+    managed_group_config: "ManagedGroupConfig" = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message="ManagedGroupConfig",
+    )
+    accelerators: MutableSequence["AcceleratorConfig"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=8,
+        message="AcceleratorConfig",
+    )
+    min_cpu_platform: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+    min_num_instances: int = proto.Field(
+        proto.INT32,
+        number=12,
+    )
+    instance_flexibility_policy: "InstanceFlexibilityPolicy" = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message="InstanceFlexibilityPolicy",
+    )
+
+
+class InstanceReference(proto.Message):
+    r"""A reference to a Compute Engine instance.
+
+    Attributes:
+        instance_name (str):
+            The user-friendly name of the Compute Engine
+            instance.
+        instance_id (str):
+            The unique identifier of the Compute Engine
+            instance.
+        public_key (str):
+            The public RSA key used for sharing data with
+            this instance.
+        public_ecies_key (str):
+            The public ECIES key used for sharing data
+            with this instance.
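+
+    These fields are output only; client code typically reads them from
+    a ``Cluster`` returned by the API. A minimal sketch (the endpoint,
+    project, region, and cluster name are placeholders):
+
+    ::
+
+        from google.cloud import dataproc_v1
+
+        client = dataproc_v1.ClusterControllerClient(
+            client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
+        )
+        cluster = client.get_cluster(
+            project_id="my-project", region="us-central1", cluster_name="my-cluster"
+        )
+        for ref in cluster.config.worker_config.instance_references:
+            print(ref.instance_name, ref.instance_id)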
+ """ + + instance_name: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + public_key: str = proto.Field( + proto.STRING, + number=3, + ) + public_ecies_key: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ManagedGroupConfig(proto.Message): + r"""Specifies the resources used to actively manage an instance + group. + + Attributes: + instance_template_name (str): + Output only. The name of the Instance + Template used for the Managed Instance Group. + instance_group_manager_name (str): + Output only. The name of the Instance Group + Manager for this group. + instance_group_manager_uri (str): + Output only. The partial URI to the instance + group manager for this group. E.g. + projects/my-project/regions/us-central1/instanceGroupManagers/my-igm. + """ + + instance_template_name: str = proto.Field( + proto.STRING, + number=1, + ) + instance_group_manager_name: str = proto.Field( + proto.STRING, + number=2, + ) + instance_group_manager_uri: str = proto.Field( + proto.STRING, + number=3, + ) + + +class InstanceFlexibilityPolicy(proto.Message): + r"""Instance flexibility Policy allowing a mixture of VM shapes + and provisioning models. + + Attributes: + instance_selection_list (MutableSequence[google.cloud.dataproc_v1.types.InstanceFlexibilityPolicy.InstanceSelection]): + Optional. List of instance selection options + that the group will use when creating new VMs. + instance_selection_results (MutableSequence[google.cloud.dataproc_v1.types.InstanceFlexibilityPolicy.InstanceSelectionResult]): + Output only. A list of instance selection + results in the group. + """ + + class InstanceSelection(proto.Message): + r"""Defines machines types and a rank to which the machines types + belong. + + Attributes: + machine_types (MutableSequence[str]): + Optional. Full machine-type names, e.g. + "n1-standard-16". + rank (int): + Optional. Preference of this instance + selection. Lower number means higher preference. + Dataproc will first try to create a VM based on + the machine-type with priority rank and fallback + to next rank based on availability. Machine + types and instance selections with the same + priority have the same preference. + """ + + machine_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + rank: int = proto.Field( + proto.INT32, + number=2, + ) + + class InstanceSelectionResult(proto.Message): + r"""Defines a mapping from machine types to the number of VMs + that are created with each machine type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + machine_type (str): + Output only. Full machine-type names, e.g. + "n1-standard-16". + + This field is a member of `oneof`_ ``_machine_type``. + vm_count (int): + Output only. Number of VM provisioned with the machine_type. + + This field is a member of `oneof`_ ``_vm_count``. 
+ """ + + machine_type: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + vm_count: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + + instance_selection_list: MutableSequence[InstanceSelection] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=InstanceSelection, + ) + instance_selection_results: MutableSequence[ + InstanceSelectionResult + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=InstanceSelectionResult, + ) + + +class AcceleratorConfig(proto.Message): + r"""Specifies the type and number of accelerator cards attached to the + instances of an instance. See `GPUs on Compute + Engine `__. + + Attributes: + accelerator_type_uri (str): + Full URL, partial URI, or short name of the accelerator type + resource to expose to this instance. See `Compute Engine + AcceleratorTypes `__. + + Examples: + + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` + - ``projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` + - ``nvidia-tesla-k80`` + + **Auto Zone Exception**: If you are using the Dataproc `Auto + Zone + Placement `__ + feature, you must use the short name of the accelerator type + resource, for example, ``nvidia-tesla-k80``. + accelerator_count (int): + The number of the accelerator cards of this + type exposed to this instance. + """ + + accelerator_type_uri: str = proto.Field( + proto.STRING, + number=1, + ) + accelerator_count: int = proto.Field( + proto.INT32, + number=2, + ) + + +class DiskConfig(proto.Message): + r"""Specifies the config of disk options for a group of VM + instances. + + Attributes: + boot_disk_type (str): + Optional. Type of the boot disk (default is "pd-standard"). + Valid values: "pd-balanced" (Persistent Disk Balanced Solid + State Drive), "pd-ssd" (Persistent Disk Solid State Drive), + or "pd-standard" (Persistent Disk Hard Disk Drive). See + `Disk + types `__. + boot_disk_size_gb (int): + Optional. Size in GB of the boot disk + (default is 500GB). + num_local_ssds (int): + Optional. Number of attached SSDs, from 0 to 8 (default is + 0). If SSDs are not attached, the boot disk is used to store + runtime logs and + `HDFS `__ + data. If one or more SSDs are attached, this runtime bulk + data is spread across them, and the boot disk contains only + basic config and installed binaries. + + Note: Local SSD options may vary by machine type and number + of vCPUs selected. + local_ssd_interface (str): + Optional. Interface type of local SSDs (default is "scsi"). + Valid values: "scsi" (Small Computer System Interface), + "nvme" (Non-Volatile Memory Express). See `local SSD + performance `__. + """ + + boot_disk_type: str = proto.Field( + proto.STRING, + number=3, + ) + boot_disk_size_gb: int = proto.Field( + proto.INT32, + number=1, + ) + num_local_ssds: int = proto.Field( + proto.INT32, + number=2, + ) + local_ssd_interface: str = proto.Field( + proto.STRING, + number=4, + ) + + +class AuxiliaryNodeGroup(proto.Message): + r"""Node group identification and configuration information. + + Attributes: + node_group (google.cloud.dataproc_v1.types.NodeGroup): + Required. Node group configuration. + node_group_id (str): + Optional. A node group ID. Generated if not specified. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). Cannot begin or end with + underscore or hyphen. Must consist of from 3 to 33 + characters. 
+ """ + + node_group: "NodeGroup" = proto.Field( + proto.MESSAGE, + number=1, + message="NodeGroup", + ) + node_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class NodeGroup(proto.Message): + r"""Dataproc Node Group. **The Dataproc ``NodeGroup`` resource is not + related to the Dataproc + [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] + resource.** + + Attributes: + name (str): + The Node group `resource name `__. + roles (MutableSequence[google.cloud.dataproc_v1.types.NodeGroup.Role]): + Required. Node group roles. + node_group_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): + Optional. The node group instance group + configuration. + labels (MutableMapping[str, str]): + Optional. Node group labels. + + - Label **keys** must consist of from 1 to 63 characters + and conform to `RFC + 1035 `__. + - Label **values** can be empty. If specified, they must + consist of from 1 to 63 characters and conform to [RFC + 1035] (https://www.ietf.org/rfc/rfc1035.txt). + - The node group must have no more than 32 labels. + """ + + class Role(proto.Enum): + r"""Node pool roles. + + Values: + ROLE_UNSPECIFIED (0): + Required unspecified role. + DRIVER (1): + Job drivers run on the node pool. + """ + ROLE_UNSPECIFIED = 0 + DRIVER = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + roles: MutableSequence[Role] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=Role, + ) + node_group_config: "InstanceGroupConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="InstanceGroupConfig", + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class NodeInitializationAction(proto.Message): + r"""Specifies an executable to run on a fully configured node and + a timeout period for executable completion. + + Attributes: + executable_file (str): + Required. Cloud Storage URI of executable + file. + execution_timeout (google.protobuf.duration_pb2.Duration): + Optional. Amount of time executable has to complete. Default + is 10 minutes (see JSON representation of + `Duration `__). + + Cluster creation fails with an explanatory error message + (the name of the executable that caused the error and the + exceeded timeout period) if the executable is not completed + at end of the timeout period. + """ + + executable_file: str = proto.Field( + proto.STRING, + number=1, + ) + execution_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class ClusterStatus(proto.Message): + r"""The status of a cluster and its instances. + + Attributes: + state (google.cloud.dataproc_v1.types.ClusterStatus.State): + Output only. The cluster's state. + detail (str): + Optional. Output only. Details of cluster's + state. + state_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when this state was entered (see JSON + representation of + `Timestamp `__). + substate (google.cloud.dataproc_v1.types.ClusterStatus.Substate): + Output only. Additional state information + that includes status reported by the agent. + """ + + class State(proto.Enum): + r"""The cluster state. + + Values: + UNKNOWN (0): + The cluster state is unknown. + CREATING (1): + The cluster is being created and set up. It + is not ready for use. + RUNNING (2): + The cluster is currently running and healthy. It is ready + for use. 
+ + **Note:** The cluster state changes from "creating" to + "running" status after the master node(s), first two primary + worker nodes (and the last primary worker node if primary + workers > 2) are running. + ERROR (3): + The cluster encountered an error. It is not + ready for use. + ERROR_DUE_TO_UPDATE (9): + The cluster has encountered an error while + being updated. Jobs can be submitted to the + cluster, but the cluster cannot be updated. + DELETING (4): + The cluster is being deleted. It cannot be + used. + UPDATING (5): + The cluster is being updated. It continues to + accept and process jobs. + STOPPING (6): + The cluster is being stopped. It cannot be + used. + STOPPED (7): + The cluster is currently stopped. It is not + ready for use. + STARTING (8): + The cluster is being started. It is not ready + for use. + REPAIRING (10): + The cluster is being repaired. It is not + ready for use. + """ + UNKNOWN = 0 + CREATING = 1 + RUNNING = 2 + ERROR = 3 + ERROR_DUE_TO_UPDATE = 9 + DELETING = 4 + UPDATING = 5 + STOPPING = 6 + STOPPED = 7 + STARTING = 8 + REPAIRING = 10 + + class Substate(proto.Enum): + r"""The cluster substate. + + Values: + UNSPECIFIED (0): + The cluster substate is unknown. + UNHEALTHY (1): + The cluster is known to be in an unhealthy + state (for example, critical daemons are not + running or HDFS capacity is exhausted). + + Applies to RUNNING state. + STALE_STATUS (2): + The agent-reported status is out of date (may + occur if Dataproc loses communication with + Agent). + + Applies to RUNNING state. + """ + UNSPECIFIED = 0 + UNHEALTHY = 1 + STALE_STATUS = 2 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + detail: str = proto.Field( + proto.STRING, + number=2, + ) + state_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + substate: Substate = proto.Field( + proto.ENUM, + number=4, + enum=Substate, + ) + + +class SecurityConfig(proto.Message): + r"""Security related configuration, including encryption, + Kerberos, etc. + + Attributes: + kerberos_config (google.cloud.dataproc_v1.types.KerberosConfig): + Optional. Kerberos related configuration. + identity_config (google.cloud.dataproc_v1.types.IdentityConfig): + Optional. Identity related configuration, + including service account based secure + multi-tenancy user mappings. + """ + + kerberos_config: "KerberosConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="KerberosConfig", + ) + identity_config: "IdentityConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="IdentityConfig", + ) + + +class KerberosConfig(proto.Message): + r"""Specifies Kerberos related configuration. + + Attributes: + enable_kerberos (bool): + Optional. Flag to indicate whether to + Kerberize the cluster (default: false). Set this + field to true to enable Kerberos on a cluster. + root_principal_password_uri (str): + Optional. The Cloud Storage URI of a KMS + encrypted file containing the root principal + password. + kms_key_uri (str): + Optional. The uri of the KMS key used to + encrypt various sensitive files. + keystore_uri (str): + Optional. The Cloud Storage URI of the + keystore file used for SSL encryption. If not + provided, Dataproc will provide a self-signed + certificate. + truststore_uri (str): + Optional. The Cloud Storage URI of the + truststore file used for SSL encryption. If not + provided, Dataproc will provide a self-signed + certificate. + keystore_password_uri (str): + Optional. 
+            The Cloud Storage URI of a KMS
+            encrypted file containing the password to the
+            user provided keystore. For the self-signed
+            certificate, this password is generated by
+            Dataproc.
+        key_password_uri (str):
+            Optional. The Cloud Storage URI of a KMS
+            encrypted file containing the password to the
+            user provided key. For the self-signed
+            certificate, this password is generated by
+            Dataproc.
+        truststore_password_uri (str):
+            Optional. The Cloud Storage URI of a KMS
+            encrypted file containing the password to the
+            user provided truststore. For the self-signed
+            certificate, this password is generated by
+            Dataproc.
+        cross_realm_trust_realm (str):
+            Optional. The remote realm the Dataproc
+            on-cluster KDC will trust, should the user
+            enable cross realm trust.
+        cross_realm_trust_kdc (str):
+            Optional. The KDC (IP or hostname) for the
+            remote trusted realm in a cross realm trust
+            relationship.
+        cross_realm_trust_admin_server (str):
+            Optional. The admin server (IP or hostname)
+            for the remote trusted realm in a cross realm
+            trust relationship.
+        cross_realm_trust_shared_password_uri (str):
+            Optional. The Cloud Storage URI of a KMS
+            encrypted file containing the shared password
+            between the on-cluster Kerberos realm and the
+            remote trusted realm, in a cross realm trust
+            relationship.
+        kdc_db_key_uri (str):
+            Optional. The Cloud Storage URI of a KMS
+            encrypted file containing the master key of the
+            KDC database.
+        tgt_lifetime_hours (int):
+            Optional. The lifetime of the ticket granting
+            ticket, in hours. If not specified, or if the
+            user specifies 0, the default value of 10 will
+            be used.
+        realm (str):
+            Optional. The name of the on-cluster Kerberos
+            realm. If not specified, the uppercased domain
+            of hostnames will be the realm.
+    """
+
+    enable_kerberos: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+    root_principal_password_uri: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    kms_key_uri: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    keystore_uri: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    truststore_uri: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    keystore_password_uri: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    key_password_uri: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    truststore_password_uri: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    cross_realm_trust_realm: str = proto.Field(
+        proto.STRING,
+        number=9,
+    )
+    cross_realm_trust_kdc: str = proto.Field(
+        proto.STRING,
+        number=10,
+    )
+    cross_realm_trust_admin_server: str = proto.Field(
+        proto.STRING,
+        number=11,
+    )
+    cross_realm_trust_shared_password_uri: str = proto.Field(
+        proto.STRING,
+        number=12,
+    )
+    kdc_db_key_uri: str = proto.Field(
+        proto.STRING,
+        number=13,
+    )
+    tgt_lifetime_hours: int = proto.Field(
+        proto.INT32,
+        number=14,
+    )
+    realm: str = proto.Field(
+        proto.STRING,
+        number=15,
+    )
+
+
+class IdentityConfig(proto.Message):
+    r"""Identity related configuration, including service account
+    based secure multi-tenancy user mappings.
+
+    Attributes:
+        user_service_account_mapping (MutableMapping[str, str]):
+            Required. Map of user to service account.
+    """
+
+    user_service_account_mapping: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=1,
+    )
+
+
+class SoftwareConfig(proto.Message):
+    r"""Specifies the selection and config of software inside the
+    cluster.
+
+    Attributes:
+        image_version (str):
+            Optional. The version of software inside the cluster.
It + must be one of the supported `Dataproc + Versions `__, + such as "1.2" (including a subminor version, such as + "1.2.29"), or the `"preview" + version `__. + If unspecified, it defaults to the latest Debian version. + properties (MutableMapping[str, str]): + Optional. The properties to set on daemon config files. + + Property keys are specified in ``prefix:property`` format, + for example ``core:hadoop.tmp.dir``. The following are + supported prefixes and their mappings: + + - capacity-scheduler: ``capacity-scheduler.xml`` + - core: ``core-site.xml`` + - distcp: ``distcp-default.xml`` + - hdfs: ``hdfs-site.xml`` + - hive: ``hive-site.xml`` + - mapred: ``mapred-site.xml`` + - pig: ``pig.properties`` + - spark: ``spark-defaults.conf`` + - yarn: ``yarn-site.xml`` + + For more information, see `Cluster + properties `__. + optional_components (MutableSequence[google.cloud.dataproc_v1.types.Component]): + Optional. The set of components to activate + on the cluster. + """ + + image_version: str = proto.Field( + proto.STRING, + number=1, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + optional_components: MutableSequence[shared.Component] = proto.RepeatedField( + proto.ENUM, + number=3, + enum=shared.Component, + ) + + +class LifecycleConfig(proto.Message): + r"""Specifies the cluster auto-delete schedule configuration. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + idle_delete_ttl (google.protobuf.duration_pb2.Duration): + Optional. The duration to keep the cluster alive while + idling (when no jobs are running). Passing this threshold + will cause the cluster to be deleted. Minimum value is 5 + minutes; maximum value is 14 days (see JSON representation + of + `Duration `__). + auto_delete_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when cluster will be auto-deleted (see + JSON representation of + `Timestamp `__). + + This field is a member of `oneof`_ ``ttl``. + auto_delete_ttl (google.protobuf.duration_pb2.Duration): + Optional. The lifetime duration of cluster. The cluster will + be auto-deleted at the end of this period. Minimum value is + 10 minutes; maximum value is 14 days (see JSON + representation of + `Duration `__). + + This field is a member of `oneof`_ ``ttl``. + idle_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when cluster became idle (most recent + job finished) and became eligible for deletion due to + idleness (see JSON representation of + `Timestamp `__). + """ + + idle_delete_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + auto_delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof="ttl", + message=timestamp_pb2.Timestamp, + ) + auto_delete_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + oneof="ttl", + message=duration_pb2.Duration, + ) + idle_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class MetastoreConfig(proto.Message): + r"""Specifies a Metastore configuration. + + Attributes: + dataproc_metastore_service (str): + Required. 
+            Resource name of an existing Dataproc Metastore
+            service.
+
+            Example:
+
+            -  ``projects/[project_id]/locations/[dataproc_region]/services/[service-name]``
+    """
+
+    dataproc_metastore_service: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ClusterMetrics(proto.Message):
+    r"""Contains cluster daemon metrics, such as HDFS and YARN stats.
+
+    **Beta Feature**: This report is available for testing purposes
+    only. It may be changed before final release.
+
+    Attributes:
+        hdfs_metrics (MutableMapping[str, int]):
+            The HDFS metrics.
+        yarn_metrics (MutableMapping[str, int]):
+            YARN metrics.
+    """
+
+    hdfs_metrics: MutableMapping[str, int] = proto.MapField(
+        proto.STRING,
+        proto.INT64,
+        number=1,
+    )
+    yarn_metrics: MutableMapping[str, int] = proto.MapField(
+        proto.STRING,
+        proto.INT64,
+        number=2,
+    )
+
+
+class DataprocMetricConfig(proto.Message):
+    r"""Dataproc metric config.
+
+    Attributes:
+        metrics (MutableSequence[google.cloud.dataproc_v1.types.DataprocMetricConfig.Metric]):
+            Required. Metrics sources to enable.
+    """
+
+    class MetricSource(proto.Enum):
+        r"""A source for the collection of Dataproc custom metrics (see [Custom
+        metrics]
+        (https://cloud.google.com/dataproc/docs/guides/dataproc-metrics#custom_metrics)).
+
+        Values:
+            METRIC_SOURCE_UNSPECIFIED (0):
+                Required unspecified metric source.
+            MONITORING_AGENT_DEFAULTS (1):
+                Monitoring agent metrics. If this source is enabled,
+                Dataproc enables the monitoring agent in Compute Engine, and
+                collects monitoring agent metrics, which are published with
+                an ``agent.googleapis.com`` prefix.
+            HDFS (2):
+                HDFS metric source.
+            SPARK (3):
+                Spark metric source.
+            YARN (4):
+                YARN metric source.
+            SPARK_HISTORY_SERVER (5):
+                Spark History Server metric source.
+            HIVESERVER2 (6):
+                Hiveserver2 metric source.
+            HIVEMETASTORE (7):
+                Hive Metastore metric source.
+        """
+        METRIC_SOURCE_UNSPECIFIED = 0
+        MONITORING_AGENT_DEFAULTS = 1
+        HDFS = 2
+        SPARK = 3
+        YARN = 4
+        SPARK_HISTORY_SERVER = 5
+        HIVESERVER2 = 6
+        HIVEMETASTORE = 7
+
+    class Metric(proto.Message):
+        r"""A Dataproc custom metric.
+
+        Attributes:
+            metric_source (google.cloud.dataproc_v1.types.DataprocMetricConfig.MetricSource):
+                Required. A standard set of metrics is collected unless
+                ``metricOverrides`` are specified for the metric source (see
+                [Custom metrics]
+                (https://cloud.google.com/dataproc/docs/guides/dataproc-metrics#custom_metrics)
+                for more information).
+            metric_overrides (MutableSequence[str]):
+                Optional. Specify one or more [Custom metrics]
+                (https://cloud.google.com/dataproc/docs/guides/dataproc-metrics#custom_metrics)
+                to collect for the metric source. For the ``SPARK`` metric
+                source, any [Spark metric]
+                (https://spark.apache.org/docs/latest/monitoring.html#metrics)
+                can be specified.
+
+                Provide metrics in the following format:
+                ``METRIC_SOURCE:INSTANCE:GROUP:METRIC``. Use camelcase as
+                appropriate.
+
+                Examples:
+
+                ::
+
+                    yarn:ResourceManager:QueueMetrics:AppsCompleted
+                    spark:driver:DAGScheduler:job.allJobs
+                    sparkHistoryServer:JVM:Memory:NonHeapMemoryUsage.committed
+                    hiveserver2:JVM:Memory:NonHeapMemoryUsage.used
+
+                Notes:
+
+                -  Only the specified overridden metrics are collected for
+                   the metric source. For example, if one or more
+                   ``spark:executor`` metrics are listed as metric
+                   overrides, other ``SPARK`` metrics are not collected. The
+                   collection of the metrics for other enabled custom metric
+                   sources is unaffected.
+                   For example, if both ``SPARK``
+                   and ``YARN`` metric sources are enabled, and overrides
+                   are provided for Spark metrics only, all YARN metrics are
+                   collected.
+        """
+
+        metric_source: "DataprocMetricConfig.MetricSource" = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum="DataprocMetricConfig.MetricSource",
+        )
+        metric_overrides: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=2,
+        )
+
+    metrics: MutableSequence[Metric] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=Metric,
+    )
+
+
+class CreateClusterRequest(proto.Message):
+    r"""A request to create a cluster.
+
+    Attributes:
+        project_id (str):
+            Required. The ID of the Google Cloud Platform
+            project that the cluster belongs to.
+        region (str):
+            Required. The Dataproc region in which to
+            handle the request.
+        cluster (google.cloud.dataproc_v1.types.Cluster):
+            Required. The cluster to create.
+        request_id (str):
+            Optional. A unique ID used to identify the request. If the
+            server receives two
+            `CreateClusterRequest `__\ s
+            with the same id, then the second request will be ignored
+            and the first
+            [google.longrunning.Operation][google.longrunning.Operation]
+            created and stored in the backend is returned.
+
+            It is recommended to always set this value to a
+            `UUID `__.
+
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+        action_on_failed_primary_workers (google.cloud.dataproc_v1.types.FailureAction):
+            Optional. Failure action when primary worker
+            creation fails.
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    region: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    cluster: "Cluster" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="Cluster",
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    action_on_failed_primary_workers: shared.FailureAction = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=shared.FailureAction,
+    )
+
+
+class UpdateClusterRequest(proto.Message):
+    r"""A request to update a cluster.
+
+    Attributes:
+        project_id (str):
+            Required. The ID of the Google Cloud Platform
+            project the cluster belongs to.
+        region (str):
+            Required. The Dataproc region in which to
+            handle the request.
+        cluster_name (str):
+            Required. The cluster name.
+        cluster (google.cloud.dataproc_v1.types.Cluster):
+            Required. The changes to the cluster.
+        graceful_decommission_timeout (google.protobuf.duration_pb2.Duration):
+            Optional. Timeout for graceful YARN decommissioning.
+            Graceful decommissioning allows removing nodes from the
+            cluster without interrupting jobs in progress. Timeout
+            specifies how long to wait for jobs in progress to finish
+            before forcefully removing nodes (and potentially
+            interrupting jobs). Default timeout is 0 (for forceful
+            decommission), and the maximum allowed timeout is 1 day
+            (see JSON representation of
+            `Duration `__).
+
+            Only supported on Dataproc image versions 1.2 and higher.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. Specifies the path, relative to ``Cluster``, of
+            the field to update.
+            For example, to change the number of
+            workers in a cluster to 5, the ``update_mask`` parameter
+            would be specified as
+            ``config.worker_config.num_instances``, and the ``PATCH``
+            request body would specify the new value, as follows:
+
+            ::
+
+                {
+                  "config":{
+                    "workerConfig":{
+                      "numInstances":"5"
+                    }
+                  }
+                }
+
+            Similarly, to change the number of preemptible workers in a
+            cluster to 5, the ``update_mask`` parameter would be
+            ``config.secondary_worker_config.num_instances``, and the
+            ``PATCH`` request body would be set as follows:
+
+            ::
+
+                {
+                  "config":{
+                    "secondaryWorkerConfig":{
+                      "numInstances":"5"
+                    }
+                  }
+                }
+
+            Note: Currently, only the following fields can be updated:
+
+            .. raw:: html
+
+                <table>
+                <tbody>
+                <tr>
+                <td><strong>Mask</strong></td>
+                <td><strong>Purpose</strong></td>
+                </tr>
+                <tr>
+                <td>labels</td>
+                <td>Update labels</td>
+                </tr>
+                <tr>
+                <td>config.worker_config.num_instances</td>
+                <td>Resize primary worker group</td>
+                </tr>
+                <tr>
+                <td>config.secondary_worker_config.num_instances</td>
+                <td>Resize secondary worker group</td>
+                </tr>
+                <tr>
+                <td>config.autoscaling_config.policy_uri</td>
+                <td>Use, stop using, or change autoscaling policies</td>
+                </tr>
+                </tbody>
+                </table>
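+
+            A sketch of resizing primary workers with this mask (all
+            identifiers are placeholders):
+
+            ::
+
+                from google.cloud import dataproc_v1
+                from google.protobuf import field_mask_pb2
+
+                request = dataproc_v1.UpdateClusterRequest(
+                    project_id="my-project",
+                    region="us-central1",
+                    cluster_name="my-cluster",
+                    cluster=dataproc_v1.Cluster(
+                        config=dataproc_v1.ClusterConfig(
+                            worker_config=dataproc_v1.InstanceGroupConfig(
+                                num_instances=5,
+                            ),
+                        ),
+                    ),
+                    update_mask=field_mask_pb2.FieldMask(
+                        paths=["config.worker_config.num_instances"]
+                    ),
+                )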
+ request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two + `UpdateClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first + [google.longrunning.Operation][google.longrunning.Operation] + created and stored in the backend is returned. + + It is recommended to always set this value to a + `UUID `__. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=5, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=2, + ) + cluster: "Cluster" = proto.Field( + proto.MESSAGE, + number=3, + message="Cluster", + ) + graceful_decommission_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + request_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class StopClusterRequest(proto.Message): + r"""A request to stop a cluster. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project the cluster belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + cluster_name (str): + Required. The cluster name. + cluster_uuid (str): + Optional. Specifying the ``cluster_uuid`` means the RPC will + fail (with error NOT_FOUND) if a cluster with the specified + UUID does not exist. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two + `StopClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first + [google.longrunning.Operation][google.longrunning.Operation] + created and stored in the backend is returned. + + Recommendation: Set this value to a + `UUID `__. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=3, + ) + cluster_uuid: str = proto.Field( + proto.STRING, + number=4, + ) + request_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class StartClusterRequest(proto.Message): + r"""A request to start a cluster. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project the cluster belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + cluster_name (str): + Required. The cluster name. + cluster_uuid (str): + Optional. Specifying the ``cluster_uuid`` means the RPC will + fail (with error NOT_FOUND) if a cluster with the specified + UUID does not exist. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two + `StartClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first + [google.longrunning.Operation][google.longrunning.Operation] + created and stored in the backend is returned. + + Recommendation: Set this value to a + `UUID `__. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. 
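+
+    As noted above, supplying a UUID as the ``request_id`` makes the
+    request safe to retry; a minimal sketch (project, region, and
+    cluster name are placeholders):
+
+    ::
+
+        import uuid
+
+        from google.cloud import dataproc_v1
+
+        request = dataproc_v1.StartClusterRequest(
+            project_id="my-project",
+            region="us-central1",
+            cluster_name="my-cluster",
+            request_id=str(uuid.uuid4()),
+        )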
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=3, + ) + cluster_uuid: str = proto.Field( + proto.STRING, + number=4, + ) + request_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class DeleteClusterRequest(proto.Message): + r"""A request to delete a cluster. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the cluster belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + cluster_name (str): + Required. The cluster name. + cluster_uuid (str): + Optional. Specifying the ``cluster_uuid`` means the RPC + should fail (with error NOT_FOUND) if cluster with specified + UUID does not exist. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two + `DeleteClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first + [google.longrunning.Operation][google.longrunning.Operation] + created and stored in the backend is returned. + + It is recommended to always set this value to a + `UUID `__. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_uuid: str = proto.Field( + proto.STRING, + number=4, + ) + request_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetClusterRequest(proto.Message): + r"""Request to get the resource representation for a cluster in a + project. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the cluster belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + cluster_name (str): + Required. The cluster name. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListClustersRequest(proto.Message): + r"""A request to list the clusters in a project. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the cluster belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + filter (str): + Optional. A filter constraining the clusters to list. + Filters are case-sensitive and have the following syntax: + + field = value [AND [field = value]] ... + + where **field** is one of ``status.state``, ``clusterName``, + or ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** + can be ``*`` to match all values. ``status.state`` can be + one of the following: ``ACTIVE``, ``INACTIVE``, + ``CREATING``, ``RUNNING``, ``ERROR``, ``DELETING``, or + ``UPDATING``. ``ACTIVE`` contains the ``CREATING``, + ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` contains + the ``DELETING`` and ``ERROR`` states. ``clusterName`` is + the name of the cluster provided at creation time. Only the + logical ``AND`` operator is supported; space-separated items + are treated as having an implicit ``AND`` operator. 
+ + Example filter: + + status.state = ACTIVE AND clusterName = mycluster AND + labels.env = staging AND labels.starred = \* + page_size (int): + Optional. The standard List page size. + page_token (str): + Optional. The standard List page token. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListClustersResponse(proto.Message): + r"""The list of all clusters in a project. + + Attributes: + clusters (MutableSequence[google.cloud.dataproc_v1.types.Cluster]): + Output only. The clusters in the project. + next_page_token (str): + Output only. This token is included in the response if there + are more results to fetch. To fetch additional results, + provide this value as the ``page_token`` in a subsequent + ``ListClustersRequest``. + """ + + @property + def raw_page(self): + return self + + clusters: MutableSequence["Cluster"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Cluster", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DiagnoseClusterRequest(proto.Message): + r"""A request to collect cluster diagnostic information. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the cluster belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + cluster_name (str): + Required. The cluster name. + tarball_gcs_dir (str): + Optional. The output Cloud Storage directory + for the diagnostic tarball. If not specified, a + task-specific directory in the cluster's staging + bucket will be used. + diagnosis_interval (google.type.interval_pb2.Interval): + Optional. Time interval in which diagnosis + should be carried out on the cluster. + jobs (MutableSequence[str]): + Optional. Specifies a list of jobs on which + diagnosis is to be performed. Format: + projects/{project}/regions/{region}/jobs/{job} + yarn_application_ids (MutableSequence[str]): + Optional. Specifies a list of yarn + applications on which diagnosis is to be + performed. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=2, + ) + tarball_gcs_dir: str = proto.Field( + proto.STRING, + number=4, + ) + diagnosis_interval: interval_pb2.Interval = proto.Field( + proto.MESSAGE, + number=6, + message=interval_pb2.Interval, + ) + jobs: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + yarn_application_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) + + +class DiagnoseClusterResults(proto.Message): + r"""The location of diagnostic output. + + Attributes: + output_uri (str): + Output only. The Cloud Storage URI of the + diagnostic output. The output report is a plain + text file with a summary of collected + diagnostics. + """ + + output_uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ReservationAffinity(proto.Message): + r"""Reservation Affinity for consuming Zonal reservation. + + Attributes: + consume_reservation_type (google.cloud.dataproc_v1.types.ReservationAffinity.Type): + Optional. Type of reservation to consume + key (str): + Optional. Corresponds to the label key of + reservation resource. 
+ values (MutableSequence[str]): + Optional. Corresponds to the label values of + reservation resource. + """ + + class Type(proto.Enum): + r"""Indicates whether to consume capacity from a reservation or + not. + + Values: + TYPE_UNSPECIFIED (0): + No description available. + NO_RESERVATION (1): + Do not consume from any allocated capacity. + ANY_RESERVATION (2): + Consume any reservation available. + SPECIFIC_RESERVATION (3): + Must consume from a specific reservation. + Must specify key value fields for specifying the + reservations. + """ + TYPE_UNSPECIFIED = 0 + NO_RESERVATION = 1 + ANY_RESERVATION = 2 + SPECIFIC_RESERVATION = 3 + + consume_reservation_type: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + key: str = proto.Field( + proto.STRING, + number=2, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py new file mode 100644 index 000000000000..14cd14c044ca --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/jobs.py @@ -0,0 +1,1720 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dataproc.v1", + manifest={ + "LoggingConfig", + "HadoopJob", + "SparkJob", + "PySparkJob", + "QueryList", + "HiveJob", + "SparkSqlJob", + "PigJob", + "SparkRJob", + "PrestoJob", + "TrinoJob", + "JobPlacement", + "JobStatus", + "JobReference", + "YarnApplication", + "Job", + "DriverSchedulingConfig", + "JobScheduling", + "SubmitJobRequest", + "JobMetadata", + "GetJobRequest", + "ListJobsRequest", + "UpdateJobRequest", + "ListJobsResponse", + "CancelJobRequest", + "DeleteJobRequest", + }, +) + + +class LoggingConfig(proto.Message): + r"""The runtime logging config of the job. + + Attributes: + driver_log_levels (MutableMapping[str, google.cloud.dataproc_v1.types.LoggingConfig.Level]): + The per-package log levels for the driver. + This may include "root" package name to + configure rootLogger. Examples: + + - 'com.google = FATAL' + - 'root = INFO' + - 'org.apache = DEBUG' + """ + + class Level(proto.Enum): + r"""The Log4j level for job execution. When running an `Apache + Hive `__ job, Cloud Dataproc configures + the Hive client to an equivalent verbosity level. + + Values: + LEVEL_UNSPECIFIED (0): + Level is unspecified. Use default level for + log4j. + ALL (1): + Use ALL level for log4j. + TRACE (2): + Use TRACE level for log4j. + DEBUG (3): + Use DEBUG level for log4j. + INFO (4): + Use INFO level for log4j. + WARN (5): + Use WARN level for log4j.
+ ERROR (6): + Use ERROR level for log4j. + FATAL (7): + Use FATAL level for log4j. + OFF (8): + Turn off log4j. + """ + LEVEL_UNSPECIFIED = 0 + ALL = 1 + TRACE = 2 + DEBUG = 3 + INFO = 4 + WARN = 5 + ERROR = 6 + FATAL = 7 + OFF = 8 + + driver_log_levels: MutableMapping[str, Level] = proto.MapField( + proto.STRING, + proto.ENUM, + number=2, + enum=Level, + ) + + +class HadoopJob(proto.Message): + r"""A Dataproc job for running `Apache Hadoop + MapReduce `__ + jobs on `Apache Hadoop + YARN `__. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + main_jar_file_uri (str): + The HCFS URI of the jar file containing the + main class. Examples: + + 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' + 'hdfs:/tmp/test-samples/custom-wordcount.jar' + 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar' + + This field is a member of `oneof`_ ``driver``. + main_class (str): + The name of the driver's main class. The jar file containing + the class must be in the default CLASSPATH or specified in + ``jar_file_uris``. + + This field is a member of `oneof`_ ``driver``. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments, such as ``-libjars`` or ``-Dfoo=bar``, + that can be set as job properties, since a collision may + occur that causes an incorrect job submission. + jar_file_uris (MutableSequence[str]): + Optional. Jar file URIs to add to the + CLASSPATHs of the Hadoop driver and tasks. + file_uris (MutableSequence[str]): + Optional. HCFS (Hadoop Compatible Filesystem) + URIs of files to be copied to the working + directory of Hadoop drivers and distributed + tasks. Useful for naively parallel tasks. + archive_uris (MutableSequence[str]): + Optional. HCFS URIs of archives to be + extracted in the working directory of Hadoop + drivers and tasks. Supported file types: + + .jar, .tar, .tar.gz, .tgz, or .zip. + properties (MutableMapping[str, str]): + Optional. A mapping of property names to values, used to + configure Hadoop. Properties that conflict with values set + by the Dataproc API may be overwritten. Can include + properties set in ``/etc/hadoop/conf/*-site`` and classes in + user code. + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + main_jar_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="driver", + ) + main_class: str = proto.Field( + proto.STRING, + number=2, + oneof="driver", + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="LoggingConfig", + ) + + +class SparkJob(proto.Message): + r"""A Dataproc job for running `Apache + Spark `__ applications on YARN. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + main_jar_file_uri (str): + The HCFS URI of the jar file that contains + the main class. + + This field is a member of `oneof`_ ``driver``. + main_class (str): + The name of the driver's main class. The jar file that + contains the class must be in the default CLASSPATH or + specified in ``jar_file_uris``. + + This field is a member of `oneof`_ ``driver``. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments, such as ``--conf``, that can be set as + job properties, since a collision may occur that causes an + incorrect job submission. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the CLASSPATHs of the Spark driver and tasks. + file_uris (MutableSequence[str]): + Optional. HCFS URIs of files to be placed in + the working directory of each executor. Useful + for naively parallel tasks. + archive_uris (MutableSequence[str]): + Optional. HCFS URIs of archives to be + extracted into the working directory of each + executor. Supported file types: + + .jar, .tar, .tar.gz, .tgz, and .zip. + properties (MutableMapping[str, str]): + Optional. A mapping of property names to + values, used to configure Spark. Properties that + conflict with values set by the Dataproc API may + be overwritten. Can include properties set in + /etc/spark/conf/spark-defaults.conf and classes + in user code. + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + main_jar_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="driver", + ) + main_class: str = proto.Field( + proto.STRING, + number=2, + oneof="driver", + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="LoggingConfig", + ) + + +class PySparkJob(proto.Message): + r"""A Dataproc job for running `Apache + PySpark `__ + applications on YARN. + + Attributes: + main_python_file_uri (str): + Required. The HCFS URI of the main Python + file to use as the driver. Must be a .py file. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments, such as ``--conf``, that can be set as + job properties, since a collision may occur that causes an + incorrect job submission. + python_file_uris (MutableSequence[str]): + Optional. HCFS file URIs of Python files to + pass to the PySpark framework. Supported file + types: .py, .egg, and .zip. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the CLASSPATHs of the Python driver and tasks. + file_uris (MutableSequence[str]): + Optional. HCFS URIs of files to be placed in + the working directory of each executor. Useful + for naively parallel tasks. + archive_uris (MutableSequence[str]): + Optional. 
HCFS URIs of archives to be + extracted into the working directory of each + executor. Supported file types: + + .jar, .tar, .tar.gz, .tgz, and .zip. + properties (MutableMapping[str, str]): + Optional. A mapping of property names to + values, used to configure PySpark. Properties + that conflict with values set by the Dataproc + API may be overwritten. Can include properties + set in + /etc/spark/conf/spark-defaults.conf and classes + in user code. + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + main_python_file_uri: str = proto.Field( + proto.STRING, + number=1, + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + python_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="LoggingConfig", + ) + + +class QueryList(proto.Message): + r"""A list of queries to run on a cluster. + + Attributes: + queries (MutableSequence[str]): + Required. The queries to execute. You do not need to end a + query expression with a semicolon. Multiple queries can be + specified in one string by separating each with a semicolon. + Here is an example of a Dataproc API snippet that uses a + QueryList to specify a HiveJob: + + :: + + "hiveJob": { + "queryList": { + "queries": [ + "query1", + "query2", + "query3;query4", + ] + } + } + """ + + queries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class HiveJob(proto.Message): + r"""A Dataproc job for running `Apache + Hive `__ queries on YARN. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query_file_uri (str): + The HCFS URI of the script that contains Hive + queries. + + This field is a member of `oneof`_ ``queries``. + query_list (google.cloud.dataproc_v1.types.QueryList): + A list of queries. + + This field is a member of `oneof`_ ``queries``. + continue_on_failure (bool): + Optional. Whether to continue executing queries if a query + fails. The default value is ``false``. Setting to ``true`` + can be useful when executing independent parallel queries. + script_variables (MutableMapping[str, str]): + Optional. Mapping of query variable names to values + (equivalent to the Hive command: ``SET name="value";``). + properties (MutableMapping[str, str]): + Optional. A mapping of property names and values, used to + configure Hive. Properties that conflict with values set by + the Dataproc API may be overwritten. Can include properties + set in ``/etc/hadoop/conf/*-site.xml``, + /etc/hive/conf/hive-site.xml, and classes in user code. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the CLASSPATH of the Hive server and Hadoop + MapReduce (MR) tasks. Can contain Hive SerDes + and UDFs. 
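To make the QueryList/HiveJob relationship above concrete, here is a hedged editorial sketch of submitting a Hive job built from an inline QueryList through the generated JobControllerClient (the SubmitJobRequest message is defined later in this file). The project, region, cluster, and query text are hypothetical.

import uuid

from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
job = dataproc_v1.Job(
    placement=dataproc_v1.JobPlacement(cluster_name="my-cluster"),
    hive_job=dataproc_v1.HiveJob(
        # query_list and query_file_uri belong to the same oneof; set one.
        query_list=dataproc_v1.QueryList(
            queries=["SHOW DATABASES", "SELECT 1"]  # hypothetical queries
        ),
        continue_on_failure=False,
        script_variables={"env": "staging"},  # equivalent to SET env="staging";
    ),
)
submitted = client.submit_job(
    request=dataproc_v1.SubmitJobRequest(
        project_id="my-project",
        region="us-central1",
        job=job,
        request_id=str(uuid.uuid4()),
    )
)
print(submitted.reference.job_id, submitted.status.state.name)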
+ """ + + query_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="queries", + ) + query_list: "QueryList" = proto.Field( + proto.MESSAGE, + number=2, + oneof="queries", + message="QueryList", + ) + continue_on_failure: bool = proto.Field( + proto.BOOL, + number=3, + ) + script_variables: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + +class SparkSqlJob(proto.Message): + r"""A Dataproc job for running `Apache Spark + SQL `__ queries. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query_file_uri (str): + The HCFS URI of the script that contains SQL + queries. + + This field is a member of `oneof`_ ``queries``. + query_list (google.cloud.dataproc_v1.types.QueryList): + A list of queries. + + This field is a member of `oneof`_ ``queries``. + script_variables (MutableMapping[str, str]): + Optional. Mapping of query variable names to values + (equivalent to the Spark SQL command: SET + ``name="value";``). + properties (MutableMapping[str, str]): + Optional. A mapping of property names to + values, used to configure Spark SQL's SparkConf. + Properties that conflict with values set by the + Dataproc API may be overwritten. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to be added + to the Spark CLASSPATH. + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + query_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="queries", + ) + query_list: "QueryList" = proto.Field( + proto.MESSAGE, + number=2, + oneof="queries", + message="QueryList", + ) + script_variables: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=56, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="LoggingConfig", + ) + + +class PigJob(proto.Message): + r"""A Dataproc job for running `Apache Pig `__ + queries on YARN. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query_file_uri (str): + The HCFS URI of the script that contains the + Pig queries. + + This field is a member of `oneof`_ ``queries``. + query_list (google.cloud.dataproc_v1.types.QueryList): + A list of queries. + + This field is a member of `oneof`_ ``queries``. + continue_on_failure (bool): + Optional. Whether to continue executing queries if a query + fails. The default value is ``false``. Setting to ``true`` + can be useful when executing independent parallel queries. + script_variables (MutableMapping[str, str]): + Optional. 
Mapping of query variable names to values + (equivalent to the Pig command: ``name=[value]``). + properties (MutableMapping[str, str]): + Optional. A mapping of property names to values, used to + configure Pig. Properties that conflict with values set by + the Dataproc API may be overwritten. Can include properties + set in ``/etc/hadoop/conf/*-site.xml``, + /etc/pig/conf/pig.properties, and classes in user code. + jar_file_uris (MutableSequence[str]): + Optional. HCFS URIs of jar files to add to + the CLASSPATH of the Pig Client and Hadoop + MapReduce (MR) tasks. Can contain Pig UDFs. + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + query_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="queries", + ) + query_list: "QueryList" = proto.Field( + proto.MESSAGE, + number=2, + oneof="queries", + message="QueryList", + ) + continue_on_failure: bool = proto.Field( + proto.BOOL, + number=3, + ) + script_variables: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + jar_file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="LoggingConfig", + ) + + +class SparkRJob(proto.Message): + r"""A Dataproc job for running `Apache + SparkR `__ + applications on YARN. + + Attributes: + main_r_file_uri (str): + Required. The HCFS URI of the main R file to + use as the driver. Must be a .R file. + args (MutableSequence[str]): + Optional. The arguments to pass to the driver. Do not + include arguments, such as ``--conf``, that can be set as + job properties, since a collision may occur that causes an + incorrect job submission. + file_uris (MutableSequence[str]): + Optional. HCFS URIs of files to be placed in + the working directory of each executor. Useful + for naively parallel tasks. + archive_uris (MutableSequence[str]): + Optional. HCFS URIs of archives to be + extracted into the working directory of each + executor. Supported file types: + + .jar, .tar, .tar.gz, .tgz, and .zip. + properties (MutableMapping[str, str]): + Optional. A mapping of property names to + values, used to configure SparkR. Properties + that conflict with values set by the Dataproc + API may be overwritten. Can include properties + set in + /etc/spark/conf/spark-defaults.conf and classes + in user code. + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + main_r_file_uri: str = proto.Field( + proto.STRING, + number=1, + ) + args: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + file_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + archive_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="LoggingConfig", + ) + + +class PrestoJob(proto.Message): + r"""A Dataproc job for running `Presto `__ + queries. **IMPORTANT**: The `Dataproc Presto Optional + Component `__ + must be enabled when the cluster is created to submit a Presto job + to the cluster. + + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query_file_uri (str): + The HCFS URI of the script that contains SQL + queries. + + This field is a member of `oneof`_ ``queries``. + query_list (google.cloud.dataproc_v1.types.QueryList): + A list of queries. + + This field is a member of `oneof`_ ``queries``. + continue_on_failure (bool): + Optional. Whether to continue executing queries if a query + fails. The default value is ``false``. Setting to ``true`` + can be useful when executing independent parallel queries. + output_format (str): + Optional. The format in which query output + will be displayed. See the Presto documentation + for supported output formats + client_tags (MutableSequence[str]): + Optional. Presto client tags to attach to + this query + properties (MutableMapping[str, str]): + Optional. A mapping of property names to values. Used to set + Presto `session + properties `__ + Equivalent to using the --session flag in the Presto CLI + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. The runtime log config for job + execution. + """ + + query_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="queries", + ) + query_list: "QueryList" = proto.Field( + proto.MESSAGE, + number=2, + oneof="queries", + message="QueryList", + ) + continue_on_failure: bool = proto.Field( + proto.BOOL, + number=3, + ) + output_format: str = proto.Field( + proto.STRING, + number=4, + ) + client_tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="LoggingConfig", + ) + + +class TrinoJob(proto.Message): + r"""A Dataproc job for running `Trino `__ queries. + **IMPORTANT**: The `Dataproc Trino Optional + Component `__ + must be enabled when the cluster is created to submit a Trino job to + the cluster. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query_file_uri (str): + The HCFS URI of the script that contains SQL + queries. + + This field is a member of `oneof`_ ``queries``. + query_list (google.cloud.dataproc_v1.types.QueryList): + A list of queries. + + This field is a member of `oneof`_ ``queries``. + continue_on_failure (bool): + Optional. Whether to continue executing queries if a query + fails. The default value is ``false``. Setting to ``true`` + can be useful when executing independent parallel queries. + output_format (str): + Optional. The format in which query output + will be displayed. See the Trino documentation + for supported output formats + client_tags (MutableSequence[str]): + Optional. Trino client tags to attach to this + query + properties (MutableMapping[str, str]): + Optional. A mapping of property names to values. Used to set + Trino `session + properties `__ + Equivalent to using the --session flag in the Trino CLI + logging_config (google.cloud.dataproc_v1.types.LoggingConfig): + Optional. 
The runtime log config for job + execution. + """ + + query_file_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="queries", + ) + query_list: "QueryList" = proto.Field( + proto.MESSAGE, + number=2, + oneof="queries", + message="QueryList", + ) + continue_on_failure: bool = proto.Field( + proto.BOOL, + number=3, + ) + output_format: str = proto.Field( + proto.STRING, + number=4, + ) + client_tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + logging_config: "LoggingConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="LoggingConfig", + ) + + +class JobPlacement(proto.Message): + r"""Dataproc job config. + + Attributes: + cluster_name (str): + Required. The name of the cluster where the + job will be submitted. + cluster_uuid (str): + Output only. A cluster UUID generated by the + Dataproc service when the job is submitted. + cluster_labels (MutableMapping[str, str]): + Optional. Cluster labels to identify a + cluster where the job will be submitted. + """ + + cluster_name: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_uuid: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class JobStatus(proto.Message): + r"""Dataproc job status. + + Attributes: + state (google.cloud.dataproc_v1.types.JobStatus.State): + Output only. A state message specifying the + overall job state. + details (str): + Optional. Output only. Job state details, + such as an error description if the state is + ERROR. + state_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when this state was + entered. + substate (google.cloud.dataproc_v1.types.JobStatus.Substate): + Output only. Additional state information, + which includes status reported by the agent. + """ + + class State(proto.Enum): + r"""The job state. + + Values: + STATE_UNSPECIFIED (0): + The job state is unknown. + PENDING (1): + The job is pending; it has been submitted, + but is not yet running. + SETUP_DONE (8): + Job has been received by the service and + completed initial setup; it will soon be + submitted to the cluster. + RUNNING (2): + The job is running on the cluster. + CANCEL_PENDING (3): + A CancelJob request has been received, but is + pending. + CANCEL_STARTED (7): + Transient in-flight resources have been + canceled, and the request to cancel the running + job has been issued to the cluster. + CANCELLED (4): + The job cancellation was successful. + DONE (5): + The job has completed successfully. + ERROR (6): + The job has completed, but encountered an + error. + ATTEMPT_FAILURE (9): + Job attempt has failed. The detail field + contains failure details for this attempt. + + Applies to restartable jobs only. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + SETUP_DONE = 8 + RUNNING = 2 + CANCEL_PENDING = 3 + CANCEL_STARTED = 7 + CANCELLED = 4 + DONE = 5 + ERROR = 6 + ATTEMPT_FAILURE = 9 + + class Substate(proto.Enum): + r"""The job substate. + + Values: + UNSPECIFIED (0): + The job substate is unknown. + SUBMITTED (1): + The Job is submitted to the agent. + + Applies to RUNNING state. + QUEUED (2): + The Job has been received and is awaiting + execution (it may be waiting for a condition to + be met). See the "details" field for the reason + for the delay. + + Applies to RUNNING state. 
+ STALE_STATUS (3): + The agent-reported status is out of date, + which may be caused by a loss of communication + between the agent and Dataproc. If the agent + does not send a timely update, the job will + fail. + + Applies to RUNNING state. + """ + UNSPECIFIED = 0 + SUBMITTED = 1 + QUEUED = 2 + STALE_STATUS = 3 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + details: str = proto.Field( + proto.STRING, + number=2, + ) + state_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + substate: Substate = proto.Field( + proto.ENUM, + number=7, + enum=Substate, + ) + + +class JobReference(proto.Message): + r"""Encapsulates the full scoping used to reference a job. + + Attributes: + project_id (str): + Optional. The ID of the Google Cloud Platform + project that the job belongs to. If specified, + must match the request project ID. + job_id (str): + Optional. The job ID, which must be unique within the + project. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), or hyphens (-). The maximum length is 100 + characters. + + If not specified by the caller, the job ID will be provided + by the server. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class YarnApplication(proto.Message): + r"""A YARN application created by a job. Application information is a + subset of + org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. + + **Beta Feature**: This report is available for testing purposes + only. It may be changed before final release. + + Attributes: + name (str): + Required. The application name. + state (google.cloud.dataproc_v1.types.YarnApplication.State): + Required. The application state. + progress (float): + Required. The numerical progress of the + application, from 1 to 100. + tracking_url (str): + Optional. The HTTP URL of the + ApplicationMaster, HistoryServer, or + TimelineServer that provides + application-specific information. The URL uses + the internal hostname, and requires a proxy + server for resolution and, possibly, access. + """ + + class State(proto.Enum): + r"""The application state, corresponding to + YarnProtos.YarnApplicationStateProto. + + Values: + STATE_UNSPECIFIED (0): + Status is unspecified. + NEW (1): + Status is NEW. + NEW_SAVING (2): + Status is NEW_SAVING. + SUBMITTED (3): + Status is SUBMITTED. + ACCEPTED (4): + Status is ACCEPTED. + RUNNING (5): + Status is RUNNING. + FINISHED (6): + Status is FINISHED. + FAILED (7): + Status is FAILED. + KILLED (8): + Status is KILLED. + """ + STATE_UNSPECIFIED = 0 + NEW = 1 + NEW_SAVING = 2 + SUBMITTED = 3 + ACCEPTED = 4 + RUNNING = 5 + FINISHED = 6 + FAILED = 7 + KILLED = 8 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + progress: float = proto.Field( + proto.FLOAT, + number=3, + ) + tracking_url: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Job(proto.Message): + r"""A Dataproc job resource. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + reference (google.cloud.dataproc_v1.types.JobReference): + Optional. The fully qualified reference to the job, which + can be used to obtain the equivalent REST path of the job + resource. If this property is not specified when a job is + created, the server generates a job_id. + placement (google.cloud.dataproc_v1.types.JobPlacement): + Required. Job information, including how, + when, and where to run the job. + hadoop_job (google.cloud.dataproc_v1.types.HadoopJob): + Optional. Job is a Hadoop job. + + This field is a member of `oneof`_ ``type_job``. + spark_job (google.cloud.dataproc_v1.types.SparkJob): + Optional. Job is a Spark job. + + This field is a member of `oneof`_ ``type_job``. + pyspark_job (google.cloud.dataproc_v1.types.PySparkJob): + Optional. Job is a PySpark job. + + This field is a member of `oneof`_ ``type_job``. + hive_job (google.cloud.dataproc_v1.types.HiveJob): + Optional. Job is a Hive job. + + This field is a member of `oneof`_ ``type_job``. + pig_job (google.cloud.dataproc_v1.types.PigJob): + Optional. Job is a Pig job. + + This field is a member of `oneof`_ ``type_job``. + spark_r_job (google.cloud.dataproc_v1.types.SparkRJob): + Optional. Job is a SparkR job. + + This field is a member of `oneof`_ ``type_job``. + spark_sql_job (google.cloud.dataproc_v1.types.SparkSqlJob): + Optional. Job is a SparkSql job. + + This field is a member of `oneof`_ ``type_job``. + presto_job (google.cloud.dataproc_v1.types.PrestoJob): + Optional. Job is a Presto job. + + This field is a member of `oneof`_ ``type_job``. + trino_job (google.cloud.dataproc_v1.types.TrinoJob): + Optional. Job is a Trino job. + + This field is a member of `oneof`_ ``type_job``. + status (google.cloud.dataproc_v1.types.JobStatus): + Output only. The job status. Additional application-specific + status information may be contained in the type_job and + yarn_applications fields. + status_history (MutableSequence[google.cloud.dataproc_v1.types.JobStatus]): + Output only. The previous job status. + yarn_applications (MutableSequence[google.cloud.dataproc_v1.types.YarnApplication]): + Output only. The collection of YARN applications spun up by + this job. + + **Beta** Feature: This report is available for testing + purposes only. It may be changed before final release. + driver_output_resource_uri (str): + Output only. A URI pointing to the location + of the stdout of the job's driver program. + driver_control_files_uri (str): + Output only. If present, the location of miscellaneous + control files which may be used as part of job setup and + handling. If not present, control files may be placed in the + same location as ``driver_output_uri``. + labels (MutableMapping[str, str]): + Optional. The labels to associate with this job. Label + **keys** must contain 1 to 63 characters, and must conform + to `RFC 1035 `__. + Label **values** may be empty, but, if present, must contain + 1 to 63 characters, and must conform to `RFC + 1035 `__. No more than + 32 labels can be associated with a job. + scheduling (google.cloud.dataproc_v1.types.JobScheduling): + Optional. Job scheduling configuration. + job_uuid (str): + Output only. A UUID that uniquely identifies a job within + the project over time. This is in contrast to a + user-settable reference.job_id that may be reused over time. + done (bool): + Output only. Indicates whether the job is completed. 
If the + value is ``false``, the job is still in progress. If + ``true``, the job is completed, and ``status.state`` field + will indicate if it was successful, failed, or cancelled. + driver_scheduling_config (google.cloud.dataproc_v1.types.DriverSchedulingConfig): + Optional. Driver scheduling configuration. + """ + + reference: "JobReference" = proto.Field( + proto.MESSAGE, + number=1, + message="JobReference", + ) + placement: "JobPlacement" = proto.Field( + proto.MESSAGE, + number=2, + message="JobPlacement", + ) + hadoop_job: "HadoopJob" = proto.Field( + proto.MESSAGE, + number=3, + oneof="type_job", + message="HadoopJob", + ) + spark_job: "SparkJob" = proto.Field( + proto.MESSAGE, + number=4, + oneof="type_job", + message="SparkJob", + ) + pyspark_job: "PySparkJob" = proto.Field( + proto.MESSAGE, + number=5, + oneof="type_job", + message="PySparkJob", + ) + hive_job: "HiveJob" = proto.Field( + proto.MESSAGE, + number=6, + oneof="type_job", + message="HiveJob", + ) + pig_job: "PigJob" = proto.Field( + proto.MESSAGE, + number=7, + oneof="type_job", + message="PigJob", + ) + spark_r_job: "SparkRJob" = proto.Field( + proto.MESSAGE, + number=21, + oneof="type_job", + message="SparkRJob", + ) + spark_sql_job: "SparkSqlJob" = proto.Field( + proto.MESSAGE, + number=12, + oneof="type_job", + message="SparkSqlJob", + ) + presto_job: "PrestoJob" = proto.Field( + proto.MESSAGE, + number=23, + oneof="type_job", + message="PrestoJob", + ) + trino_job: "TrinoJob" = proto.Field( + proto.MESSAGE, + number=28, + oneof="type_job", + message="TrinoJob", + ) + status: "JobStatus" = proto.Field( + proto.MESSAGE, + number=8, + message="JobStatus", + ) + status_history: MutableSequence["JobStatus"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="JobStatus", + ) + yarn_applications: MutableSequence["YarnApplication"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="YarnApplication", + ) + driver_output_resource_uri: str = proto.Field( + proto.STRING, + number=17, + ) + driver_control_files_uri: str = proto.Field( + proto.STRING, + number=15, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=18, + ) + scheduling: "JobScheduling" = proto.Field( + proto.MESSAGE, + number=20, + message="JobScheduling", + ) + job_uuid: str = proto.Field( + proto.STRING, + number=22, + ) + done: bool = proto.Field( + proto.BOOL, + number=24, + ) + driver_scheduling_config: "DriverSchedulingConfig" = proto.Field( + proto.MESSAGE, + number=27, + message="DriverSchedulingConfig", + ) + + +class DriverSchedulingConfig(proto.Message): + r"""Driver scheduling configuration. + + Attributes: + memory_mb (int): + Required. The amount of memory in MB the + driver is requesting. + vcores (int): + Required. The number of vCPUs the driver is + requesting. + """ + + memory_mb: int = proto.Field( + proto.INT32, + number=1, + ) + vcores: int = proto.Field( + proto.INT32, + number=2, + ) + + +class JobScheduling(proto.Message): + r"""Job scheduling options. + + Attributes: + max_failures_per_hour (int): + Optional. Maximum number of times per hour a driver may be + restarted as a result of driver exiting with non-zero code + before job is reported failed. + + A job may be reported as thrashing if the driver exits with + a non-zero code four times within a 10-minute window. + + Maximum value is 10. 
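A short illustrative sketch (assumed names, not part of the patch) of how JobScheduling and the LoggingConfig defined earlier attach to a Job; the restart limits echo the maxima documented in this docstring.

from google.cloud import dataproc_v1

job = dataproc_v1.Job(
    placement=dataproc_v1.JobPlacement(cluster_name="my-cluster"),  # hypothetical
    pyspark_job=dataproc_v1.PySparkJob(
        main_python_file_uri="gs://my-bucket/wordcount.py",  # hypothetical URI
        logging_config=dataproc_v1.LoggingConfig(
            driver_log_levels={"root": dataproc_v1.LoggingConfig.Level.INFO}
        ),
    ),
    scheduling=dataproc_v1.JobScheduling(
        max_failures_per_hour=3,  # per-hour driver restart budget (max 10)
        max_failures_total=10,  # lifetime restart budget (max 240)
    ),
)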
+ + **Note:** This restartable job option is not supported in + Dataproc [workflow templates] + (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template). + max_failures_total (int): + Optional. Maximum total number of times a driver may be + restarted as a result of the driver exiting with a non-zero + code. After the maximum number is reached, the job will be + reported as failed. + + Maximum value is 240. + + **Note:** Currently, this restartable job option is not + supported in Dataproc `workflow + templates `__. + """ + + max_failures_per_hour: int = proto.Field( + proto.INT32, + number=1, + ) + max_failures_total: int = proto.Field( + proto.INT32, + number=2, + ) + + +class SubmitJobRequest(proto.Message): + r"""A request to submit a job. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the job belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + job (google.cloud.dataproc_v1.types.Job): + Required. The job resource. + request_id (str): + Optional. A unique id used to identify the request. If the + server receives two + `SubmitJobRequest `__\ s + with the same id, then the second request will be ignored + and the first [Job][google.cloud.dataproc.v1.Job] created + and stored in the backend is returned. + + It is recommended to always set this value to a + `UUID `__. + + The id must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + job: "Job" = proto.Field( + proto.MESSAGE, + number=2, + message="Job", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class JobMetadata(proto.Message): + r"""Job Operation metadata. + + Attributes: + job_id (str): + Output only. The job id. + status (google.cloud.dataproc_v1.types.JobStatus): + Output only. Most recent job status. + operation_type (str): + Output only. Operation type. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Job submission time. + """ + + job_id: str = proto.Field( + proto.STRING, + number=1, + ) + status: "JobStatus" = proto.Field( + proto.MESSAGE, + number=2, + message="JobStatus", + ) + operation_type: str = proto.Field( + proto.STRING, + number=3, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class GetJobRequest(proto.Message): + r"""A request to get the resource representation for a job in a + project. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the job belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + job_id (str): + Required. The job ID. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListJobsRequest(proto.Message): + r"""A request to list jobs in a project. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the job belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + page_size (int): + Optional. The number of results to return in + each response. + page_token (str): + Optional. 
The page token, returned by a + previous call, to request the next page of + results. + cluster_name (str): + Optional. If set, the returned jobs list + includes only jobs that were submitted to the + named cluster. + job_state_matcher (google.cloud.dataproc_v1.types.ListJobsRequest.JobStateMatcher): + Optional. Specifies enumerated categories of jobs to list. + (default = match ALL jobs). + + If ``filter`` is provided, ``jobStateMatcher`` will be + ignored. + filter (str): + Optional. A filter constraining the jobs to list. Filters + are case-sensitive and have the following syntax: + + [field = value] AND [field [= value]] ... + + where **field** is ``status.state`` or ``labels.[KEY]``, and + ``[KEY]`` is a label key. **value** can be ``*`` to match + all values. ``status.state`` can be either ``ACTIVE`` or + ``NON_ACTIVE``. Only the logical ``AND`` operator is + supported; space-separated items are treated as having an + implicit ``AND`` operator. + + Example filter: + + status.state = ACTIVE AND labels.env = staging AND + labels.starred = \* + """ + + class JobStateMatcher(proto.Enum): + r"""A matcher that specifies categories of job states. + + Values: + ALL (0): + Match all jobs, regardless of state. + ACTIVE (1): + Only match jobs in non-terminal states: PENDING, RUNNING, or + CANCEL_PENDING. + NON_ACTIVE (2): + Only match jobs in terminal states: + CANCELLED, DONE, or ERROR. + """ + ALL = 0 + ACTIVE = 1 + NON_ACTIVE = 2 + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=6, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=4, + ) + job_state_matcher: JobStateMatcher = proto.Field( + proto.ENUM, + number=5, + enum=JobStateMatcher, + ) + filter: str = proto.Field( + proto.STRING, + number=7, + ) + + +class UpdateJobRequest(proto.Message): + r"""A request to update a job. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the job belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + job_id (str): + Required. The job ID. + job (google.cloud.dataproc_v1.types.Job): + Required. The changes to the job. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Specifies the path, relative to Job, of the field + to update. For example, to update the labels of a Job the + update_mask parameter would be specified as labels, and the + ``PATCH`` request body would specify the new value. Note: + Currently, labels is the only field that can be updated. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=2, + ) + job_id: str = proto.Field( + proto.STRING, + number=3, + ) + job: "Job" = proto.Field( + proto.MESSAGE, + number=4, + message="Job", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=5, + message=field_mask_pb2.FieldMask, + ) + + +class ListJobsResponse(proto.Message): + r"""A list of jobs in a project. + + Attributes: + jobs (MutableSequence[google.cloud.dataproc_v1.types.Job]): + Output only. Jobs list. + next_page_token (str): + Optional. This token is included in the response if there + are more results to fetch. To fetch additional results, + provide this value as the ``page_token`` in a subsequent + ListJobsRequest. 
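A hedged sketch of the listing flow these request/response messages describe: the generated pager follows ``next_page_token`` automatically, while ``raw_page`` exposes the underlying response when manual paging is needed. Project, region, and cluster names are hypothetical.

from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
pager = client.list_jobs(
    request=dataproc_v1.ListJobsRequest(
        project_id="my-project",
        region="us-central1",
        cluster_name="my-cluster",  # optional: restrict to one cluster
        job_state_matcher=dataproc_v1.ListJobsRequest.JobStateMatcher.ACTIVE,
    )
)
for job in pager:  # the pager fetches further pages on demand
    print(job.reference.job_id, job.status.state.name)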
+ """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence["Job"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Job", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelJobRequest(proto.Message): + r"""A request to cancel a job. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the job belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + job_id (str): + Required. The job ID. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteJobRequest(proto.Message): + r"""A request to delete a job. + + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project that the job belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + job_id (str): + Required. The job ID. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/node_groups.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/node_groups.py new file mode 100644 index 000000000000..65e974b18e4f --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/node_groups.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dataproc_v1.types import clusters + +__protobuf__ = proto.module( + package="google.cloud.dataproc.v1", + manifest={ + "CreateNodeGroupRequest", + "ResizeNodeGroupRequest", + "GetNodeGroupRequest", + }, +) + + +class CreateNodeGroupRequest(proto.Message): + r"""A request to create a node group. + + Attributes: + parent (str): + Required. The parent resource where this node group will be + created. Format: + ``projects/{project}/regions/{region}/clusters/{cluster}`` + node_group (google.cloud.dataproc_v1.types.NodeGroup): + Required. The node group to create. + node_group_id (str): + Optional. An optional node group ID. Generated if not + specified. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). Cannot begin or end with + underscore or hyphen. Must consist of from 3 to 33 + characters. + request_id (str): + Optional. A unique ID used to identify the request. 
If the + server receives two + `CreateNodeGroupRequest `__ + with the same ID, the second request is ignored and the + first + [google.longrunning.Operation][google.longrunning.Operation] + created and stored in the backend is returned. + + Recommendation: Set this value to a + `UUID `__. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + node_group: clusters.NodeGroup = proto.Field( + proto.MESSAGE, + number=2, + message=clusters.NodeGroup, + ) + node_group_id: str = proto.Field( + proto.STRING, + number=4, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ResizeNodeGroupRequest(proto.Message): + r"""A request to resize a node group. + + Attributes: + name (str): + Required. The name of the node group to resize. Format: + ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` + size (int): + Required. The number of running instances for + the node group to maintain. The group adds or + removes instances to maintain the number of + instances specified by this parameter. + request_id (str): + Optional. A unique ID used to identify the request. If the + server receives two + `ResizeNodeGroupRequest `__ + with the same ID, the second request is ignored and the + first + [google.longrunning.Operation][google.longrunning.Operation] + created and stored in the backend is returned. + + Recommendation: Set this value to a + `UUID `__. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): + Optional. Timeout for graceful YARN decommissioning. + [Graceful decommissioning] + (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/scaling-clusters#graceful_decommissioning) + allows the removal of nodes from the Compute Engine node + group without interrupting jobs in progress. This timeout + specifies how long to wait for jobs in progress to finish + before forcefully removing nodes (and potentially + interrupting jobs). Default timeout is 0 (for forceful + decommission), and the maximum allowed timeout is 1 day. + (see JSON representation of + `Duration `__). + + Only supported on Dataproc image versions 1.2 and higher. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + size: int = proto.Field( + proto.INT32, + number=2, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + graceful_decommission_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + +class GetNodeGroupRequest(proto.Message): + r"""A request to get a node group. + + Attributes: + name (str): + Required. The name of the node group to retrieve.
Format: + ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/operations.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/operations.py new file mode 100644 index 000000000000..2a68bb2df90d --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/operations.py @@ -0,0 +1,315 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dataproc.v1", + manifest={ + "BatchOperationMetadata", + "ClusterOperationStatus", + "ClusterOperationMetadata", + "NodeGroupOperationMetadata", + }, +) + + +class BatchOperationMetadata(proto.Message): + r"""Metadata describing the Batch operation. + + Attributes: + batch (str): + Name of the batch for the operation. + batch_uuid (str): + Batch UUID for the operation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the operation was created. + done_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the operation finished. + operation_type (google.cloud.dataproc_v1.types.BatchOperationMetadata.BatchOperationType): + The operation type. + description (str): + Short description of the operation. + labels (MutableMapping[str, str]): + Labels associated with the operation. + warnings (MutableSequence[str]): + Warnings encountered during operation + execution. + """ + + class BatchOperationType(proto.Enum): + r"""Operation type for Batch resources + + Values: + BATCH_OPERATION_TYPE_UNSPECIFIED (0): + Batch operation type is unknown. + BATCH (1): + Batch operation type. + """ + BATCH_OPERATION_TYPE_UNSPECIFIED = 0 + BATCH = 1 + + batch: str = proto.Field( + proto.STRING, + number=1, + ) + batch_uuid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + done_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + operation_type: BatchOperationType = proto.Field( + proto.ENUM, + number=6, + enum=BatchOperationType, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + warnings: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + + +class ClusterOperationStatus(proto.Message): + r"""The status of the operation. + + Attributes: + state (google.cloud.dataproc_v1.types.ClusterOperationStatus.State): + Output only. A message containing the + operation state. 
+ inner_state (str): + Output only. A message containing the + detailed operation state. + details (str): + Output only. A message containing any + operation metadata details. + state_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time this state was entered. + """ + + class State(proto.Enum): + r"""The operation state. + + Values: + UNKNOWN (0): + Unused. + PENDING (1): + The operation has been created. + RUNNING (2): + The operation is running. + DONE (3): + The operation is done; either cancelled or + completed. + """ + UNKNOWN = 0 + PENDING = 1 + RUNNING = 2 + DONE = 3 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + inner_state: str = proto.Field( + proto.STRING, + number=2, + ) + details: str = proto.Field( + proto.STRING, + number=3, + ) + state_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class ClusterOperationMetadata(proto.Message): + r"""Metadata describing the operation. + + Attributes: + cluster_name (str): + Output only. Name of the cluster for the + operation. + cluster_uuid (str): + Output only. Cluster UUID for the operation. + status (google.cloud.dataproc_v1.types.ClusterOperationStatus): + Output only. Current operation status. + status_history (MutableSequence[google.cloud.dataproc_v1.types.ClusterOperationStatus]): + Output only. The previous operation status. + operation_type (str): + Output only. The operation type. + description (str): + Output only. Short description of operation. + labels (MutableMapping[str, str]): + Output only. Labels associated with the + operation + warnings (MutableSequence[str]): + Output only. Errors encountered during + operation execution. + child_operation_ids (MutableSequence[str]): + Output only. Child operation ids + """ + + cluster_name: str = proto.Field( + proto.STRING, + number=7, + ) + cluster_uuid: str = proto.Field( + proto.STRING, + number=8, + ) + status: "ClusterOperationStatus" = proto.Field( + proto.MESSAGE, + number=9, + message="ClusterOperationStatus", + ) + status_history: MutableSequence["ClusterOperationStatus"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="ClusterOperationStatus", + ) + operation_type: str = proto.Field( + proto.STRING, + number=11, + ) + description: str = proto.Field( + proto.STRING, + number=12, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + warnings: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + child_operation_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + + +class NodeGroupOperationMetadata(proto.Message): + r"""Metadata describing the node group operation. + + Attributes: + node_group_id (str): + Output only. Node group ID for the operation. + cluster_uuid (str): + Output only. Cluster UUID associated with the + node group operation. + status (google.cloud.dataproc_v1.types.ClusterOperationStatus): + Output only. Current operation status. + status_history (MutableSequence[google.cloud.dataproc_v1.types.ClusterOperationStatus]): + Output only. The previous operation status. + operation_type (google.cloud.dataproc_v1.types.NodeGroupOperationMetadata.NodeGroupOperationType): + The operation type. + description (str): + Output only. Short description of operation. + labels (MutableMapping[str, str]): + Output only. Labels associated with the + operation. + warnings (MutableSequence[str]): + Output only. 
Errors encountered during + operation execution. + """ + + class NodeGroupOperationType(proto.Enum): + r"""Operation type for node group resources. + + Values: + NODE_GROUP_OPERATION_TYPE_UNSPECIFIED (0): + Node group operation type is unknown. + CREATE (1): + Create node group operation type. + UPDATE (2): + Update node group operation type. + DELETE (3): + Delete node group operation type. + RESIZE (4): + Resize node group operation type. + """ + NODE_GROUP_OPERATION_TYPE_UNSPECIFIED = 0 + CREATE = 1 + UPDATE = 2 + DELETE = 3 + RESIZE = 4 + + node_group_id: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_uuid: str = proto.Field( + proto.STRING, + number=2, + ) + status: "ClusterOperationStatus" = proto.Field( + proto.MESSAGE, + number=3, + message="ClusterOperationStatus", + ) + status_history: MutableSequence["ClusterOperationStatus"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="ClusterOperationStatus", + ) + operation_type: NodeGroupOperationType = proto.Field( + proto.ENUM, + number=5, + enum=NodeGroupOperationType, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + warnings: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py new file mode 100644 index 000000000000..b9ecdf13b097 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/shared.py @@ -0,0 +1,789 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dataproc.v1", + manifest={ + "Component", + "FailureAction", + "RuntimeConfig", + "EnvironmentConfig", + "ExecutionConfig", + "SparkHistoryServerConfig", + "PeripheralsConfig", + "RuntimeInfo", + "UsageMetrics", + "UsageSnapshot", + "GkeClusterConfig", + "KubernetesClusterConfig", + "KubernetesSoftwareConfig", + "GkeNodePoolTarget", + "GkeNodePoolConfig", + }, +) + + +class Component(proto.Enum): + r"""Cluster components that can be activated. + + Values: + COMPONENT_UNSPECIFIED (0): + Unspecified component. Specifying this will + cause Cluster creation to fail. + ANACONDA (5): + The Anaconda python distribution. The + Anaconda component is not supported in the + Dataproc 2.0 + image. The 2.0 image is pre-installed with + Miniconda. + DOCKER (13): + Docker + DRUID (9): + The Druid query engine. (alpha) + FLINK (14): + Flink + HBASE (11): + HBase. 
(beta) + HIVE_WEBHCAT (3): + The Hive Web HCatalog (the REST service for + accessing HCatalog). + HUDI (18): + Hudi. + JUPYTER (1): + The Jupyter Notebook. + PRESTO (6): + The Presto query engine. + TRINO (17): + The Trino query engine. + RANGER (12): + The Ranger service. + SOLR (10): + The Solr service. + ZEPPELIN (4): + The Zeppelin notebook. + ZOOKEEPER (8): + The Zookeeper service. + """ + COMPONENT_UNSPECIFIED = 0 + ANACONDA = 5 + DOCKER = 13 + DRUID = 9 + FLINK = 14 + HBASE = 11 + HIVE_WEBHCAT = 3 + HUDI = 18 + JUPYTER = 1 + PRESTO = 6 + TRINO = 17 + RANGER = 12 + SOLR = 10 + ZEPPELIN = 4 + ZOOKEEPER = 8 + + +class FailureAction(proto.Enum): + r"""Actions in response to failure of a resource associated with + a cluster. + + Values: + FAILURE_ACTION_UNSPECIFIED (0): + When FailureAction is unspecified, failure action defaults + to NO_ACTION. + NO_ACTION (1): + Take no action on failure to create a cluster resource. + NO_ACTION is the default. + DELETE (2): + Delete the failed cluster resource. + """ + FAILURE_ACTION_UNSPECIFIED = 0 + NO_ACTION = 1 + DELETE = 2 + + +class RuntimeConfig(proto.Message): + r"""Runtime configuration for a workload. + + Attributes: + version (str): + Optional. Version of the batch runtime. + container_image (str): + Optional. Optional custom container image for + the job runtime environment. If not specified, a + default container image will be used. + properties (MutableMapping[str, str]): + Optional. A mapping of property names to + values, which are used to configure workload + execution. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + container_image: str = proto.Field( + proto.STRING, + number=2, + ) + properties: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class EnvironmentConfig(proto.Message): + r"""Environment configuration for a workload. + + Attributes: + execution_config (google.cloud.dataproc_v1.types.ExecutionConfig): + Optional. Execution configuration for a + workload. + peripherals_config (google.cloud.dataproc_v1.types.PeripheralsConfig): + Optional. Peripherals configuration that + workload has access to. + """ + + execution_config: "ExecutionConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="ExecutionConfig", + ) + peripherals_config: "PeripheralsConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="PeripheralsConfig", + ) + + +class ExecutionConfig(proto.Message): + r"""Execution configuration for a workload. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + service_account (str): + Optional. Service account that used to + execute workload. + network_uri (str): + Optional. Network URI to connect workload to. + + This field is a member of `oneof`_ ``network``. + subnetwork_uri (str): + Optional. Subnetwork URI to connect workload + to. + + This field is a member of `oneof`_ ``network``. + network_tags (MutableSequence[str]): + Optional. Tags used for network traffic + control. + kms_key (str): + Optional. The Cloud KMS key to use for + encryption. + ttl (google.protobuf.duration_pb2.Duration): + Optional. The duration after which the workload will be + terminated. 
When the workload passes this ttl, it will be
+            unconditionally killed without waiting for ongoing work to
+            finish. Minimum value is 10 minutes; maximum value is 14
+            days (see JSON representation of
+            `Duration `__).
+            If both ttl and idle_ttl are specified, the conditions are
+            treated as an OR: the workload will be terminated when it
+            has been idle for idle_ttl or when the ttl has passed,
+            whichever comes first. If ttl is not specified for a
+            session, it defaults to 24h.
+        staging_bucket (str):
+            Optional. A Cloud Storage bucket used to stage workload
+            dependencies, config files, and store workload output and
+            other ephemeral data, such as Spark history files. If you do
+            not specify a staging bucket, Cloud Dataproc will determine
+            a Cloud Storage location according to the region where your
+            workload is running, and then create and manage
+            project-level, per-location staging and temporary buckets.
+            **This field requires a Cloud Storage bucket name, not a
+            ``gs://...`` URI to a Cloud Storage bucket.**
+    """
+
+    service_account: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    network_uri: str = proto.Field(
+        proto.STRING,
+        number=4,
+        oneof="network",
+    )
+    subnetwork_uri: str = proto.Field(
+        proto.STRING,
+        number=5,
+        oneof="network",
+    )
+    network_tags: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=6,
+    )
+    kms_key: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    ttl: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message=duration_pb2.Duration,
+    )
+    staging_bucket: str = proto.Field(
+        proto.STRING,
+        number=10,
+    )
+
+
+class SparkHistoryServerConfig(proto.Message):
+    r"""Spark History Server configuration for the workload.
+
+    Attributes:
+        dataproc_cluster (str):
+            Optional. Resource name of an existing Dataproc Cluster to
+            act as a Spark History Server for the workload.
+
+            Example:
+
+            - ``projects/[project_id]/regions/[region]/clusters/[cluster_name]``
+    """
+
+    dataproc_cluster: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class PeripheralsConfig(proto.Message):
+    r"""Auxiliary services configuration for a workload.
+
+    Attributes:
+        metastore_service (str):
+            Optional. Resource name of an existing Dataproc Metastore
+            service.
+
+            Example:
+
+            - ``projects/[project_id]/locations/[region]/services/[service_id]``
+        spark_history_server_config (google.cloud.dataproc_v1.types.SparkHistoryServerConfig):
+            Optional. The Spark History Server
+            configuration for the workload.
+    """
+
+    metastore_service: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    spark_history_server_config: "SparkHistoryServerConfig" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="SparkHistoryServerConfig",
+    )
+
+
+class RuntimeInfo(proto.Message):
+    r"""Runtime information about workload execution.
+
+    Attributes:
+        endpoints (MutableMapping[str, str]):
+            Output only. Map of remote access endpoints
+            (such as web interfaces and APIs) to their URIs.
+        output_uri (str):
+            Output only. A URI pointing to the location
+            of the stdout and stderr of the workload.
+        diagnostic_output_uri (str):
+            Output only. A URI pointing to the location
+            of the diagnostics tarball.
+        approximate_usage (google.cloud.dataproc_v1.types.UsageMetrics):
+            Output only. Approximate workload resource usage calculated
+            after workload finishes (see [Dataproc Serverless pricing]
+            (https://cloud.google.com/dataproc-serverless/pricing)).
+        current_usage (google.cloud.dataproc_v1.types.UsageSnapshot):
+            Output only. Snapshot of current workload
+            resource usage.
+    """
+
+    endpoints: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=1,
+    )
+    output_uri: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    diagnostic_output_uri: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    approximate_usage: "UsageMetrics" = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message="UsageMetrics",
+    )
+    current_usage: "UsageSnapshot" = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message="UsageSnapshot",
+    )
+
+
+class UsageMetrics(proto.Message):
+    r"""Usage metrics represent approximate total resources consumed
+    by a workload.
+
+    Attributes:
+        milli_dcu_seconds (int):
+            Optional. DCU (Dataproc Compute Units) usage in
+            (``milliDCU`` x ``seconds``) (see [Dataproc Serverless
+            pricing]
+            (https://cloud.google.com/dataproc-serverless/pricing)).
+        shuffle_storage_gb_seconds (int):
+            Optional. Shuffle storage usage in (``GB`` x ``seconds``)
+            (see [Dataproc Serverless pricing]
+            (https://cloud.google.com/dataproc-serverless/pricing)).
+    """
+
+    milli_dcu_seconds: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    shuffle_storage_gb_seconds: int = proto.Field(
+        proto.INT64,
+        number=2,
+    )
+
+
+class UsageSnapshot(proto.Message):
+    r"""The usage snapshot represents the resources consumed by a
+    workload at a specified time.
+
+    Attributes:
+        milli_dcu (int):
+            Optional. Milli (one-thousandth) Dataproc Compute Units
+            (DCUs) (see [Dataproc Serverless pricing]
+            (https://cloud.google.com/dataproc-serverless/pricing)).
+        shuffle_storage_gb (int):
+            Optional. Shuffle Storage in gigabytes (GB). (see [Dataproc
+            Serverless pricing]
+            (https://cloud.google.com/dataproc-serverless/pricing))
+        snapshot_time (google.protobuf.timestamp_pb2.Timestamp):
+            Optional. The timestamp of the usage
+            snapshot.
+    """
+
+    milli_dcu: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    shuffle_storage_gb: int = proto.Field(
+        proto.INT64,
+        number=2,
+    )
+    snapshot_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class GkeClusterConfig(proto.Message):
+    r"""The cluster's GKE config.
+
+    Attributes:
+        gke_cluster_target (str):
+            Optional. A target GKE cluster to deploy to. It must be in
+            the same project and region as the Dataproc cluster (the GKE
+            cluster can be zonal or regional). Format:
+            'projects/{project}/locations/{location}/clusters/{cluster_id}'
+        node_pool_target (MutableSequence[google.cloud.dataproc_v1.types.GkeNodePoolTarget]):
+            Optional. GKE node pools where workloads will be scheduled.
+            At least one node pool must be assigned the ``DEFAULT``
+            [GkeNodePoolTarget.Role][google.cloud.dataproc.v1.GkeNodePoolTarget.Role].
+            If a ``GkeNodePoolTarget`` is not specified, Dataproc
+            constructs a ``DEFAULT`` ``GkeNodePoolTarget``. Each role
+            can be given to only one ``GkeNodePoolTarget``. All node
+            pools must have the same location settings.
+    """
+
+    gke_cluster_target: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    node_pool_target: MutableSequence["GkeNodePoolTarget"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message="GkeNodePoolTarget",
+    )
+
+
+class KubernetesClusterConfig(proto.Message):
+    r"""The configuration for running the Dataproc cluster on
+    Kubernetes.
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        kubernetes_namespace (str):
+            Optional. A namespace within the Kubernetes
+            cluster to deploy into. If this namespace does
+            not exist, it is created. If it exists, Dataproc
+            verifies that another Dataproc VirtualCluster is
+            not installed into it. If not specified, the
+            name of the Dataproc Cluster is used.
+        gke_cluster_config (google.cloud.dataproc_v1.types.GkeClusterConfig):
+            Required. The configuration for running the
+            Dataproc cluster on GKE.
+
+            This field is a member of `oneof`_ ``config``.
+        kubernetes_software_config (google.cloud.dataproc_v1.types.KubernetesSoftwareConfig):
+            Optional. The software configuration for this
+            Dataproc cluster running on Kubernetes.
+    """
+
+    kubernetes_namespace: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    gke_cluster_config: "GkeClusterConfig" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="config",
+        message="GkeClusterConfig",
+    )
+    kubernetes_software_config: "KubernetesSoftwareConfig" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="KubernetesSoftwareConfig",
+    )
+
+
+class KubernetesSoftwareConfig(proto.Message):
+    r"""The software configuration for this Dataproc cluster running
+    on Kubernetes.
+
+    Attributes:
+        component_version (MutableMapping[str, str]):
+            The components that should be installed in
+            this Dataproc cluster. The key must be a string
+            from the KubernetesComponent enumeration. The
+            value is the version of the software to be
+            installed.
+            At least one entry must be specified.
+        properties (MutableMapping[str, str]):
+            The properties to set on daemon config files.
+
+            Property keys are specified in ``prefix:property`` format,
+            for example ``spark:spark.kubernetes.container.image``. The
+            following are supported prefixes and their mappings:
+
+            - spark: ``spark-defaults.conf``
+
+            For more information, see `Cluster
+            properties `__.
+    """
+
+    component_version: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=1,
+    )
+    properties: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=2,
+    )
+
+
+class GkeNodePoolTarget(proto.Message):
+    r"""GKE node pools that Dataproc workloads run on.
+
+    Attributes:
+        node_pool (str):
+            Required. The target GKE node pool. Format:
+            'projects/{project}/locations/{location}/clusters/{cluster}/nodePools/{node_pool}'
+        roles (MutableSequence[google.cloud.dataproc_v1.types.GkeNodePoolTarget.Role]):
+            Required. The roles associated with the GKE
+            node pool.
+        node_pool_config (google.cloud.dataproc_v1.types.GkeNodePoolConfig):
+            Input only. The configuration for the GKE
+            node pool.
+            If specified, Dataproc attempts to create a node
+            pool with the specified shape. If one with the
+            same name already exists, it is verified against
+            all specified fields. If a field differs, the
+            virtual cluster creation will fail.
+
+            If omitted, any node pool with the specified
+            name is used. If a node pool with the specified
+            name does not exist, Dataproc creates a node pool
+            with default values.
+
+            This is an input only field. It will not be
+            returned by the API.
+    """
+
+    class Role(proto.Enum):
+        r"""``Role`` specifies the tasks that will run on the node pool. Roles
+        can be specific to workloads. Exactly one
+        [GkeNodePoolTarget][google.cloud.dataproc.v1.GkeNodePoolTarget]
+        within the virtual cluster must have the ``DEFAULT`` role, which is
+        used to run all workloads that are not associated with a node pool.
+
+        Values:
+            ROLE_UNSPECIFIED (0):
+                Role is unspecified.
+            DEFAULT (1):
+                At least one node pool must have the ``DEFAULT`` role. Work
+                assigned to a role that is not associated with a node pool
+                is assigned to the node pool with the ``DEFAULT`` role. For
+                example, work assigned to the ``CONTROLLER`` role will be
+                assigned to the node pool with the ``DEFAULT`` role if no
+                node pool has the ``CONTROLLER`` role.
+            CONTROLLER (2):
+                Run work associated with the Dataproc control
+                plane (for example, controllers and webhooks).
+                Very low resource requirements.
+            SPARK_DRIVER (3):
+                Run work associated with a Spark driver of a
+                job.
+            SPARK_EXECUTOR (4):
+                Run work associated with a Spark executor of
+                a job.
+        """
+        ROLE_UNSPECIFIED = 0
+        DEFAULT = 1
+        CONTROLLER = 2
+        SPARK_DRIVER = 3
+        SPARK_EXECUTOR = 4
+
+    node_pool: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    roles: MutableSequence[Role] = proto.RepeatedField(
+        proto.ENUM,
+        number=2,
+        enum=Role,
+    )
+    node_pool_config: "GkeNodePoolConfig" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="GkeNodePoolConfig",
+    )
+
+
+class GkeNodePoolConfig(proto.Message):
+    r"""The configuration of a GKE node pool used by a `Dataproc-on-GKE
+    cluster `__.
+
+    Attributes:
+        config (google.cloud.dataproc_v1.types.GkeNodePoolConfig.GkeNodeConfig):
+            Optional. The node pool configuration.
+        locations (MutableSequence[str]):
+            Optional. The list of Compute Engine
+            `zones `__
+            where node pool nodes associated with a Dataproc on GKE
+            virtual cluster will be located.
+
+            **Note:** All node pools associated with a virtual cluster
+            must be located in the same region as the virtual cluster,
+            and they must be located in the same zone within that
+            region.
+
+            If a location is not specified during node pool creation,
+            Dataproc on GKE will choose the zone.
+        autoscaling (google.cloud.dataproc_v1.types.GkeNodePoolConfig.GkeNodePoolAutoscalingConfig):
+            Optional. The autoscaler configuration for
+            this node pool. The autoscaler is enabled only
+            when a valid configuration is present.
+    """
+
+    class GkeNodeConfig(proto.Message):
+        r"""Parameters that describe cluster nodes.
+
+        Attributes:
+            machine_type (str):
+                Optional. The name of a Compute Engine `machine
+                type `__.
+            local_ssd_count (int):
+                Optional. The number of local SSD disks to attach to the
+                node, which is limited by the maximum number of disks
+                allowable per zone (see `Adding Local
+                SSDs `__).
+            preemptible (bool):
+                Optional. Whether the nodes are created as legacy
+                [preemptible VM instances]
+                (https://cloud.google.com/compute/docs/instances/preemptible).
+                Also see
+                [Spot][google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodeConfig.spot]
+                VMs, preemptible VM instances without a maximum lifetime.
+                Legacy and Spot preemptible nodes cannot be used in a node
+                pool with the ``CONTROLLER`` [role]
+                (/dataproc/docs/reference/rest/v1/projects.regions.clusters#role)
+                or in the DEFAULT node pool if the CONTROLLER role is not
+                assigned (the DEFAULT node pool will assume the CONTROLLER
+                role).
+            accelerators (MutableSequence[google.cloud.dataproc_v1.types.GkeNodePoolConfig.GkeNodePoolAcceleratorConfig]):
+                Optional. A list of `hardware
+                accelerators `__
+                to attach to each node.
+            min_cpu_platform (str):
+                Optional. `Minimum CPU
+                platform `__
+                to be used by this instance. The instance may be scheduled
+                on the specified or a newer CPU platform. Specify the
+                friendly names of CPU platforms, such as "Intel Haswell"
+                or "Intel Sandy Bridge".
+            boot_disk_kms_key (str):
+                Optional. The [Customer Managed Encryption Key (CMEK)]
+                (https://cloud.google.com/kubernetes-engine/docs/how-to/using-cmek)
+                used to encrypt the boot disk attached to each node in the
+                node pool. Specify the key using the following format:
+                projects/KEY_PROJECT_ID/locations/LOCATION/keyRings/RING_NAME/cryptoKeys/KEY_NAME.
+            spot (bool):
+                Optional. Whether the nodes are created as [Spot VM
+                instances]
+                (https://cloud.google.com/compute/docs/instances/spot). Spot
+                VMs are the latest update to legacy [preemptible
+                VMs][google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodeConfig.preemptible].
+                Spot VMs do not have a maximum lifetime. Legacy and Spot
+                preemptible nodes cannot be used in a node pool with the
+                ``CONTROLLER``
+                `role `__
+                or in the DEFAULT node pool if the CONTROLLER role is not
+                assigned (the DEFAULT node pool will assume the CONTROLLER
+                role).
+        """
+
+        machine_type: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        local_ssd_count: int = proto.Field(
+            proto.INT32,
+            number=7,
+        )
+        preemptible: bool = proto.Field(
+            proto.BOOL,
+            number=10,
+        )
+        accelerators: MutableSequence[
+            "GkeNodePoolConfig.GkeNodePoolAcceleratorConfig"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=11,
+            message="GkeNodePoolConfig.GkeNodePoolAcceleratorConfig",
+        )
+        min_cpu_platform: str = proto.Field(
+            proto.STRING,
+            number=13,
+        )
+        boot_disk_kms_key: str = proto.Field(
+            proto.STRING,
+            number=23,
+        )
+        spot: bool = proto.Field(
+            proto.BOOL,
+            number=32,
+        )
+
+    class GkeNodePoolAcceleratorConfig(proto.Message):
+        r"""A GkeNodeConfigAcceleratorConfig represents a Hardware
+        Accelerator request for a node pool.
+
+        Attributes:
+            accelerator_count (int):
+                The number of accelerator cards exposed to an
+                instance.
+            accelerator_type (str):
+                The accelerator type resource name (see
+                GPUs on Compute Engine).
+            gpu_partition_size (str):
+                Size of partitions to create on the GPU. Valid values are
+                described in the NVIDIA `mig user
+                guide `__.
+        """
+
+        accelerator_count: int = proto.Field(
+            proto.INT64,
+            number=1,
+        )
+        accelerator_type: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        gpu_partition_size: str = proto.Field(
+            proto.STRING,
+            number=3,
+        )
+
+    class GkeNodePoolAutoscalingConfig(proto.Message):
+        r"""GkeNodePoolAutoscaling contains information the cluster
+        autoscaler needs to adjust the size of the node pool to the
+        current cluster usage.
+
+        Attributes:
+            min_node_count (int):
+                The minimum number of nodes in the node pool. Must be >= 0
+                and <= max_node_count.
+            max_node_count (int):
+                The maximum number of nodes in the node pool. Must be >=
+                min_node_count, and must be > 0. **Note:** Quota must be
+                sufficient to scale up the cluster.
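For orientation, a minimal sketch of how these node pool messages compose through the proto-plus keyword constructors; every resource name and size below is a hypothetical placeholder, not a default:

    from google.cloud import dataproc_v1

    # All resource names and sizes here are illustrative assumptions.
    node_pool = dataproc_v1.GkeNodePoolTarget(
        node_pool=(
            "projects/my-project/locations/us-central1"
            "/clusters/my-gke-cluster/nodePools/dataproc-default"
        ),
        roles=[dataproc_v1.GkeNodePoolTarget.Role.DEFAULT],
        node_pool_config=dataproc_v1.GkeNodePoolConfig(
            config=dataproc_v1.GkeNodePoolConfig.GkeNodeConfig(
                machine_type="n1-standard-4",
            ),
            locations=["us-central1-a"],
            autoscaling=dataproc_v1.GkeNodePoolConfig.GkeNodePoolAutoscalingConfig(
                min_node_count=1,  # must be >= 0 and <= max_node_count
                max_node_count=5,  # must be > 0 and >= min_node_count
            ),
        ),
    )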
+ """ + + min_node_count: int = proto.Field( + proto.INT32, + number=2, + ) + max_node_count: int = proto.Field( + proto.INT32, + number=3, + ) + + config: GkeNodeConfig = proto.Field( + proto.MESSAGE, + number=2, + message=GkeNodeConfig, + ) + locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + autoscaling: GkeNodePoolAutoscalingConfig = proto.Field( + proto.MESSAGE, + number=4, + message=GkeNodePoolAutoscalingConfig, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py new file mode 100644 index 000000000000..b2ba90bcd573 --- /dev/null +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/types/workflow_templates.py @@ -0,0 +1,1151 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dataproc_v1.types import clusters +from google.cloud.dataproc_v1.types import jobs as gcd_jobs + +__protobuf__ = proto.module( + package="google.cloud.dataproc.v1", + manifest={ + "WorkflowTemplate", + "WorkflowTemplatePlacement", + "ManagedCluster", + "ClusterSelector", + "OrderedJob", + "TemplateParameter", + "ParameterValidation", + "RegexValidation", + "ValueValidation", + "WorkflowMetadata", + "ClusterOperation", + "WorkflowGraph", + "WorkflowNode", + "CreateWorkflowTemplateRequest", + "GetWorkflowTemplateRequest", + "InstantiateWorkflowTemplateRequest", + "InstantiateInlineWorkflowTemplateRequest", + "UpdateWorkflowTemplateRequest", + "ListWorkflowTemplatesRequest", + "ListWorkflowTemplatesResponse", + "DeleteWorkflowTemplateRequest", + }, +) + + +class WorkflowTemplate(proto.Message): + r"""A Dataproc workflow template resource. + + Attributes: + id (str): + + name (str): + Output only. The resource name of the workflow template, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.workflowTemplates``, the resource + name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + version (int): + Optional. Used to perform a consistent read-modify-write. + + This field should be left blank for a + ``CreateWorkflowTemplate`` request. It is required for an + ``UpdateWorkflowTemplate`` request, and must match the + current server version. 
A typical update template flow would + fetch the current template with a ``GetWorkflowTemplate`` + request, which will return the current template with the + ``version`` field filled in with the current server version. + The user updates other fields in the template, then returns + it as part of the ``UpdateWorkflowTemplate`` request. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time template was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time template was last + updated. + labels (MutableMapping[str, str]): + Optional. The labels to associate with this template. These + labels will be propagated to all jobs and clusters created + by the workflow instance. + + Label **keys** must contain 1 to 63 characters, and must + conform to `RFC + 1035 `__. + + Label **values** may be empty, but, if present, must contain + 1 to 63 characters, and must conform to `RFC + 1035 `__. + + No more than 32 labels can be associated with a template. + placement (google.cloud.dataproc_v1.types.WorkflowTemplatePlacement): + Required. WorkflowTemplate scheduling + information. + jobs (MutableSequence[google.cloud.dataproc_v1.types.OrderedJob]): + Required. The Directed Acyclic Graph of Jobs + to submit. + parameters (MutableSequence[google.cloud.dataproc_v1.types.TemplateParameter]): + Optional. Template parameters whose values + are substituted into the template. Values for + parameters must be provided when the template is + instantiated. + dag_timeout (google.protobuf.duration_pb2.Duration): + Optional. Timeout duration for the DAG of jobs, expressed in + seconds (see `JSON representation of + duration `__). + The timeout duration must be from 10 minutes ("600s") to 24 + hours ("86400s"). The timer begins when the first job is + submitted. If the workflow is running at the end of the + timeout period, any remaining jobs are cancelled, the + workflow is ended, and if the workflow was running on a + `managed + cluster `__, + the cluster is deleted. + """ + + id: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: int = proto.Field( + proto.INT32, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + placement: "WorkflowTemplatePlacement" = proto.Field( + proto.MESSAGE, + number=7, + message="WorkflowTemplatePlacement", + ) + jobs: MutableSequence["OrderedJob"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="OrderedJob", + ) + parameters: MutableSequence["TemplateParameter"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="TemplateParameter", + ) + dag_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + message=duration_pb2.Duration, + ) + + +class WorkflowTemplatePlacement(proto.Message): + r"""Specifies workflow execution target. + + Either ``managed_cluster`` or ``cluster_selector`` is required. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + managed_cluster (google.cloud.dataproc_v1.types.ManagedCluster): + A cluster that is managed by the workflow. + + This field is a member of `oneof`_ ``placement``. + cluster_selector (google.cloud.dataproc_v1.types.ClusterSelector): + Optional. A selector that chooses target + cluster for jobs based on metadata. + + The selector is evaluated at the time each job + is submitted. + + This field is a member of `oneof`_ ``placement``. + """ + + managed_cluster: "ManagedCluster" = proto.Field( + proto.MESSAGE, + number=1, + oneof="placement", + message="ManagedCluster", + ) + cluster_selector: "ClusterSelector" = proto.Field( + proto.MESSAGE, + number=2, + oneof="placement", + message="ClusterSelector", + ) + + +class ManagedCluster(proto.Message): + r"""Cluster that is managed by the workflow. + + Attributes: + cluster_name (str): + Required. The cluster name prefix. A unique + cluster name will be formed by appending a + random suffix. + + The name must contain only lower-case letters + (a-z), numbers (0-9), and hyphens (-). Must + begin with a letter. Cannot begin or end with + hyphen. Must consist of between 2 and 35 + characters. + config (google.cloud.dataproc_v1.types.ClusterConfig): + Required. The cluster configuration. + labels (MutableMapping[str, str]): + Optional. The labels to associate with this cluster. + + Label keys must be between 1 and 63 characters long, and + must conform to the following PCRE regular expression: + [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + + Label values must be between 1 and 63 characters long, and + must conform to the following PCRE regular expression: + [\p{Ll}\p{Lo}\p{N}_-]{0,63} + + No more than 32 labels can be associated with a given + cluster. + """ + + cluster_name: str = proto.Field( + proto.STRING, + number=2, + ) + config: clusters.ClusterConfig = proto.Field( + proto.MESSAGE, + number=3, + message=clusters.ClusterConfig, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class ClusterSelector(proto.Message): + r"""A selector that chooses target cluster for jobs based on + metadata. + + Attributes: + zone (str): + Optional. The zone where workflow process + executes. This parameter does not affect the + selection of the cluster. + + If unspecified, the zone of the first cluster + matching the selector is used. + cluster_labels (MutableMapping[str, str]): + Required. The cluster labels. Cluster must + have all labels to match. + """ + + zone: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + + +class OrderedJob(proto.Message): + r"""A job executed by the workflow. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + step_id (str): + Required. The step id. The id must be unique among all jobs + within the template. + + The step id is used as prefix for job id, as job + ``goog-dataproc-workflow-step-id`` label, and in + [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] + field from other steps. 
+ + The id must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). Cannot begin or end with + underscore or hyphen. Must consist of between 3 and 50 + characters. + hadoop_job (google.cloud.dataproc_v1.types.HadoopJob): + Optional. Job is a Hadoop job. + + This field is a member of `oneof`_ ``job_type``. + spark_job (google.cloud.dataproc_v1.types.SparkJob): + Optional. Job is a Spark job. + + This field is a member of `oneof`_ ``job_type``. + pyspark_job (google.cloud.dataproc_v1.types.PySparkJob): + Optional. Job is a PySpark job. + + This field is a member of `oneof`_ ``job_type``. + hive_job (google.cloud.dataproc_v1.types.HiveJob): + Optional. Job is a Hive job. + + This field is a member of `oneof`_ ``job_type``. + pig_job (google.cloud.dataproc_v1.types.PigJob): + Optional. Job is a Pig job. + + This field is a member of `oneof`_ ``job_type``. + spark_r_job (google.cloud.dataproc_v1.types.SparkRJob): + Optional. Job is a SparkR job. + + This field is a member of `oneof`_ ``job_type``. + spark_sql_job (google.cloud.dataproc_v1.types.SparkSqlJob): + Optional. Job is a SparkSql job. + + This field is a member of `oneof`_ ``job_type``. + presto_job (google.cloud.dataproc_v1.types.PrestoJob): + Optional. Job is a Presto job. + + This field is a member of `oneof`_ ``job_type``. + labels (MutableMapping[str, str]): + Optional. The labels to associate with this job. + + Label keys must be between 1 and 63 characters long, and + must conform to the following regular expression: + [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + + Label values must be between 1 and 63 characters long, and + must conform to the following regular expression: + [\p{Ll}\p{Lo}\p{N}_-]{0,63} + + No more than 32 labels can be associated with a given job. + scheduling (google.cloud.dataproc_v1.types.JobScheduling): + Optional. Job scheduling configuration. + prerequisite_step_ids (MutableSequence[str]): + Optional. The optional list of prerequisite job step_ids. If + not specified, the job will start at the beginning of + workflow. 
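As a rough sketch, a two-step DAG can be expressed with these messages as follows; the step ids and gs:// URIs are made-up placeholders:

    from google.cloud import dataproc_v1

    # The second step waits on the first via prerequisite_step_ids.
    ingest = dataproc_v1.OrderedJob(
        step_id="ingest",
        hadoop_job=dataproc_v1.HadoopJob(
            main_jar_file_uri="gs://my-bucket/ingest.jar",  # hypothetical
        ),
    )
    report = dataproc_v1.OrderedJob(
        step_id="report",
        hive_job=dataproc_v1.HiveJob(
            query_file_uri="gs://my-bucket/report.sql",  # hypothetical
        ),
        prerequisite_step_ids=["ingest"],
    )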
+ """ + + step_id: str = proto.Field( + proto.STRING, + number=1, + ) + hadoop_job: gcd_jobs.HadoopJob = proto.Field( + proto.MESSAGE, + number=2, + oneof="job_type", + message=gcd_jobs.HadoopJob, + ) + spark_job: gcd_jobs.SparkJob = proto.Field( + proto.MESSAGE, + number=3, + oneof="job_type", + message=gcd_jobs.SparkJob, + ) + pyspark_job: gcd_jobs.PySparkJob = proto.Field( + proto.MESSAGE, + number=4, + oneof="job_type", + message=gcd_jobs.PySparkJob, + ) + hive_job: gcd_jobs.HiveJob = proto.Field( + proto.MESSAGE, + number=5, + oneof="job_type", + message=gcd_jobs.HiveJob, + ) + pig_job: gcd_jobs.PigJob = proto.Field( + proto.MESSAGE, + number=6, + oneof="job_type", + message=gcd_jobs.PigJob, + ) + spark_r_job: gcd_jobs.SparkRJob = proto.Field( + proto.MESSAGE, + number=11, + oneof="job_type", + message=gcd_jobs.SparkRJob, + ) + spark_sql_job: gcd_jobs.SparkSqlJob = proto.Field( + proto.MESSAGE, + number=7, + oneof="job_type", + message=gcd_jobs.SparkSqlJob, + ) + presto_job: gcd_jobs.PrestoJob = proto.Field( + proto.MESSAGE, + number=12, + oneof="job_type", + message=gcd_jobs.PrestoJob, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + scheduling: gcd_jobs.JobScheduling = proto.Field( + proto.MESSAGE, + number=9, + message=gcd_jobs.JobScheduling, + ) + prerequisite_step_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + + +class TemplateParameter(proto.Message): + r"""A configurable parameter that replaces one or more fields in + the template. Parameterizable fields: + + - Labels + - File uris + - Job properties + - Job arguments + - Script variables + - Main class (in HadoopJob and SparkJob) + - Zone (in ClusterSelector) + + Attributes: + name (str): + Required. Parameter name. The parameter name is used as the + key, and paired with the parameter value, which are passed + to the template when the template is instantiated. The name + must contain only capital letters (A-Z), numbers (0-9), and + underscores (_), and must not start with a number. The + maximum length is 40 characters. + fields (MutableSequence[str]): + Required. Paths to all fields that the parameter replaces. A + field is allowed to appear in at most one parameter's list + of field paths. + + A field path is similar in syntax to a + [google.protobuf.FieldMask][google.protobuf.FieldMask]. For + example, a field path that references the zone field of a + workflow template's cluster selector would be specified as + ``placement.clusterSelector.zone``. 
+ + Also, field paths can reference fields using the following + syntax: + + - Values in maps can be referenced by key: + + - labels['key'] + - placement.clusterSelector.clusterLabels['key'] + - placement.managedCluster.labels['key'] + - placement.clusterSelector.clusterLabels['key'] + - jobs['step-id'].labels['key'] + + - Jobs in the jobs list can be referenced by step-id: + + - jobs['step-id'].hadoopJob.mainJarFileUri + - jobs['step-id'].hiveJob.queryFileUri + - jobs['step-id'].pySparkJob.mainPythonFileUri + - jobs['step-id'].hadoopJob.jarFileUris[0] + - jobs['step-id'].hadoopJob.archiveUris[0] + - jobs['step-id'].hadoopJob.fileUris[0] + - jobs['step-id'].pySparkJob.pythonFileUris[0] + + - Items in repeated fields can be referenced by a + zero-based index: + + - jobs['step-id'].sparkJob.args[0] + + - Other examples: + + - jobs['step-id'].hadoopJob.properties['key'] + - jobs['step-id'].hadoopJob.args[0] + - jobs['step-id'].hiveJob.scriptVariables['key'] + - jobs['step-id'].hadoopJob.mainJarFileUri + - placement.clusterSelector.zone + + It may not be possible to parameterize maps and repeated + fields in their entirety since only individual map values + and individual items in repeated fields can be referenced. + For example, the following field paths are invalid: + + - placement.clusterSelector.clusterLabels + - jobs['step-id'].sparkJob.args + description (str): + Optional. Brief description of the parameter. + Must not exceed 1024 characters. + validation (google.cloud.dataproc_v1.types.ParameterValidation): + Optional. Validation rules to be applied to + this parameter's value. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + fields: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + validation: "ParameterValidation" = proto.Field( + proto.MESSAGE, + number=4, + message="ParameterValidation", + ) + + +class ParameterValidation(proto.Message): + r"""Configuration for parameter validation. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + regex (google.cloud.dataproc_v1.types.RegexValidation): + Validation based on regular expressions. + + This field is a member of `oneof`_ ``validation_type``. + values (google.cloud.dataproc_v1.types.ValueValidation): + Validation based on a list of allowed values. + + This field is a member of `oneof`_ ``validation_type``. + """ + + regex: "RegexValidation" = proto.Field( + proto.MESSAGE, + number=1, + oneof="validation_type", + message="RegexValidation", + ) + values: "ValueValidation" = proto.Field( + proto.MESSAGE, + number=2, + oneof="validation_type", + message="ValueValidation", + ) + + +class RegexValidation(proto.Message): + r"""Validation based on regular expressions. + + Attributes: + regexes (MutableSequence[str]): + Required. RE2 regular expressions used to + validate the parameter's value. The value must + match the regex in its entirety (substring + matches are not sufficient). + """ + + regexes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class ValueValidation(proto.Message): + r"""Validation based on a list of allowed values. + + Attributes: + values (MutableSequence[str]): + Required. 
List of allowed values for the + parameter. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class WorkflowMetadata(proto.Message): + r"""A Dataproc workflow template resource. + + Attributes: + template (str): + Output only. The resource name of the workflow template as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.workflowTemplates``, the resource + name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + version (int): + Output only. The version of template at the + time of workflow instantiation. + create_cluster (google.cloud.dataproc_v1.types.ClusterOperation): + Output only. The create cluster operation + metadata. + graph (google.cloud.dataproc_v1.types.WorkflowGraph): + Output only. The workflow graph. + delete_cluster (google.cloud.dataproc_v1.types.ClusterOperation): + Output only. The delete cluster operation + metadata. + state (google.cloud.dataproc_v1.types.WorkflowMetadata.State): + Output only. The workflow state. + cluster_name (str): + Output only. The name of the target cluster. + parameters (MutableMapping[str, str]): + Map from parameter names to values that were + used for those parameters. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Workflow start time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Workflow end time. + cluster_uuid (str): + Output only. The UUID of target cluster. + dag_timeout (google.protobuf.duration_pb2.Duration): + Output only. The timeout duration for the DAG of jobs, + expressed in seconds (see `JSON representation of + duration `__). + dag_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. DAG start time, only set for workflows with + [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] + when DAG begins. + dag_end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. DAG end time, only set for workflows with + [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] + when DAG ends. + """ + + class State(proto.Enum): + r"""The operation state. + + Values: + UNKNOWN (0): + Unused. + PENDING (1): + The operation has been created. + RUNNING (2): + The operation is running. + DONE (3): + The operation is done; either cancelled or + completed. 
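This metadata typically surfaces on the long-running operation returned by an instantiate call. A minimal sketch, assuming a placeholder project, region, and template name:

    from google.cloud import dataproc_v1

    # The regional endpoint and resource name are assumptions.
    client = dataproc_v1.WorkflowTemplateServiceClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    operation = client.instantiate_workflow_template(
        name="projects/my-project/regions/us-central1/workflowTemplates/my-template"
    )
    operation.result()  # block until the workflow finishes
    metadata = operation.metadata  # a WorkflowMetadata message
    print(metadata.state)  # e.g. State.DONE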
+ """ + UNKNOWN = 0 + PENDING = 1 + RUNNING = 2 + DONE = 3 + + template: str = proto.Field( + proto.STRING, + number=1, + ) + version: int = proto.Field( + proto.INT32, + number=2, + ) + create_cluster: "ClusterOperation" = proto.Field( + proto.MESSAGE, + number=3, + message="ClusterOperation", + ) + graph: "WorkflowGraph" = proto.Field( + proto.MESSAGE, + number=4, + message="WorkflowGraph", + ) + delete_cluster: "ClusterOperation" = proto.Field( + proto.MESSAGE, + number=5, + message="ClusterOperation", + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + cluster_name: str = proto.Field( + proto.STRING, + number=7, + ) + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + cluster_uuid: str = proto.Field( + proto.STRING, + number=11, + ) + dag_timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=12, + message=duration_pb2.Duration, + ) + dag_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + dag_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + + +class ClusterOperation(proto.Message): + r"""The cluster operation triggered by a workflow. + + Attributes: + operation_id (str): + Output only. The id of the cluster operation. + error (str): + Output only. Error, if operation failed. + done (bool): + Output only. Indicates the operation is done. + """ + + operation_id: str = proto.Field( + proto.STRING, + number=1, + ) + error: str = proto.Field( + proto.STRING, + number=2, + ) + done: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class WorkflowGraph(proto.Message): + r"""The workflow graph. + + Attributes: + nodes (MutableSequence[google.cloud.dataproc_v1.types.WorkflowNode]): + Output only. The workflow nodes. + """ + + nodes: MutableSequence["WorkflowNode"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="WorkflowNode", + ) + + +class WorkflowNode(proto.Message): + r"""The workflow node. + + Attributes: + step_id (str): + Output only. The name of the node. + prerequisite_step_ids (MutableSequence[str]): + Output only. Node's prerequisite nodes. + job_id (str): + Output only. The job id; populated after the + node enters RUNNING state. + state (google.cloud.dataproc_v1.types.WorkflowNode.NodeState): + Output only. The node state. + error (str): + Output only. The error detail. + """ + + class NodeState(proto.Enum): + r"""The workflow node state. + + Values: + NODE_STATE_UNSPECIFIED (0): + State is unspecified. + BLOCKED (1): + The node is awaiting prerequisite node to + finish. + RUNNABLE (2): + The node is runnable but not running. + RUNNING (3): + The node is running. + COMPLETED (4): + The node completed successfully. + FAILED (5): + The node failed. A node can be marked FAILED + because its ancestor or peer failed. 
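Given a WorkflowMetadata message such as the one above, the recorded DAG can be walked node by node; a small sketch:

    from google.cloud import dataproc_v1

    def summarize_graph(metadata: dataproc_v1.WorkflowMetadata) -> None:
        # One line per node; note that a FAILED node may only reflect an
        # ancestor's or peer's failure, per the enum notes above.
        for node in metadata.graph.nodes:
            deps = ", ".join(node.prerequisite_step_ids) or "(none)"
            print(f"{node.step_id}: {node.state.name}, deps: {deps}")
            if node.error:
                print(f"  error: {node.error}")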
+ """ + NODE_STATE_UNSPECIFIED = 0 + BLOCKED = 1 + RUNNABLE = 2 + RUNNING = 3 + COMPLETED = 4 + FAILED = 5 + + step_id: str = proto.Field( + proto.STRING, + number=1, + ) + prerequisite_step_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + job_id: str = proto.Field( + proto.STRING, + number=3, + ) + state: NodeState = proto.Field( + proto.ENUM, + number=5, + enum=NodeState, + ) + error: str = proto.Field( + proto.STRING, + number=6, + ) + + +class CreateWorkflowTemplateRequest(proto.Message): + r"""A request to create a workflow template. + + Attributes: + parent (str): + Required. The resource name of the region or location, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.workflowTemplates.create``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.create``, the + resource name of the location has the following format: + ``projects/{project_id}/locations/{location}`` + template (google.cloud.dataproc_v1.types.WorkflowTemplate): + Required. The Dataproc workflow template to + create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + template: "WorkflowTemplate" = proto.Field( + proto.MESSAGE, + number=2, + message="WorkflowTemplate", + ) + + +class GetWorkflowTemplateRequest(proto.Message): + r"""A request to fetch a workflow template. + + Attributes: + name (str): + Required. The resource name of the workflow template, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.workflowTemplates.get``, the + resource name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.get``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + version (int): + Optional. The version of workflow template to + retrieve. Only previously instantiated versions + can be retrieved. + + If unspecified, retrieves the current version. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + version: int = proto.Field( + proto.INT32, + number=2, + ) + + +class InstantiateWorkflowTemplateRequest(proto.Message): + r"""A request to instantiate a workflow template. + + Attributes: + name (str): + Required. The resource name of the workflow template, as + described in + https://cloud.google.com/apis/design/resource_names. + + - For ``projects.regions.workflowTemplates.instantiate``, + the resource name of the template has the following + format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, + the resource name of the template has the following + format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` + version (int): + Optional. The version of workflow template to + instantiate. If specified, the workflow will be + instantiated only if the current version of the + workflow template has the supplied version. + + This option cannot be used to instantiate a + previous version of workflow template. + request_id (str): + Optional. A tag that prevents multiple concurrent workflow + instances with the same tag from running. This mitigates + risk of concurrent instances started due to retries. 
+
+            It is recommended to always set this value to a
+            `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__.
+
+            The tag must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+        parameters (MutableMapping[str, str]):
+            Optional. Map from parameter names to values
+            that should be used for those parameters. Values
+            may not exceed 1000 characters.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    version: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    parameters: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=6,
+    )
+
+
+class InstantiateInlineWorkflowTemplateRequest(proto.Message):
+    r"""A request to instantiate an inline workflow template.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the region or location, as
+            described in
+            https://cloud.google.com/apis/design/resource_names.
+
+            - For
+              ``projects.regions.workflowTemplates.instantiateinline``,
+              the resource name of the region has the following format:
+              ``projects/{project_id}/regions/{region}``
+
+            - For
+              ``projects.locations.workflowTemplates.instantiateinline``,
+              the resource name of the location has the following
+              format: ``projects/{project_id}/locations/{location}``
+        template (google.cloud.dataproc_v1.types.WorkflowTemplate):
+            Required. The workflow template to
+            instantiate.
+        request_id (str):
+            Optional. A tag that prevents multiple concurrent workflow
+            instances with the same tag from running. This mitigates
+            risk of concurrent instances started due to retries.
+
+            It is recommended to always set this value to a
+            `UUID <https://en.wikipedia.org/wiki/Universally_unique_identifier>`__.
+
+            The tag must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    template: "WorkflowTemplate" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="WorkflowTemplate",
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class UpdateWorkflowTemplateRequest(proto.Message):
+    r"""A request to update a workflow template.
+
+    Attributes:
+        template (google.cloud.dataproc_v1.types.WorkflowTemplate):
+            Required. The updated workflow template.
+
+            The ``template.version`` field must match the current
+            version.
+    """
+
+    template: "WorkflowTemplate" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="WorkflowTemplate",
+    )
+
+
+class ListWorkflowTemplatesRequest(proto.Message):
+    r"""A request to list workflow templates in a project.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the region or location, as
+            described in
+            https://cloud.google.com/apis/design/resource_names.
+
+            - For ``projects.regions.workflowTemplates.list``, the
+              resource name of the region has the following format:
+              ``projects/{project_id}/regions/{region}``
+
+            - For ``projects.locations.workflowTemplates.list``, the
+              resource name of the location has the following format:
+              ``projects/{project_id}/locations/{location}``
+        page_size (int):
+            Optional. The maximum number of results to
+            return in each response.
+        page_token (str):
+            Optional. The page token, returned by a
+            previous call, to request the next page of
+            results.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListWorkflowTemplatesResponse(proto.Message):
+    r"""A response to a request to list workflow templates in a
+    project.
+
+    Attributes:
+        templates (MutableSequence[google.cloud.dataproc_v1.types.WorkflowTemplate]):
+            Output only. WorkflowTemplates list.
+        next_page_token (str):
+            Output only. This token is included in the response if there
+            are more results to fetch. To fetch additional results,
+            provide this value as the page_token in a subsequent
+            ListWorkflowTemplatesRequest.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    templates: MutableSequence["WorkflowTemplate"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="WorkflowTemplate",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class DeleteWorkflowTemplateRequest(proto.Message):
+    r"""A request to delete a workflow template.
+
+    Currently started workflows will remain running.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the workflow template, as
+            described in
+            https://cloud.google.com/apis/design/resource_names.
+
+            - For ``projects.regions.workflowTemplates.delete``, the
+              resource name of the template has the following format:
+              ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
+
+            - For ``projects.locations.workflowTemplates.delete``,
+              the resource name of the template has the following
+              format:
+              ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
+        version (int):
+            Optional. The version of workflow template to
+            delete. If specified, will only delete the
+            template if the current server version matches the
+            specified version.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    version: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-dataproc/mypy.ini b/packages/google-cloud-dataproc/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/packages/google-cloud-dataproc/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/packages/google-cloud-dataproc/noxfile.py b/packages/google-cloud-dataproc/noxfile.py
new file mode 100644
index 000000000000..be54712bfa8f
--- /dev/null
+++ b/packages/google-cloud-dataproc/noxfile.py
@@ -0,0 +1,407 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
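The workflow template request and response messages above are consumed through the generated `WorkflowTemplateServiceClient`. As a minimal sketch (not part of this diff; the project and region in the resource name are placeholders, and the client needs real credentials), listing templates lets the client's pager consume `next_page_token` for you:

```python
from google.cloud import dataproc_v1

# Placeholder resource name; substitute a real project and region.
parent = "projects/my-project/regions/us-central1"

client = dataproc_v1.WorkflowTemplateServiceClient()

# list_workflow_templates returns a pager: iterating it issues follow-up
# requests with next_page_token until the listing is exhausted.
for template in client.list_workflow_templates(parent=parent):
    print(template.id, template.version)
```
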
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run("py.test", "tests/unit")
diff --git a/packages/google-cloud-dataproc/renovate.json b/packages/google-cloud-dataproc/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-dataproc/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-dataproc/samples/AUTHORING_GUIDE.md b/packages/google-cloud-dataproc/samples/AUTHORING_GUIDE.md
new file mode 100644
index 000000000000..8249522ffc2d
--- /dev/null
+++ b/packages/google-cloud-dataproc/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/packages/google-cloud-dataproc/samples/CONTRIBUTING.md b/packages/google-cloud-dataproc/samples/CONTRIBUTING.md
new file mode 100644
index 000000000000..f5fe2e6baf13
--- /dev/null
+++ b/packages/google-cloud-dataproc/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md
\ No newline at end of file
diff --git a/packages/google-cloud-dataproc/samples/snippets/README.md b/packages/google-cloud-dataproc/samples/snippets/README.md
new file mode 100644
index 000000000000..9b1171683c64
--- /dev/null
+++ b/packages/google-cloud-dataproc/samples/snippets/README.md
@@ -0,0 +1,4 @@
+Samples migrated
+================
+
+New location: https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/dataproc/snippets
diff --git a/packages/google-cloud-dataproc/scripts/decrypt-secrets.sh b/packages/google-cloud-dataproc/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..0018b421ddf8
--- /dev/null
+++ b/packages/google-cloud-dataproc/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2023 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
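The `prerelease_deps` session in the noxfile above recovers pinned package names from the oldest constraints file with a lookahead regex. A standalone, runnable sketch of just that parsing step (the inline constraints text is a made-up stand-in for `testing/constraints-3.7.txt`):

```python
import re

constraints_text = """\
# comment lines and blanks never match
google-api-core==1.34.0
proto-plus==1.22.0
protobuf==3.19.5
"""

# Same pattern as in the nox session: capture a token only when it is
# immediately followed by an '==' pin, so the version itself is dropped.
constraints_deps = [
    match.group(1)
    for match in re.finditer(
        r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
    )
]

print(constraints_deps)  # ['google-api-core', 'proto-plus', 'protobuf']
```
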
+ +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py b/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py new file mode 100644 index 000000000000..7b4c85ab3ae2 --- /dev/null +++ b/packages/google-cloud-dataproc/scripts/fixup_dataproc_v1_keywords.py @@ -0,0 +1,209 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
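The keyword-fixup script that follows rewrites positional calls against the generated clients into the explicit request-dict form, using its `METHOD_TO_PARAMS` table to map positions onto field names. A hand-written before/after illustration (not script output; the `ClusterControllerClient` call is hypothetical, needs real credentials, and the project/region/cluster values are placeholders):

```python
from google.cloud import dataproc_v1

client = dataproc_v1.ClusterControllerClient()  # hypothetical; needs credentials

# Before (old positional surface, matched via METHOD_TO_PARAMS; no longer
# valid against the current generated client):
#
#     client.get_cluster("my-project", "us-central1", "my-cluster")

# After the rewrite: positional args folded into a request dict, while the
# control parameters (retry, timeout, metadata) stay as keywords.
cluster = client.get_cluster(
    request={
        "project_id": "my-project",
        "region": "us-central1",
        "cluster_name": "my-cluster",
    }
)
```
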
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dataprocCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_job': ('project_id', 'region', 'job_id', ), + 'create_autoscaling_policy': ('parent', 'policy', ), + 'create_batch': ('parent', 'batch', 'batch_id', 'request_id', ), + 'create_cluster': ('project_id', 'region', 'cluster', 'request_id', 'action_on_failed_primary_workers', ), + 'create_node_group': ('parent', 'node_group', 'node_group_id', 'request_id', ), + 'create_workflow_template': ('parent', 'template', ), + 'delete_autoscaling_policy': ('name', ), + 'delete_batch': ('name', ), + 'delete_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ), + 'delete_job': ('project_id', 'region', 'job_id', ), + 'delete_workflow_template': ('name', 'version', ), + 'diagnose_cluster': ('project_id', 'region', 'cluster_name', 'tarball_gcs_dir', 'diagnosis_interval', 'jobs', 'yarn_application_ids', ), + 'get_autoscaling_policy': ('name', ), + 'get_batch': ('name', ), + 'get_cluster': ('project_id', 'region', 'cluster_name', ), + 'get_job': ('project_id', 'region', 'job_id', ), + 'get_node_group': ('name', ), + 'get_workflow_template': ('name', 'version', ), + 'instantiate_inline_workflow_template': ('parent', 'template', 'request_id', ), + 'instantiate_workflow_template': ('name', 'version', 'request_id', 'parameters', ), + 'list_autoscaling_policies': ('parent', 'page_size', 'page_token', ), + 'list_batches': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_clusters': ('project_id', 'region', 'filter', 'page_size', 'page_token', ), + 'list_jobs': ('project_id', 'region', 'page_size', 'page_token', 'cluster_name', 'job_state_matcher', 'filter', ), + 'list_workflow_templates': ('parent', 'page_size', 'page_token', ), + 'resize_node_group': ('name', 'size', 'request_id', 'graceful_decommission_timeout', ), + 'start_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ), + 'stop_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ), + 'submit_job': ('project_id', 'region', 'job', 'request_id', ), + 'submit_job_as_operation': ('project_id', 'region', 'job', 'request_id', ), + 'update_autoscaling_policy': ('policy', ), + 'update_cluster': ('project_id', 'region', 'cluster_name', 'cluster', 'update_mask', 'graceful_decommission_timeout', 'request_id', ), + 'update_job': ('project_id', 'region', 'job_id', 'job', 'update_mask', ), + 'update_workflow_template': ('template', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dataprocCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dataproc client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""")
+    parser.add_argument(
+        '-d',
+        '--input-directory',
+        required=True,
+        dest='input_dir',
+        help='the input directory to walk for python files to fix up',
+    )
+    parser.add_argument(
+        '-o',
+        '--output-directory',
+        required=True,
+        dest='output_dir',
+        help='the directory to output files fixed via un-flattening',
+    )
+    args = parser.parse_args()
+    input_dir = pathlib.Path(args.input_dir)
+    output_dir = pathlib.Path(args.output_dir)
+    if not input_dir.is_dir():
+        print(
+            f"input directory '{input_dir}' does not exist or is not a directory",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    if not output_dir.is_dir():
+        print(
+            f"output directory '{output_dir}' does not exist or is not a directory",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    if os.listdir(output_dir):
+        print(
+            f"output directory '{output_dir}' is not empty",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    fix_files(input_dir, output_dir)
diff --git a/packages/google-cloud-dataproc/scripts/readme-gen/readme_gen.py b/packages/google-cloud-dataproc/scripts/readme-gen/readme_gen.py
new file mode 100644
index 000000000000..1acc119835b5
--- /dev/null
+++ b/packages/google-cloud-dataproc/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+    trim_blocks=True,
+    loader=jinja2.FileSystemLoader(
+        os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
+    ),
+    autoescape=True,
+)
+
+README_TMPL = jinja_env.get_template("README.tmpl.rst")
+
+
+def get_help(file):
+    return subprocess.check_output(["python", file, "--help"]).decode()
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("source")
+    parser.add_argument("--destination", default="README.rst")
+
+    args = parser.parse_args()
+
+    source = os.path.abspath(args.source)
+    root = os.path.dirname(source)
+    destination = os.path.join(root, args.destination)
+
+    jinja_env.globals["get_help"] = get_help
+
+    with io.open(source, "r") as f:
+        config = yaml.safe_load(f)
+
+    # This allows get_help to execute in the right directory.
+    os.chdir(root)
+
+    output = README_TMPL.render(config)
+
+    with io.open(destination, "w") as f:
+        f.write(output)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/packages/google-cloud-dataproc/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-dataproc/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 000000000000..4fd239765b0a
--- /dev/null
+++ b/packages/google-cloud-dataproc/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+   become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
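readme_gen.py above renders the README.tmpl.rst template that follows from a YAML config passed as its `source` argument. A minimal sketch of such a config, expressed as an inline string so it parses standalone; the keys mirror the template's variables and every value is a placeholder:

```python
import yaml

# Hypothetical stand-in for a sample's README config consumed by
# readme_gen.py; keys mirror {{product.name}}, {{description}},
# {{samples}}, and friends in README.tmpl.rst.
config_text = """
product:
  name: Google Cloud Dataproc
  url: https://cloud.google.com/dataproc/docs
  description: is a managed Spark and Hadoop service.
description: These samples show basic Dataproc operations.
samples:
- name: Quickstart
  file: quickstart.py
  show_help: false
"""

config = yaml.safe_load(config_text)
print(config["product"]["name"])  # Google Cloud Dataproc
```
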
+ +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-dataproc/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-dataproc/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-dataproc/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-dataproc/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-dataproc/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-dataproc/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. 
To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+    https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/packages/google-cloud-dataproc/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-dataproc/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..6f069c6c87a5
--- /dev/null
+++ b/packages/google-cloud-dataproc/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+    .. code-block:: bash
+
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
+
+    .. code-block:: bash
+
+        $ virtualenv env
+        $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+    .. code-block:: bash
+
+        $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/packages/google-cloud-dataproc/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-dataproc/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000000..5ea33d18c00c
--- /dev/null
+++ b/packages/google-cloud-dataproc/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for
+cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+    brew install portaudio
+
+  **Note**: if you encounter an error when running `pip install` that indicates
+  it can't find `portaudio.h`, try running `pip install` with the following
+  flags::
+
+    pip install --global-option='build_ext' \
+        --global-option='-I/usr/local/include' \
+        --global-option='-L/usr/local/lib' \
+        pyaudio
+
+* For Debian / Ubuntu Linux::
+
+    apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+  installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+    https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/packages/google-cloud-dataproc/setup.cfg b/packages/google-cloud-dataproc/setup.cfg
new file mode 100644
index 000000000000..052350089505
--- /dev/null
+++ b/packages/google-cloud-dataproc/setup.cfg
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-dataproc/setup.py b/packages/google-cloud-dataproc/setup.py new file mode 100644 index 000000000000..acc5a8c96f16 --- /dev/null +++ b/packages/google-cloud-dataproc/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-dataproc" + + +description = "Google Cloud Dataproc API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/dataproc/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + 
install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-dataproc/testing/.gitignore b/packages/google-cloud-dataproc/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-dataproc/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-dataproc/testing/constraints-3.10.txt b/packages/google-cloud-dataproc/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-dataproc/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-dataproc/testing/constraints-3.11.txt b/packages/google-cloud-dataproc/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-dataproc/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-dataproc/testing/constraints-3.12.txt b/packages/google-cloud-dataproc/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-dataproc/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-dataproc/testing/constraints-3.7.txt b/packages/google-cloud-dataproc/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/packages/google-cloud-dataproc/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/google-cloud-dataproc/testing/constraints-3.8.txt b/packages/google-cloud-dataproc/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-dataproc/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-dataproc/testing/constraints-3.9.txt b/packages/google-cloud-dataproc/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-dataproc/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-dataproc/tests/__init__.py b/packages/google-cloud-dataproc/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dataproc/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dataproc/tests/system/__init__.py b/packages/google-cloud-dataproc/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-dataproc/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-dataproc/tests/system/smoke_test.py b/packages/google-cloud-dataproc/tests/system/smoke_test.py new file mode 100644 index 000000000000..c388feed0ac1 --- /dev/null +++ b/packages/google-cloud-dataproc/tests/system/smoke_test.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import dataproc_v1 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_clusters(project_id: str, transport: str): + client = dataproc_v1.ClusterControllerClient(transport=transport) + + client.list_clusters(project_id=project_id, region="global") + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. 
+ assert True diff --git a/packages/google-cloud-dataproc/tests/unit/__init__.py b/packages/google-cloud-dataproc/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/__init__.py b/packages/google-cloud-dataproc/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/__init__.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py new file mode 100644 index 000000000000..91517312cf9f --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -0,0 +1,6011 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.dataproc_v1.services.autoscaling_policy_service import ( + AutoscalingPolicyServiceAsyncClient, + AutoscalingPolicyServiceClient, + pagers, + transports, +) +from google.cloud.dataproc_v1.types import autoscaling_policies + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AutoscalingPolicyServiceClient._get_default_mtls_endpoint(None) is None + assert ( + AutoscalingPolicyServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AutoscalingPolicyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AutoscalingPolicyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AutoscalingPolicyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AutoscalingPolicyServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AutoscalingPolicyServiceClient, "grpc"), + (AutoscalingPolicyServiceAsyncClient, "grpc_asyncio"), + (AutoscalingPolicyServiceClient, "rest"), + ], +) +def test_autoscaling_policy_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AutoscalingPolicyServiceGrpcTransport, "grpc"), + (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AutoscalingPolicyServiceRestTransport, "rest"), + ], +) +def test_autoscaling_policy_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AutoscalingPolicyServiceClient, "grpc"), + (AutoscalingPolicyServiceAsyncClient, "grpc_asyncio"), + (AutoscalingPolicyServiceClient, "rest"), + ], +) +def test_autoscaling_policy_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) 
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "dataproc.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://dataproc.googleapis.com"
+        )
+
+
+def test_autoscaling_policy_service_client_get_transport_class():
+    transport = AutoscalingPolicyServiceClient.get_transport_class()
+    available_transports = [
+        transports.AutoscalingPolicyServiceGrpcTransport,
+        transports.AutoscalingPolicyServiceRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = AutoscalingPolicyServiceClient.get_transport_class("grpc")
+    assert transport == transports.AutoscalingPolicyServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (
+            AutoscalingPolicyServiceClient,
+            transports.AutoscalingPolicyServiceGrpcTransport,
+            "grpc",
+        ),
+        (
+            AutoscalingPolicyServiceAsyncClient,
+            transports.AutoscalingPolicyServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (
+            AutoscalingPolicyServiceClient,
+            transports.AutoscalingPolicyServiceRestTransport,
+            "rest",
+        ),
+    ],
+)
+@mock.patch.object(
+    AutoscalingPolicyServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(AutoscalingPolicyServiceClient),
+)
+@mock.patch.object(
+    AutoscalingPolicyServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(AutoscalingPolicyServiceAsyncClient),
+)
+def test_autoscaling_policy_service_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if a transport instance is provided we won't create a new one.
+    with mock.patch.object(
+        AutoscalingPolicyServiceClient, "get_transport_class"
+    ) as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if the transport is provided by name we will create a new one.
+    with mock.patch.object(
+        AutoscalingPolicyServiceClient, "get_transport_class"
+    ) as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
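+    # GOOGLE_API_USE_MTLS_ENDPOINT accepts "never", "auto" (the default), and
+    # "always"; "never" forces the plain endpoint even when a certificate is
+    # available.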
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT
+    # has an unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has an unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided.
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided.
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            AutoscalingPolicyServiceClient,
+            transports.AutoscalingPolicyServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            AutoscalingPolicyServiceAsyncClient,
+            transports.AutoscalingPolicyServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            AutoscalingPolicyServiceClient,
+            transports.AutoscalingPolicyServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            AutoscalingPolicyServiceAsyncClient,
+            transports.AutoscalingPolicyServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            AutoscalingPolicyServiceClient,
+            transports.AutoscalingPolicyServiceRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            AutoscalingPolicyServiceClient,
+            transports.AutoscalingPolicyServiceRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    AutoscalingPolicyServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(AutoscalingPolicyServiceClient),
+)
+@mock.patch.object(
+    AutoscalingPolicyServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(AutoscalingPolicyServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_autoscaling_policy_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior: the endpoint is switched to
+    # the default mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true"
+    # and a client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        options = client_options.ClientOptions(
+            client_cert_source=client_cert_source_callback
+        )
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=True,
+            ):
+                with mock.patch(
+                    "google.auth.transport.mtls.default_client_cert_source",
+                    return_value=client_cert_source_callback,
+                ):
+                    if use_client_cert_env == "false":
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
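+    # With no certificate source at all, the client should fall back to the
+    # plain default endpoint regardless of GOOGLE_API_USE_CLIENT_CERTIFICATE.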
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient], +) +@mock.patch.object( + AutoscalingPolicyServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AutoscalingPolicyServiceClient), +) +@mock.patch.object( + AutoscalingPolicyServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AutoscalingPolicyServiceAsyncClient), +) +def test_autoscaling_policy_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
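+    # Under "auto", the mTLS endpoint is only selected when a client
+    # certificate can actually be sourced.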
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + AutoscalingPolicyServiceClient, + transports.AutoscalingPolicyServiceGrpcTransport, + "grpc", + ), + ( + AutoscalingPolicyServiceAsyncClient, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + AutoscalingPolicyServiceClient, + transports.AutoscalingPolicyServiceRestTransport, + "rest", + ), + ], +) +def test_autoscaling_policy_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AutoscalingPolicyServiceClient, + transports.AutoscalingPolicyServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AutoscalingPolicyServiceAsyncClient, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + AutoscalingPolicyServiceClient, + transports.AutoscalingPolicyServiceRestTransport, + "rest", + None, + ), + ], +) +def test_autoscaling_policy_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
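+    # The file path is forwarded to the transport as-is; loading the
+    # credentials is the transport's job, so `credentials` stays None below.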
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_autoscaling_policy_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AutoscalingPolicyServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AutoscalingPolicyServiceClient, + transports.AutoscalingPolicyServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AutoscalingPolicyServiceAsyncClient, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_autoscaling_policy_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
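+    # Patching create_channel lets the test assert that the file-based
+    # credentials (rather than ADC) end up wired into the gRPC channel.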
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autoscaling_policies.CreateAutoscalingPolicyRequest, + dict, + ], +) +def test_create_autoscaling_policy(request_type, transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_autoscaling_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + response = client.create_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +def test_create_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_autoscaling_policy), "__call__" + ) as call: + client.create_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() + + +@pytest.mark.asyncio +async def test_create_autoscaling_policy_async( + transport: str = "grpc_asyncio", + request_type=autoscaling_policies.CreateAutoscalingPolicyRequest, +): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
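+    # FakeUnaryUnaryCall wraps the canned response in an awaitable, mimicking
+    # a real unary-unary gRPC call.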
+ with mock.patch.object( + type(client.transport.create_autoscaling_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + ) + response = await client.create_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_create_autoscaling_policy_async_from_dict(): + await test_create_autoscaling_policy_async(request_type=dict) + + +def test_create_autoscaling_policy_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autoscaling_policies.CreateAutoscalingPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_autoscaling_policy), "__call__" + ) as call: + call.return_value = autoscaling_policies.AutoscalingPolicy() + client.create_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_autoscaling_policy_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autoscaling_policies.CreateAutoscalingPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_autoscaling_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autoscaling_policies.AutoscalingPolicy() + ) + await client.create_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_autoscaling_policy_flattened(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_autoscaling_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autoscaling_policies.AutoscalingPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
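+        # The client coerces these keyword arguments into a
+        # CreateAutoscalingPolicyRequest before invoking the transport.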
+        client.create_autoscaling_policy(
+            parent="parent_value",
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].policy
+        mock_val = autoscaling_policies.AutoscalingPolicy(id="id_value")
+        assert arg == mock_val
+
+
+def test_create_autoscaling_policy_flattened_error():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_autoscaling_policy(
+            autoscaling_policies.CreateAutoscalingPolicyRequest(),
+            parent="parent_value",
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_autoscaling_policy_flattened_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_autoscaling_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            autoscaling_policies.AutoscalingPolicy()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_autoscaling_policy(
+            parent="parent_value",
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].policy
+        mock_val = autoscaling_policies.AutoscalingPolicy(id="id_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_autoscaling_policy_flattened_error_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_autoscaling_policy(
+            autoscaling_policies.CreateAutoscalingPolicyRequest(),
+            parent="parent_value",
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        autoscaling_policies.UpdateAutoscalingPolicyRequest,
+        dict,
+    ],
+)
+def test_update_autoscaling_policy(request_type, transport: str = "grpc"):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_autoscaling_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + response = client.update_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +def test_update_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autoscaling_policy), "__call__" + ) as call: + client.update_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() + + +@pytest.mark.asyncio +async def test_update_autoscaling_policy_async( + transport: str = "grpc_asyncio", + request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest, +): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autoscaling_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + ) + response = await client.update_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_update_autoscaling_policy_async_from_dict(): + await test_update_autoscaling_policy_async(request_type=dict) + + +def test_update_autoscaling_policy_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autoscaling_policies.UpdateAutoscalingPolicyRequest() + + request.policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_autoscaling_policy), "__call__" + ) as call: + call.return_value = autoscaling_policies.AutoscalingPolicy() + client.update_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "policy.name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_autoscaling_policy_field_headers_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = autoscaling_policies.UpdateAutoscalingPolicyRequest()
+
+    request.policy.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_autoscaling_policy), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            autoscaling_policies.AutoscalingPolicy()
+        )
+        await client.update_autoscaling_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "policy.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_autoscaling_policy_flattened():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_autoscaling_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = autoscaling_policies.AutoscalingPolicy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_autoscaling_policy(
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].policy
+        mock_val = autoscaling_policies.AutoscalingPolicy(id="id_value")
+        assert arg == mock_val
+
+
+def test_update_autoscaling_policy_flattened_error():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_autoscaling_policy(
+            autoscaling_policies.UpdateAutoscalingPolicyRequest(),
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_autoscaling_policy_flattened_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_autoscaling_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            autoscaling_policies.AutoscalingPolicy()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.update_autoscaling_policy( + policy=autoscaling_policies.AutoscalingPolicy(id="id_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].policy + mock_val = autoscaling_policies.AutoscalingPolicy(id="id_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_autoscaling_policy_flattened_error_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_autoscaling_policy( + autoscaling_policies.UpdateAutoscalingPolicyRequest(), + policy=autoscaling_policies.AutoscalingPolicy(id="id_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autoscaling_policies.GetAutoscalingPolicyRequest, + dict, + ], +) +def test_get_autoscaling_policy(request_type, transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autoscaling_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + response = client.get_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +def test_get_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autoscaling_policy), "__call__" + ) as call: + client.get_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_autoscaling_policy_async( + transport: str = "grpc_asyncio", + request_type=autoscaling_policies.GetAutoscalingPolicyRequest, +): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autoscaling_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + ) + response = await client.get_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_autoscaling_policy_async_from_dict(): + await test_get_autoscaling_policy_async(request_type=dict) + + +def test_get_autoscaling_policy_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autoscaling_policies.GetAutoscalingPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autoscaling_policy), "__call__" + ) as call: + call.return_value = autoscaling_policies.AutoscalingPolicy() + client.get_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_autoscaling_policy_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autoscaling_policies.GetAutoscalingPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autoscaling_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autoscaling_policies.AutoscalingPolicy() + ) + await client.get_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_autoscaling_policy_flattened(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_autoscaling_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = autoscaling_policies.AutoscalingPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_autoscaling_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_autoscaling_policy_flattened_error():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_autoscaling_policy(
+            autoscaling_policies.GetAutoscalingPolicyRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_autoscaling_policy_flattened_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_autoscaling_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            autoscaling_policies.AutoscalingPolicy()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_autoscaling_policy(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_autoscaling_policy_flattened_error_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_autoscaling_policy(
+            autoscaling_policies.GetAutoscalingPolicyRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        autoscaling_policies.ListAutoscalingPoliciesRequest,
+        dict,
+    ],
+)
+def test_list_autoscaling_policies(request_type, transport: str = "grpc"):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_autoscaling_policies), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_autoscaling_policies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListAutoscalingPoliciesPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_autoscaling_policies_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
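+    # With no arguments at all, the client should still construct a default
+    # request object, which the assertion below verifies.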
+ client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_autoscaling_policies), "__call__" + ) as call: + client.list_autoscaling_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() + + +@pytest.mark.asyncio +async def test_list_autoscaling_policies_async( + transport: str = "grpc_asyncio", + request_type=autoscaling_policies.ListAutoscalingPoliciesRequest, +): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_autoscaling_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + autoscaling_policies.ListAutoscalingPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_autoscaling_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutoscalingPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_autoscaling_policies_async_from_dict(): + await test_list_autoscaling_policies_async(request_type=dict) + + +def test_list_autoscaling_policies_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autoscaling_policies.ListAutoscalingPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_autoscaling_policies), "__call__" + ) as call: + call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() + client.list_autoscaling_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_autoscaling_policies_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autoscaling_policies.ListAutoscalingPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.list_autoscaling_policies), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            autoscaling_policies.ListAutoscalingPoliciesResponse()
+        )
+        await client.list_autoscaling_policies(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_autoscaling_policies_flattened():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_autoscaling_policies), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_autoscaling_policies(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_autoscaling_policies_flattened_error():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_autoscaling_policies(
+            autoscaling_policies.ListAutoscalingPoliciesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_autoscaling_policies_flattened_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_autoscaling_policies), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            autoscaling_policies.ListAutoscalingPoliciesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_autoscaling_policies(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_autoscaling_policies_flattened_error_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
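+    # Mixing the two is ambiguous, so the client raises ValueError before any
+    # RPC is attempted.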
+    with pytest.raises(ValueError):
+        await client.list_autoscaling_policies(
+            autoscaling_policies.ListAutoscalingPoliciesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_autoscaling_policies_pager(transport_name: str = "grpc"):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_autoscaling_policies), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+                next_page_token="abc",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[],
+                next_page_token="def",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+                next_page_token="ghi",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_autoscaling_policies(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, autoscaling_policies.AutoscalingPolicy) for i in results
+        )
+
+
+def test_list_autoscaling_policies_pages(transport_name: str = "grpc"):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_autoscaling_policies), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+                next_page_token="abc",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[],
+                next_page_token="def",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+                next_page_token="ghi",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_autoscaling_policies(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_autoscaling_policies_async_pager():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_autoscaling_policies),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
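+        # The trailing RuntimeError makes the test fail loudly if the pager
+        # requests more pages than the four provided.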
+        call.side_effect = (
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+                next_page_token="abc",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[],
+                next_page_token="def",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+                next_page_token="ghi",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_autoscaling_policies(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, autoscaling_policies.AutoscalingPolicy) for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_autoscaling_policies_async_pages():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_autoscaling_policies),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+                next_page_token="abc",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[],
+                next_page_token="def",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+                next_page_token="ghi",
+            ),
+            autoscaling_policies.ListAutoscalingPoliciesResponse(
+                policies=[
+                    autoscaling_policies.AutoscalingPolicy(),
+                    autoscaling_policies.AutoscalingPolicy(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_autoscaling_policies(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        autoscaling_policies.DeleteAutoscalingPolicyRequest,
+        dict,
+    ],
+)
+def test_delete_autoscaling_policy(request_type, transport: str = "grpc"):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_autoscaling_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_autoscaling_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_autoscaling_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_autoscaling_policy), "__call__" + ) as call: + client.delete_autoscaling_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() + + +@pytest.mark.asyncio +async def test_delete_autoscaling_policy_async( + transport: str = "grpc_asyncio", + request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest, +): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_autoscaling_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_autoscaling_policy_async_from_dict(): + await test_delete_autoscaling_policy_async(request_type=dict) + + +def test_delete_autoscaling_policy_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = autoscaling_policies.DeleteAutoscalingPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_autoscaling_policy), "__call__" + ) as call: + call.return_value = None + client.delete_autoscaling_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_autoscaling_policy_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
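+    # With name="name_value", the routing header metadata
+    # ("x-goog-request-params", "name=name_value") is expected on the call;
+    # the assertions at the end of this test check for exactly that tuple.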
+    request = autoscaling_policies.DeleteAutoscalingPolicyRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_autoscaling_policy), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_autoscaling_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_autoscaling_policy_flattened():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_autoscaling_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_autoscaling_policy(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_autoscaling_policy_flattened_error():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_autoscaling_policy(
+            autoscaling_policies.DeleteAutoscalingPolicyRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_autoscaling_policy_flattened_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_autoscaling_policy), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_autoscaling_policy(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_autoscaling_policy_flattened_error_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_autoscaling_policy( + autoscaling_policies.DeleteAutoscalingPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autoscaling_policies.CreateAutoscalingPolicyRequest, + dict, + ], +) +def test_create_autoscaling_policy_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["policy"] = { + "id": "id_value", + "name": "name_value", + "basic_algorithm": { + "yarn_config": { + "graceful_decommission_timeout": {"seconds": 751, "nanos": 543}, + "scale_up_factor": 0.1578, + "scale_down_factor": 0.1789, + "scale_up_min_worker_fraction": 0.2973, + "scale_down_min_worker_fraction": 0.3184, + }, + "cooldown_period": {}, + }, + "worker_config": {"min_instances": 1387, "max_instances": 1389, "weight": 648}, + "secondary_worker_config": {}, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_autoscaling_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +def test_create_autoscaling_policy_rest_required_fields( + request_type=autoscaling_policies.CreateAutoscalingPolicyRequest, +): + transport_class = transports.AutoscalingPolicyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autoscaling_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_autoscaling_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
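+    # The response payload is mostly irrelevant here; the assertions below only
+    # inspect the query parameters that reach the mocked session.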
+    return_value = autoscaling_policies.AutoscalingPolicy()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_autoscaling_policy(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_autoscaling_policy_rest_unset_required_fields():
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_autoscaling_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "policy",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_autoscaling_policy_rest_interceptors(null_interceptor):
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AutoscalingPolicyServiceRestInterceptor(),
+    )
+    client = AutoscalingPolicyServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor,
+        "post_create_autoscaling_policy",
+    ) as post, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor,
+        "pre_create_autoscaling_policy",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = autoscaling_policies.CreateAutoscalingPolicyRequest.pb(
+            autoscaling_policies.CreateAutoscalingPolicyRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = autoscaling_policies.AutoscalingPolicy.to_json(
+            autoscaling_policies.AutoscalingPolicy()
+        )
+
+        request = autoscaling_policies.CreateAutoscalingPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = autoscaling_policies.AutoscalingPolicy()
+
+        client.create_autoscaling_policy(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_autoscaling_policy_rest_bad_request(
+    transport: str = "rest",
+    request_type=autoscaling_policies.CreateAutoscalingPolicyRequest,
+):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["policy"] = {
+        "id": "id_value",
+        "name": "name_value",
+        "basic_algorithm": {
+            "yarn_config": {
+                "graceful_decommission_timeout": {"seconds": 751, "nanos": 543},
+                "scale_up_factor": 0.1578,
+                "scale_down_factor": 0.1789,
+                "scale_up_min_worker_fraction": 0.2973,
+                "scale_down_min_worker_fraction": 0.3184,
+            },
+            "cooldown_period": {},
+        },
+        "worker_config": {"min_instances": 1387, "max_instances": 1389, "weight": 648},
+        "secondary_worker_config": {},
+        "labels": {},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_autoscaling_policy(request)
+
+
+def test_create_autoscaling_policy_rest_flattened():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = autoscaling_policies.AutoscalingPolicy()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_autoscaling_policy(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*}/autoscalingPolicies"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_autoscaling_policy_rest_flattened_error(transport: str = "rest"):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_autoscaling_policy( + autoscaling_policies.CreateAutoscalingPolicyRequest(), + parent="parent_value", + policy=autoscaling_policies.AutoscalingPolicy(id="id_value"), + ) + + +def test_create_autoscaling_policy_rest_error(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autoscaling_policies.UpdateAutoscalingPolicyRequest, + dict, + ], +) +def test_update_autoscaling_policy_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "policy": { + "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3" + } + } + request_init["policy"] = { + "id": "id_value", + "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3", + "basic_algorithm": { + "yarn_config": { + "graceful_decommission_timeout": {"seconds": 751, "nanos": 543}, + "scale_up_factor": 0.1578, + "scale_down_factor": 0.1789, + "scale_up_min_worker_fraction": 0.2973, + "scale_down_min_worker_fraction": 0.3184, + }, + "cooldown_period": {}, + }, + "worker_config": {"min_instances": 1387, "max_instances": 1389, "weight": 648}, + "secondary_worker_config": {}, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_autoscaling_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +def test_update_autoscaling_policy_rest_required_fields( + request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest, +): + transport_class = transports.AutoscalingPolicyServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_autoscaling_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_autoscaling_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autoscaling_policies.AutoscalingPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "put",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_autoscaling_policy(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_autoscaling_policy_rest_unset_required_fields():
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.update_autoscaling_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("policy",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_autoscaling_policy_rest_interceptors(null_interceptor):
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AutoscalingPolicyServiceRestInterceptor(),
+    )
+    client = AutoscalingPolicyServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor,
+        "post_update_autoscaling_policy",
+    ) as post, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor,
+        "pre_update_autoscaling_policy",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = autoscaling_policies.UpdateAutoscalingPolicyRequest.pb(
+            autoscaling_policies.UpdateAutoscalingPolicyRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = autoscaling_policies.AutoscalingPolicy.to_json(
+            autoscaling_policies.AutoscalingPolicy()
+        )
+
+        request = autoscaling_policies.UpdateAutoscalingPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = autoscaling_policies.AutoscalingPolicy()
+
+        client.update_autoscaling_policy(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_autoscaling_policy_rest_bad_request(
+    transport: str = "rest",
+    request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest,
+):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "policy": {
+            "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3"
+        }
+    }
+    request_init["policy"] = {
+        "id": "id_value",
+        "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3",
+        "basic_algorithm": {
+            "yarn_config": {
+                "graceful_decommission_timeout": {"seconds": 751, "nanos": 543},
+                "scale_up_factor": 0.1578,
+                "scale_down_factor": 0.1789,
+                "scale_up_min_worker_fraction": 0.2973,
+                "scale_down_min_worker_fraction": 0.3184,
+            },
+            "cooldown_period": {},
+        },
+        "worker_config": {"min_instances": 1387, "max_instances": 1389, "weight": 648},
+        "secondary_worker_config": {},
+        "labels": {},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_autoscaling_policy(request)
+
+
+def test_update_autoscaling_policy_rest_flattened():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = autoscaling_policies.AutoscalingPolicy()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "policy": {
+                "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3"
+            }
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.update_autoscaling_policy(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_update_autoscaling_policy_rest_flattened_error(transport: str = "rest"):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_autoscaling_policy(
+            autoscaling_policies.UpdateAutoscalingPolicyRequest(),
+            policy=autoscaling_policies.AutoscalingPolicy(id="id_value"),
+        )
+
+
+def test_update_autoscaling_policy_rest_error():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        autoscaling_policies.GetAutoscalingPolicyRequest,
+        dict,
+    ],
+)
+def test_get_autoscaling_policy_rest(request_type):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autoscaling_policies.AutoscalingPolicy( + id="id_value", + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_autoscaling_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, autoscaling_policies.AutoscalingPolicy) + assert response.id == "id_value" + assert response.name == "name_value" + + +def test_get_autoscaling_policy_rest_required_fields( + request_type=autoscaling_policies.GetAutoscalingPolicyRequest, +): + transport_class = transports.AutoscalingPolicyServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autoscaling_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_autoscaling_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autoscaling_policies.AutoscalingPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_autoscaling_policy(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_autoscaling_policy_rest_unset_required_fields():
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_autoscaling_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_autoscaling_policy_rest_interceptors(null_interceptor):
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AutoscalingPolicyServiceRestInterceptor(),
+    )
+    client = AutoscalingPolicyServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor,
+        "post_get_autoscaling_policy",
+    ) as post, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor, "pre_get_autoscaling_policy"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = autoscaling_policies.GetAutoscalingPolicyRequest.pb(
+            autoscaling_policies.GetAutoscalingPolicyRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = autoscaling_policies.AutoscalingPolicy.to_json(
+            autoscaling_policies.AutoscalingPolicy()
+        )
+
+        request = autoscaling_policies.GetAutoscalingPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = autoscaling_policies.AutoscalingPolicy()
+
+        client.get_autoscaling_policy(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_autoscaling_policy_rest_bad_request(
+    transport: str = "rest",
+    request_type=autoscaling_policies.GetAutoscalingPolicyRequest,
+):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
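+    # google.api_core translates HTTP error statuses into typed exceptions, so
+    # the 400 response faked below should surface as core_exceptions.BadRequest.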
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_autoscaling_policy(request) + + +def test_get_autoscaling_policy_rest_flattened(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autoscaling_policies.AutoscalingPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_autoscaling_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_autoscaling_policy_rest_flattened_error(transport: str = "rest"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_autoscaling_policy( + autoscaling_policies.GetAutoscalingPolicyRequest(), + name="name_value", + ) + + +def test_get_autoscaling_policy_rest_error(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + autoscaling_policies.ListAutoscalingPoliciesRequest, + dict, + ], +) +def test_list_autoscaling_policies_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
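+        # The transport is expected to deserialize this JSON body back into the
+        # proto response, which the client then wraps in a
+        # ListAutoscalingPoliciesPager (asserted at the end of this test).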
+ return_value = autoscaling_policies.ListAutoscalingPoliciesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = autoscaling_policies.ListAutoscalingPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_autoscaling_policies(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAutoscalingPoliciesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_autoscaling_policies_rest_required_fields( + request_type=autoscaling_policies.ListAutoscalingPoliciesRequest, +): + transport_class = transports.AutoscalingPolicyServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autoscaling_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_autoscaling_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = autoscaling_policies.ListAutoscalingPoliciesResponse.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_autoscaling_policies(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_autoscaling_policies_rest_unset_required_fields():
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_autoscaling_policies._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_autoscaling_policies_rest_interceptors(null_interceptor):
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AutoscalingPolicyServiceRestInterceptor(),
+    )
+    client = AutoscalingPolicyServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor,
+        "post_list_autoscaling_policies",
+    ) as post, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor,
+        "pre_list_autoscaling_policies",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = autoscaling_policies.ListAutoscalingPoliciesRequest.pb(
+            autoscaling_policies.ListAutoscalingPoliciesRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = (
+            autoscaling_policies.ListAutoscalingPoliciesResponse.to_json(
+                autoscaling_policies.ListAutoscalingPoliciesResponse()
+            )
+        )
+
+        request = autoscaling_policies.ListAutoscalingPoliciesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse()
+
+        client.list_autoscaling_policies(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_autoscaling_policies_rest_bad_request(
+    transport: str = "rest",
+    request_type=autoscaling_policies.ListAutoscalingPoliciesRequest,
+):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_autoscaling_policies(request) + + +def test_list_autoscaling_policies_rest_flattened(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = autoscaling_policies.ListAutoscalingPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_autoscaling_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/autoscalingPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_autoscaling_policies_rest_flattened_error(transport: str = "rest"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_autoscaling_policies( + autoscaling_policies.ListAutoscalingPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_autoscaling_policies_rest_pager(transport: str = "rest"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + autoscaling_policies.ListAutoscalingPoliciesResponse( + policies=[ + autoscaling_policies.AutoscalingPolicy(), + autoscaling_policies.AutoscalingPolicy(), + autoscaling_policies.AutoscalingPolicy(), + ], + next_page_token="abc", + ), + autoscaling_policies.ListAutoscalingPoliciesResponse( + policies=[], + next_page_token="def", + ), + autoscaling_policies.ListAutoscalingPoliciesResponse( + policies=[ + autoscaling_policies.AutoscalingPolicy(), + ], + next_page_token="ghi", + ), + autoscaling_policies.ListAutoscalingPoliciesResponse( + policies=[ + autoscaling_policies.AutoscalingPolicy(), + autoscaling_policies.AutoscalingPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + autoscaling_policies.ListAutoscalingPoliciesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_autoscaling_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, autoscaling_policies.AutoscalingPolicy) for i in results + ) + + pages = list(client.list_autoscaling_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + autoscaling_policies.DeleteAutoscalingPolicyRequest, + dict, + ], +) +def test_delete_autoscaling_policy_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_autoscaling_policy(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_autoscaling_policy_rest_required_fields( + request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest, +): + transport_class = transports.AutoscalingPolicyServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autoscaling_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_autoscaling_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ""
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_autoscaling_policy(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_autoscaling_policy_rest_unset_required_fields():
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_autoscaling_policy._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_autoscaling_policy_rest_interceptors(null_interceptor):
+    transport = transports.AutoscalingPolicyServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.AutoscalingPolicyServiceRestInterceptor(),
+    )
+    client = AutoscalingPolicyServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.AutoscalingPolicyServiceRestInterceptor,
+        "pre_delete_autoscaling_policy",
+    ) as pre:
+        pre.assert_not_called()
+        pb_message = autoscaling_policies.DeleteAutoscalingPolicyRequest.pb(
+            autoscaling_policies.DeleteAutoscalingPolicyRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = autoscaling_policies.DeleteAutoscalingPolicyRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_autoscaling_policy(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+
+
+def test_delete_autoscaling_policy_rest_bad_request(
+    transport: str = "rest",
+    request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest,
+):
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_autoscaling_policy(request)
+
+
+def test_delete_autoscaling_policy_rest_flattened():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
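+    # DeleteAutoscalingPolicy returns an empty message, so the faked HTTP body
+    # below is an empty string and the client surfaces the response as None.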
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autoscalingPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_autoscaling_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_autoscaling_policy_rest_flattened_error(transport: str = "rest"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_autoscaling_policy( + autoscaling_policies.DeleteAutoscalingPolicyRequest(), + name="name_value", + ) + + +def test_delete_autoscaling_policy_rest_error(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AutoscalingPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AutoscalingPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutoscalingPolicyServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AutoscalingPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AutoscalingPolicyServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AutoscalingPolicyServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AutoscalingPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AutoscalingPolicyServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AutoscalingPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AutoscalingPolicyServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AutoscalingPolicyServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AutoscalingPolicyServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + transports.AutoscalingPolicyServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = AutoscalingPolicyServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AutoscalingPolicyServiceGrpcTransport, + ) + + +def test_autoscaling_policy_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AutoscalingPolicyServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_autoscaling_policy_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AutoscalingPolicyServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
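+    # The base transport is effectively abstract: every RPC named below must
+    # be overridden by a concrete gRPC or REST transport.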
+ methods = ( + "create_autoscaling_policy", + "update_autoscaling_policy", + "get_autoscaling_policy", + "list_autoscaling_policies", + "delete_autoscaling_policy", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_autoscaling_policy_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutoscalingPolicyServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_autoscaling_policy_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutoscalingPolicyServiceTransport() + adc.assert_called_once() + + +def test_autoscaling_policy_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AutoscalingPolicyServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_autoscaling_policy_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
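+    # google.auth.default is patched, so no real ADC lookup happens; the
+    # assertion only checks that scopes and quota_project_id pass through.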
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + transports.AutoscalingPolicyServiceRestTransport, + ], +) +def test_autoscaling_policy_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoscalingPolicyServiceGrpcTransport, grpc_helpers), + (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_autoscaling_policy_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
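+    # client_cert_source_for_mtls receives the module-level test callback,
+    # whose (cert, key) pair must reach grpc.ssl_channel_credentials intact.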
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_autoscaling_policy_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AutoscalingPolicyServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_autoscaling_policy_service_host_no_port(transport_name): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dataproc.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_autoscaling_policy_service_host_with_port(transport_name): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dataproc.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dataproc.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_autoscaling_policy_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AutoscalingPolicyServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AutoscalingPolicyServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_autoscaling_policy._session + session2 = client2.transport.create_autoscaling_policy._session + assert session1 != session2 + session1 = client1.transport.update_autoscaling_policy._session + session2 = client2.transport.update_autoscaling_policy._session + assert session1 != session2 + session1 = client1.transport.get_autoscaling_policy._session + session2 = client2.transport.get_autoscaling_policy._session + assert session1 != session2 + session1 = client1.transport.list_autoscaling_policies._session + session2 = client2.transport.list_autoscaling_policies._session + assert session1 != session2 + session1 = client1.transport.delete_autoscaling_policy._session + session2 = client2.transport.delete_autoscaling_policy._session + assert session1 != session2 + + +def test_autoscaling_policy_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
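+    # With an explicit channel the transport creates no channel of its own,
+    # so no SSL credentials are recorded (asserted below).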
+    transport = transports.AutoscalingPolicyServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_autoscaling_policy_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.AutoscalingPolicyServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.AutoscalingPolicyServiceGrpcTransport,
+        transports.AutoscalingPolicyServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_autoscaling_policy_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +def test_autoscaling_policy_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_autoscaling_policy_path(): + project = "squid" + location = "clam" + autoscaling_policy = "whelk" + expected = "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}".format( + project=project, + location=location, + autoscaling_policy=autoscaling_policy, + ) + actual = AutoscalingPolicyServiceClient.autoscaling_policy_path( + project, location, autoscaling_policy + ) + assert expected == actual + + +def test_parse_autoscaling_policy_path(): + expected = { + "project": "octopus", + "location": "oyster", + "autoscaling_policy": "nudibranch", + } + path = AutoscalingPolicyServiceClient.autoscaling_policy_path(**expected) + + # Check that the path construction is reversible. + actual = AutoscalingPolicyServiceClient.parse_autoscaling_policy_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AutoscalingPolicyServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = AutoscalingPolicyServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AutoscalingPolicyServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AutoscalingPolicyServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = AutoscalingPolicyServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
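+    # parse_* applies the inverse of the path template, so a format/parse
+    # round trip must reproduce the original components exactly.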
+ actual = AutoscalingPolicyServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AutoscalingPolicyServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = AutoscalingPolicyServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AutoscalingPolicyServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = AutoscalingPolicyServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = AutoscalingPolicyServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AutoscalingPolicyServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AutoscalingPolicyServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = AutoscalingPolicyServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = AutoscalingPolicyServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AutoscalingPolicyServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AutoscalingPolicyServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AutoscalingPolicyServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
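+    # A 400 from the session should surface as
+    # google.api_core.exceptions.BadRequest via the REST error mapping.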
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
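+    # (The body faked above came from MessageToJson, so the client should
+    # have parsed it back into a Policy message.)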
+ assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
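+        # CancelOperation maps to google.protobuf.Empty, so the faked body is
+        # just "{}" and the client is expected to return None.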
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
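+        # DeleteOperation likewise has no payload, so None stands in for the
+        # response; the test only checks that the stub was invoked.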
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
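+        # FakeUnaryUnaryCall wraps the message in an awaitable, standing in
+        # for a grpc.aio unary-unary call object.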
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
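+        # version=774 and etag=b"etag_blob" are arbitrary non-default values;
+        # the assertions further down check that they propagate through the
+        # awaited response unchanged.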
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+def test_set_iam_policy_from_dict():
+    client = AutoscalingPolicyServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+
+        response = client.set_iam_policy(
+            request={
+                "resource": "resource_value",
+                "policy": policy_pb2.Policy(version=774),
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_from_dict_async():
+    client = AutoscalingPolicyServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
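The `x-goog-request-params` pairs asserted in these field-header tests are produced by `google.api_core.gapic_v1.routing_header`, which flattens the URI-bound request fields into a single metadata entry. A small sketch of that mechanism:

```python
from google.api_core.gapic_v1 import routing_header

# to_grpc_metadata turns (field, value) pairs into the one metadata tuple
# that the assertions above look for in kw["metadata"].
metadata = routing_header.to_grpc_metadata([("resource", "resource/value")])
assert metadata == ("x-goog-request-params", "resource=resource/value")
```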
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
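The from_dict variants above lean on the clients' coercion of plain dicts into request messages. For reference, a sketch of the explicit-proto equivalent of the `get_iam_policy` dict (2598 is just the tests' arbitrary version number):

```python
from google.iam.v1 import iam_policy_pb2, options_pb2

request = iam_policy_pb2.GetIamPolicyRequest(
    resource="resource_value",
    options=options_pb2.GetPolicyOptions(requested_policy_version=2598),
)
# client.get_iam_policy(request=request) behaves identically to the dict form.
```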
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
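Outside of the mocked transport, `test_iam_permissions` is the standard capability probe. A hedged usage sketch; the permission string is a placeholder, not necessarily a real Dataproc permission name:

```python
from google.cloud.dataproc_v1 import AutoscalingPolicyServiceClient


def check_access(client: AutoscalingPolicyServiceClient, resource: str) -> set:
    # The service echoes back the subset of the requested permissions
    # that the caller actually holds on the resource.
    response = client.test_iam_permissions(
        request={
            "resource": resource,
            "permissions": ["dataproc.autoscalingPolicies.get"],  # placeholder
        }
    )
    return set(response.permissions)
```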
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = AutoscalingPolicyServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AutoscalingPolicyServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
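`test_transport_close` and `test_client_ctx` (continued below) pin down the resource-management contract: using the client as a context manager closes the underlying gRPC channel or REST session on exit. In application code that looks like:

```python
from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1 import AutoscalingPolicyServiceClient

with AutoscalingPolicyServiceClient(
    credentials=ga_credentials.AnonymousCredentials()
) as client:
    pass  # issue RPCs here
# Leaving the block called client.transport.close(), so the grpc channel
# (or the requests session for the REST transport) is shut down.
```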
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + AutoscalingPolicyServiceClient, + transports.AutoscalingPolicyServiceGrpcTransport, + ), + ( + AutoscalingPolicyServiceAsyncClient, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py new file mode 100644 index 000000000000..3cbcbf5cd01f --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -0,0 +1,5514 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
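For `test_api_key_credentials` above: setting `ClientOptions.api_key` makes the client fetch API-key credentials from google.auth instead of using ADC. A hedged sketch; the key is a placeholder, and this path assumes a google-auth release with API-key support:

```python
from google.api_core import client_options
from google.cloud.dataproc_v1 import AutoscalingPolicyServiceClient

options = client_options.ClientOptions(api_key="YOUR_API_KEY")  # placeholder
client = AutoscalingPolicyServiceClient(client_options=options)
```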
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.dataproc_v1.services.batch_controller import ( + BatchControllerAsyncClient, + BatchControllerClient, + pagers, + transports, +) +from google.cloud.dataproc_v1.types import batches, operations, shared + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BatchControllerClient._get_default_mtls_endpoint(None) is None + assert ( + BatchControllerClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BatchControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BatchControllerClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BatchControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BatchControllerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BatchControllerClient, "grpc"), + (BatchControllerAsyncClient, "grpc_asyncio"), + (BatchControllerClient, "rest"), + ], +) +def test_batch_controller_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.BatchControllerGrpcTransport, "grpc"), + (transports.BatchControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BatchControllerRestTransport, "rest"), + ], +) +def test_batch_controller_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BatchControllerClient, "grpc"), + (BatchControllerAsyncClient, "grpc_asyncio"), + (BatchControllerClient, "rest"), + ], +) +def test_batch_controller_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +def test_batch_controller_client_get_transport_class(): + transport = BatchControllerClient.get_transport_class() + available_transports = [ + transports.BatchControllerGrpcTransport, + transports.BatchControllerRestTransport, + ] + assert transport in available_transports + + transport = BatchControllerClient.get_transport_class("grpc") + assert transport == transports.BatchControllerGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc"), + ( + BatchControllerAsyncClient, + transports.BatchControllerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (BatchControllerClient, transports.BatchControllerRestTransport, "rest"), + ], +) +@mock.patch.object( + BatchControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BatchControllerClient), +) +@mock.patch.object( + BatchControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BatchControllerAsyncClient), +) +def test_batch_controller_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(BatchControllerClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(BatchControllerClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_audience is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + BatchControllerClient, + transports.BatchControllerGrpcTransport, + "grpc", + "true", + ), + ( + BatchControllerAsyncClient, + transports.BatchControllerGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + BatchControllerClient, + transports.BatchControllerGrpcTransport, + "grpc", + "false", + ), + ( + BatchControllerAsyncClient, + transports.BatchControllerGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + BatchControllerClient, + transports.BatchControllerRestTransport, + "rest", + "true", + ), + ( + BatchControllerClient, + transports.BatchControllerRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + BatchControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BatchControllerClient), +) +@mock.patch.object( + BatchControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BatchControllerAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_batch_controller_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior.
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
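Summarizing the matrix this test walks: under `GOOGLE_API_USE_MTLS_ENDPOINT=auto` (the default), the client targets `DEFAULT_MTLS_ENDPOINT` only when a client certificate is available and `GOOGLE_API_USE_CLIENT_CERTIFICATE` is `"true"`. A sketch of opting in explicitly:

```python
import os

from google.api_core import client_options


def client_cert_source():
    # Placeholder: return the client certificate and private key bytes.
    return b"cert bytes", b"key bytes"


os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
options = client_options.ClientOptions(client_cert_source=client_cert_source)
# BatchControllerClient(client_options=options) would now dial
# BatchControllerClient.DEFAULT_MTLS_ENDPOINT instead of DEFAULT_ENDPOINT.
```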
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [BatchControllerClient, BatchControllerAsyncClient] +) +@mock.patch.object( + BatchControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BatchControllerClient), +) +@mock.patch.object( + BatchControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BatchControllerAsyncClient), +) +def test_batch_controller_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
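The same selection logic is exposed directly through the classmethod under test, which is convenient when debugging endpoint or certificate configuration:

```python
from google.cloud.dataproc_v1 import BatchControllerClient

# With no env overrides and no default client certificate installed,
# this returns (BatchControllerClient.DEFAULT_ENDPOINT, None).
endpoint, cert_source = BatchControllerClient.get_mtls_endpoint_and_cert_source()
print(endpoint, cert_source)
```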
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc"), + ( + BatchControllerAsyncClient, + transports.BatchControllerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (BatchControllerClient, transports.BatchControllerRestTransport, "rest"), + ], +) +def test_batch_controller_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BatchControllerClient, + transports.BatchControllerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BatchControllerAsyncClient, + transports.BatchControllerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (BatchControllerClient, transports.BatchControllerRestTransport, "rest", None), + ], +) +def test_batch_controller_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_batch_controller_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dataproc_v1.services.batch_controller.transports.BatchControllerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = BatchControllerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BatchControllerClient, + transports.BatchControllerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BatchControllerAsyncClient, + transports.BatchControllerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_batch_controller_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
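The non-mocked shape of what these credentials_file tests assert, as a sketch with a placeholder path:

```python
from google.api_core import client_options
from google.cloud.dataproc_v1 import BatchControllerClient

options = client_options.ClientOptions(credentials_file="credentials.json")  # placeholder
# client = BatchControllerClient(client_options=options)
# The transport loads the key file via google.auth.load_credentials_from_file
# and dials dataproc.googleapis.com:443 with the cloud-platform default scope,
# matching the create_channel expectation below.
```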
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batches.CreateBatchRequest, + dict, + ], +) +def test_create_batch(request_type, transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == batches.CreateBatchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_batch_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_batch), "__call__") as call: + client.create_batch() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batches.CreateBatchRequest() + + +@pytest.mark.asyncio +async def test_create_batch_async( + transport: str = "grpc_asyncio", request_type=batches.CreateBatchRequest +): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_batch(request) + + # Establish that the underlying gRPC stub method was called. 
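`create_batch` is a long-running operation; the mocked tests only verify that the returned `operations_pb2.Operation` is wrapped as a future. Against a live service the typical pattern, sketched with placeholder resource names, is:

```python
from google.cloud.dataproc_v1 import BatchControllerClient
from google.cloud.dataproc_v1.types import Batch, PySparkBatch


def run_batch(client: BatchControllerClient) -> Batch:
    operation = client.create_batch(
        parent="projects/my-project/locations/us-central1",  # placeholder
        batch=Batch(
            pyspark_batch=PySparkBatch(
                main_python_file_uri="gs://my-bucket/job.py"  # placeholder
            )
        ),
        batch_id="my-batch",  # placeholder
    )
    # The future resolves to the finished Batch resource.
    return operation.result()
```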
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == batches.CreateBatchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_batch_async_from_dict(): + await test_create_batch_async(request_type=dict) + + +def test_create_batch_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batches.CreateBatchRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_batch), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_batch_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batches.CreateBatchRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_batch), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_batch_flattened(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_batch( + parent="parent_value", + batch=batches.Batch(name="name_value"), + batch_id="batch_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].batch + mock_val = batches.Batch(name="name_value") + assert arg == mock_val + arg = args[0].batch_id + mock_val = "batch_id_value" + assert arg == mock_val + + +def test_create_batch_flattened_error(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
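The flattened tests encode the GAPIC calling convention: each method accepts either a request object (or dict) or the flattened fields, and mixing the two raises `ValueError`, as the error tests below confirm. In sketch form:

```python
from google.cloud.dataproc_v1 import BatchControllerClient
from google.cloud.dataproc_v1.types import GetBatchRequest


def fetch(client: BatchControllerClient, name: str):
    # A request object/dict OR flattened fields, never both:
    return client.get_batch(request=GetBatchRequest(name=name))
    # Equivalent:        client.get_batch(name=name)
    # Raises ValueError: client.get_batch(GetBatchRequest(), name=name)
```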
+ with pytest.raises(ValueError): + client.create_batch( + batches.CreateBatchRequest(), + parent="parent_value", + batch=batches.Batch(name="name_value"), + batch_id="batch_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_batch_flattened_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_batch( + parent="parent_value", + batch=batches.Batch(name="name_value"), + batch_id="batch_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].batch + mock_val = batches.Batch(name="name_value") + assert arg == mock_val + arg = args[0].batch_id + mock_val = "batch_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_batch_flattened_error_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_batch( + batches.CreateBatchRequest(), + parent="parent_value", + batch=batches.Batch(name="name_value"), + batch_id="batch_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batches.GetBatchRequest, + dict, + ], +) +def test_get_batch(request_type, transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = batches.Batch( + name="name_value", + uuid="uuid_value", + state=batches.Batch.State.PENDING, + state_message="state_message_value", + creator="creator_value", + operation="operation_value", + ) + response = client.get_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == batches.GetBatchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, batches.Batch) + assert response.name == "name_value" + assert response.uuid == "uuid_value" + assert response.state == batches.Batch.State.PENDING + assert response.state_message == "state_message_value" + assert response.creator == "creator_value" + assert response.operation == "operation_value" + + +def test_get_batch_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_batch), "__call__") as call: + client.get_batch() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batches.GetBatchRequest() + + +@pytest.mark.asyncio +async def test_get_batch_async( + transport: str = "grpc_asyncio", request_type=batches.GetBatchRequest +): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + batches.Batch( + name="name_value", + uuid="uuid_value", + state=batches.Batch.State.PENDING, + state_message="state_message_value", + creator="creator_value", + operation="operation_value", + ) + ) + response = await client.get_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == batches.GetBatchRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, batches.Batch) + assert response.name == "name_value" + assert response.uuid == "uuid_value" + assert response.state == batches.Batch.State.PENDING + assert response.state_message == "state_message_value" + assert response.creator == "creator_value" + assert response.operation == "operation_value" + + +@pytest.mark.asyncio +async def test_get_batch_async_from_dict(): + await test_get_batch_async(request_type=dict) + + +def test_get_batch_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batches.GetBatchRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_batch), "__call__") as call: + call.return_value = batches.Batch() + client.get_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_batch_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batches.GetBatchRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_batch), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batches.Batch()) + await client.get_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_batch_flattened(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = batches.Batch() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_batch( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_batch_flattened_error(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_batch( + batches.GetBatchRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_batch_flattened_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = batches.Batch() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batches.Batch()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_batch( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_batch_flattened_error_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_batch( + batches.GetBatchRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batches.ListBatchesRequest, + dict, + ], +) +def test_list_batches(request_type, transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = batches.ListBatchesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_batches(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == batches.ListBatchesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBatchesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_batches_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + client.list_batches() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batches.ListBatchesRequest() + + +@pytest.mark.asyncio +async def test_list_batches_async( + transport: str = "grpc_asyncio", request_type=batches.ListBatchesRequest +): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + batches.ListBatchesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_batches(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == batches.ListBatchesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBatchesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_batches_async_from_dict(): + await test_list_batches_async(request_type=dict) + + +def test_list_batches_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batches.ListBatchesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + call.return_value = batches.ListBatchesResponse() + client.list_batches(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_batches_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batches.ListBatchesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + batches.ListBatchesResponse() + ) + await client.list_batches(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_batches_flattened(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = batches.ListBatchesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_batches( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_batches_flattened_error(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_batches( + batches.ListBatchesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_batches_flattened_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = batches.ListBatchesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + batches.ListBatchesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_batches( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_batches_flattened_error_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
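# For context before the assertion below: flattened keyword arguments are
# folded into a request object, so the two accepted calling styles build
# identical requests. A sketch reusing this module's `batches` import
# (request construction only, no RPC is made):
request_from_kwargs = batches.ListBatchesRequest(parent="parent_value")
request_explicit = batches.ListBatchesRequest()
request_explicit.parent = "parent_value"
# proto-plus messages compare field-by-field, so both forms are equal;
# mixing the two styles in one call is what raises ValueError.
assert request_from_kwargs == request_explicit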
+ with pytest.raises(ValueError): + await client.list_batches( + batches.ListBatchesRequest(), + parent="parent_value", + ) + + +def test_list_batches_pager(transport_name: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + batches.Batch(), + ], + next_page_token="abc", + ), + batches.ListBatchesResponse( + batches=[], + next_page_token="def", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + ], + next_page_token="ghi", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_batches(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, batches.Batch) for i in results) + + +def test_list_batches_pages(transport_name: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_batches), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + batches.Batch(), + ], + next_page_token="abc", + ), + batches.ListBatchesResponse( + batches=[], + next_page_token="def", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + ], + next_page_token="ghi", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + ], + ), + RuntimeError, + ) + pages = list(client.list_batches(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_batches_async_pager(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_batches), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
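# The pager tests here hinge on one invariant: the client re-issues the
# request with each response's next_page_token until the token comes back
# empty. A minimal, library-independent sketch of that loop:
def iterate_pages(call_api, request):
    """Yield response pages until next_page_token is empty."""
    while True:
        response = call_api(request)
        yield response
        if not response.next_page_token:
            break
        request.page_token = response.next_page_token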
+ call.side_effect = ( + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + batches.Batch(), + ], + next_page_token="abc", + ), + batches.ListBatchesResponse( + batches=[], + next_page_token="def", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + ], + next_page_token="ghi", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_batches( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, batches.Batch) for i in responses) + + +@pytest.mark.asyncio +async def test_list_batches_async_pages(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_batches), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + batches.Batch(), + ], + next_page_token="abc", + ), + batches.ListBatchesResponse( + batches=[], + next_page_token="def", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + ], + next_page_token="ghi", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_batches(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + batches.DeleteBatchRequest, + dict, + ], +) +def test_delete_batch(request_type, transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == batches.DeleteBatchRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_batch_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
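# The async pager tests above pass new_callable=mock.AsyncMock so the patched
# __call__ returns coroutines whose results walk the side_effect sequence.
# A standalone illustration using only the standard library:
import asyncio
from unittest import mock


async def _async_mock_demo():
    fake = mock.AsyncMock(side_effect=["page1", "page2"])
    assert await fake() == "page1"
    assert await fake() == "page2"


asyncio.run(_async_mock_demo())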
+ with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: + client.delete_batch() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batches.DeleteBatchRequest() + + +@pytest.mark.asyncio +async def test_delete_batch_async( + transport: str = "grpc_asyncio", request_type=batches.DeleteBatchRequest +): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == batches.DeleteBatchRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_batch_async_from_dict(): + await test_delete_batch_async(request_type=dict) + + +def test_delete_batch_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batches.DeleteBatchRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: + call.return_value = None + client.delete_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_batch_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batches.DeleteBatchRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_batch(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_batch_flattened(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_batch), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_batch(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_batch_flattened_error():
+    client = BatchControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_batch(
+            batches.DeleteBatchRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_batch_flattened_async():
+    client = BatchControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_batch), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_batch(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_batch_flattened_error_async():
+    client = BatchControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_batch( + batches.DeleteBatchRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batches.CreateBatchRequest, + dict, + ], +) +def test_create_batch_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["batch"] = { + "name": "name_value", + "uuid": "uuid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "pyspark_batch": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": ["python_file_uris_value1", "python_file_uris_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + }, + "spark_batch": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + }, + "spark_r_batch": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + }, + "spark_sql_batch": { + "query_file_uri": "query_file_uri_value", + "query_variables": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "runtime_info": { + "endpoints": {}, + "output_uri": "output_uri_value", + "diagnostic_output_uri": "diagnostic_output_uri_value", + "approximate_usage": { + "milli_dcu_seconds": 1792, + "shuffle_storage_gb_seconds": 2743, + }, + "current_usage": { + "milli_dcu": 946, + "shuffle_storage_gb": 1897, + "snapshot_time": {}, + }, + }, + "state": 1, + "state_message": "state_message_value", + "state_time": {}, + "creator": "creator_value", + "labels": {}, + "runtime_config": { + "version": "version_value", + "container_image": "container_image_value", + "properties": {}, + }, + "environment_config": { + "execution_config": { + "service_account": "service_account_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "network_tags": ["network_tags_value1", "network_tags_value2"], + "kms_key": "kms_key_value", + "ttl": {"seconds": 751, "nanos": 543}, + "staging_bucket": "staging_bucket_value", + }, + "peripherals_config": { + "metastore_service": "metastore_service_value", + "spark_history_server_config": { + "dataproc_cluster": "dataproc_cluster_value" + }, + }, + }, + "operation": "operation_value", + "state_history": [ + {"state": 1, "state_message": "state_message_value", "state_start_time": {}} + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
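# The REST tests fake responses by serializing a protobuf with json_format and
# writing the bytes into requests.Response._content. The round trip they rely
# on, in isolation (operations_pb2 ships with googleapis-common-protos):
from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name="operations/spam")
payload = json_format.MessageToJson(op)
parsed = json_format.Parse(payload, operations_pb2.Operation())
assert parsed.name == "operations/spam"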
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_batch(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_batch_rest_required_fields(request_type=batches.CreateBatchRequest): + transport_class = transports.BatchControllerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_batch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_batch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "batch_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
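# Transcoding maps proto fields onto the HTTP rule's URI template; the
# path_template helper used throughout these tests can both expand and
# validate such templates. A short example with a placeholder resource name:
from google.api_core import path_template

template = "v1/{parent=projects/*/locations/*}/batches"
url = path_template.expand(template, parent="projects/p1/locations/l1")
assert url == "v1/projects/p1/locations/l1/batches"
assert path_template.validate(template, url)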
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_batch(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_batch_rest_unset_required_fields(): + transport = transports.BatchControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_batch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "batchId", + "requestId", + ) + ) + & set( + ( + "parent", + "batch", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_batch_rest_interceptors(null_interceptor): + transport = transports.BatchControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BatchControllerRestInterceptor(), + ) + client = BatchControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BatchControllerRestInterceptor, "post_create_batch" + ) as post, mock.patch.object( + transports.BatchControllerRestInterceptor, "pre_create_batch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = batches.CreateBatchRequest.pb(batches.CreateBatchRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = batches.CreateBatchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_batch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_batch_rest_bad_request( + transport: str = "rest", request_type=batches.CreateBatchRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["batch"] = { + "name": "name_value", + "uuid": "uuid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "pyspark_batch": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": ["python_file_uris_value1", "python_file_uris_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + }, + "spark_batch": { + "main_jar_file_uri": 
"main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + }, + "spark_r_batch": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + }, + "spark_sql_batch": { + "query_file_uri": "query_file_uri_value", + "query_variables": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "runtime_info": { + "endpoints": {}, + "output_uri": "output_uri_value", + "diagnostic_output_uri": "diagnostic_output_uri_value", + "approximate_usage": { + "milli_dcu_seconds": 1792, + "shuffle_storage_gb_seconds": 2743, + }, + "current_usage": { + "milli_dcu": 946, + "shuffle_storage_gb": 1897, + "snapshot_time": {}, + }, + }, + "state": 1, + "state_message": "state_message_value", + "state_time": {}, + "creator": "creator_value", + "labels": {}, + "runtime_config": { + "version": "version_value", + "container_image": "container_image_value", + "properties": {}, + }, + "environment_config": { + "execution_config": { + "service_account": "service_account_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "network_tags": ["network_tags_value1", "network_tags_value2"], + "kms_key": "kms_key_value", + "ttl": {"seconds": 751, "nanos": 543}, + "staging_bucket": "staging_bucket_value", + }, + "peripherals_config": { + "metastore_service": "metastore_service_value", + "spark_history_server_config": { + "dataproc_cluster": "dataproc_cluster_value" + }, + }, + }, + "operation": "operation_value", + "state_history": [ + {"state": 1, "state_message": "state_message_value", "state_start_time": {}} + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_batch(request) + + +def test_create_batch_rest_flattened(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + batch=batches.Batch(name="name_value"), + batch_id="batch_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_batch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/batches" % client.transport._host, + args[1], + ) + + +def test_create_batch_rest_flattened_error(transport: str = "rest"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_batch( + batches.CreateBatchRequest(), + parent="parent_value", + batch=batches.Batch(name="name_value"), + batch_id="batch_id_value", + ) + + +def test_create_batch_rest_error(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batches.GetBatchRequest, + dict, + ], +) +def test_get_batch_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/batches/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = batches.Batch( + name="name_value", + uuid="uuid_value", + state=batches.Batch.State.PENDING, + state_message="state_message_value", + creator="creator_value", + operation="operation_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = batches.Batch.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_batch(request) + + # Establish that the response is the type that we expect. 
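# Batch.pb(...) above unwraps the proto-plus message into the raw protobuf
# that json_format understands (the wrapper itself is not a protobuf Message).
# Demonstration, reusing this module's `batches` import:
from google.protobuf import json_format

wrapped = batches.Batch(name="name_value")
raw = batches.Batch.pb(wrapped)
assert '"name": "name_value"' in json_format.MessageToJson(raw)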
+ assert isinstance(response, batches.Batch) + assert response.name == "name_value" + assert response.uuid == "uuid_value" + assert response.state == batches.Batch.State.PENDING + assert response.state_message == "state_message_value" + assert response.creator == "creator_value" + assert response.operation == "operation_value" + + +def test_get_batch_rest_required_fields(request_type=batches.GetBatchRequest): + transport_class = transports.BatchControllerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_batch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_batch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = batches.Batch() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = batches.Batch.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_batch(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_batch_rest_unset_required_fields(): + transport = transports.BatchControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_batch._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_batch_rest_interceptors(null_interceptor): + transport = transports.BatchControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BatchControllerRestInterceptor(), + ) + client = BatchControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BatchControllerRestInterceptor, "post_get_batch" + ) as post, mock.patch.object( + transports.BatchControllerRestInterceptor, "pre_get_batch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = batches.GetBatchRequest.pb(batches.GetBatchRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = batches.Batch.to_json(batches.Batch()) + + request = batches.GetBatchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = batches.Batch() + + client.get_batch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_batch_rest_bad_request( + transport: str = "rest", request_type=batches.GetBatchRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/batches/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_batch(request) + + +def test_get_batch_rest_flattened(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
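# The pre_/post_ hooks exercised in the interceptor test above are also the
# public extension point. A sketch of a custom interceptor, reusing this
# module's `transports` import (signatures mirror the generated hooks and
# should be treated as illustrative):
class AuditInterceptor(transports.BatchControllerRestInterceptor):
    def pre_get_batch(self, request, metadata):
        # Inspect or rewrite the request/metadata before transcoding.
        return request, metadata

    def post_get_batch(self, response):
        # Inspect the deserialized Batch before it reaches the caller.
        return response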
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = batches.Batch() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/batches/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = batches.Batch.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_batch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/batches/*}" % client.transport._host, + args[1], + ) + + +def test_get_batch_rest_flattened_error(transport: str = "rest"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_batch( + batches.GetBatchRequest(), + name="name_value", + ) + + +def test_get_batch_rest_error(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + batches.ListBatchesRequest, + dict, + ], +) +def test_list_batches_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = batches.ListBatchesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = batches.ListBatchesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_batches(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBatchesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_batches_rest_required_fields(request_type=batches.ListBatchesRequest): + transport_class = transports.BatchControllerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_batches._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_batches._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = batches.ListBatchesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
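# Note the casing split in the unset-required-fields checks: proto fields are
# snake_case ("page_size"), while the REST query parameters use the proto3
# JSON lowerCamelCase spelling ("pageSize"). For example:
from google.protobuf import json_format

req = batches.ListBatchesRequest(parent="projects/p/locations/l", page_size=5)
as_json = json_format.MessageToJson(batches.ListBatchesRequest.pb(req))
assert '"pageSize": 5' in as_json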
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = batches.ListBatchesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_batches(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_batches_rest_unset_required_fields(): + transport = transports.BatchControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_batches._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_batches_rest_interceptors(null_interceptor): + transport = transports.BatchControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BatchControllerRestInterceptor(), + ) + client = BatchControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BatchControllerRestInterceptor, "post_list_batches" + ) as post, mock.patch.object( + transports.BatchControllerRestInterceptor, "pre_list_batches" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = batches.ListBatchesRequest.pb(batches.ListBatchesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = batches.ListBatchesResponse.to_json( + batches.ListBatchesResponse() + ) + + request = batches.ListBatchesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = batches.ListBatchesResponse() + + client.list_batches( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_batches_rest_bad_request( + transport: str = "rest", request_type=batches.ListBatchesRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
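# The 400 responses faked below surface as core_exceptions.BadRequest because
# api_core maps HTTP status codes onto typed exception classes:
from google.api_core import exceptions as core_exceptions

exc = core_exceptions.from_http_status(400, "parent is malformed")
assert isinstance(exc, core_exceptions.BadRequest)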
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_batches(request) + + +def test_list_batches_rest_flattened(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = batches.ListBatchesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = batches.ListBatchesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_batches(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/batches" % client.transport._host, + args[1], + ) + + +def test_list_batches_rest_flattened_error(transport: str = "rest"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_batches( + batches.ListBatchesRequest(), + parent="parent_value", + ) + + +def test_list_batches_rest_pager(transport: str = "rest"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + batches.Batch(), + ], + next_page_token="abc", + ), + batches.ListBatchesResponse( + batches=[], + next_page_token="def", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + ], + next_page_token="ghi", + ), + batches.ListBatchesResponse( + batches=[ + batches.Batch(), + batches.Batch(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(batches.ListBatchesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_batches(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, batches.Batch) for i in results) + + pages = list(client.list_batches(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + batches.DeleteBatchRequest, + dict, + ], +) +def test_delete_batch_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/batches/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_batch(request) + + # Establish that the response is the type that we expect. 
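# The REST pager test above fabricates several requests.Response objects by
# writing each serialized page straight into the private _content attribute;
# the same trick in miniature:
from requests import Response

fake = Response()
fake.status_code = 200
fake._content = b'{"nextPageToken": "abc"}'
assert fake.json()["nextPageToken"] == "abc"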
+ assert response is None + + +def test_delete_batch_rest_required_fields(request_type=batches.DeleteBatchRequest): + transport_class = transports.BatchControllerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_batch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_batch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_batch(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_batch_rest_unset_required_fields(): + transport = transports.BatchControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_batch._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_batch_rest_interceptors(null_interceptor): + transport = transports.BatchControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BatchControllerRestInterceptor(), + ) + client = BatchControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BatchControllerRestInterceptor, "pre_delete_batch" + ) as pre: + pre.assert_not_called() + pb_message = batches.DeleteBatchRequest.pb(batches.DeleteBatchRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = batches.DeleteBatchRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_batch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_batch_rest_bad_request( + transport: str = "rest", request_type=batches.DeleteBatchRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/batches/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_batch(request) + + +def test_delete_batch_rest_flattened(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/batches/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_batch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/batches/*}" % client.transport._host, + args[1], + ) + + +def test_delete_batch_rest_flattened_error(transport: str = "rest"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_batch( + batches.DeleteBatchRequest(), + name="name_value", + ) + + +def test_delete_batch_rest_error(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BatchControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BatchControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BatchControllerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BatchControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BatchControllerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BatchControllerClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BatchControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BatchControllerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BatchControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BatchControllerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
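# The ValueError cases above all reduce to one rule: a ready-made transport
# already carries its own credentials, so a client accepts either a transport
# instance or credential options, never both. Correct usage, reusing this
# module's imports:
transport = transports.BatchControllerGrpcTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
client = BatchControllerClient(transport=transport)  # no credentials here
assert client.transport is transport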
+ transport = transports.BatchControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BatchControllerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BatchControllerGrpcTransport, + transports.BatchControllerGrpcAsyncIOTransport, + transports.BatchControllerRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = BatchControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BatchControllerGrpcTransport, + ) + + +def test_batch_controller_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BatchControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_batch_controller_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dataproc_v1.services.batch_controller.transports.BatchControllerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.BatchControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
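+    # With __init__ mocked to a no-op, nothing is wired up, so every stub below
+    # must raise NotImplementedError until a concrete transport overrides it.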
+ methods = ( + "create_batch", + "get_batch", + "list_batches", + "delete_batch", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_batch_controller_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.batch_controller.transports.BatchControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BatchControllerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_batch_controller_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dataproc_v1.services.batch_controller.transports.BatchControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BatchControllerTransport() + adc.assert_called_once() + + +def test_batch_controller_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BatchControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BatchControllerGrpcTransport, + transports.BatchControllerGrpcAsyncIOTransport, + ], +) +def test_batch_controller_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
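+    # google.auth.default is patched so the test never performs a real
+    # Application Default Credentials lookup.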
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BatchControllerGrpcTransport, + transports.BatchControllerGrpcAsyncIOTransport, + transports.BatchControllerRestTransport, + ], +) +def test_batch_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BatchControllerGrpcTransport, grpc_helpers), + (transports.BatchControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_batch_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BatchControllerGrpcTransport, + transports.BatchControllerGrpcAsyncIOTransport, + ], +) +def test_batch_controller_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
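+    # The callback should be invoked and its (cert, key) pair forwarded to
+    # grpc.ssl_channel_credentials.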
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_batch_controller_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.BatchControllerRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_batch_controller_rest_lro_client():
+    client = BatchControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_batch_controller_host_no_port(transport_name):
+    client = BatchControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataproc.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "dataproc.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://dataproc.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_batch_controller_host_with_port(transport_name):
+    client = BatchControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataproc.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "dataproc.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://dataproc.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_batch_controller_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = BatchControllerClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = BatchControllerClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_batch._session
+    session2 = client2.transport.create_batch._session
+    assert session1 != session2
+    session1 = client1.transport.get_batch._session
+    session2 = client2.transport.get_batch._session
+    assert session1 != session2
+    session1 = client1.transport.list_batches._session
+    session2 = client2.transport.list_batches._session
+    assert session1 != session2
+    session1 = client1.transport.delete_batch._session
+    session2 = client2.transport.delete_batch._session
+    assert session1 != session2
+
+
+def test_batch_controller_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.BatchControllerGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_batch_controller_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.BatchControllerGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.BatchControllerGrpcTransport,
+        transports.BatchControllerGrpcAsyncIOTransport,
+    ],
+)
+def test_batch_controller_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
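+# This variant exercises the path where the SSL credentials come from
+# google.auth.transport.grpc.SslCredentials rather than an explicit callback.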
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.BatchControllerGrpcTransport,
+        transports.BatchControllerGrpcAsyncIOTransport,
+    ],
+)
+def test_batch_controller_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_batch_controller_grpc_lro_client():
+    client = BatchControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_batch_controller_grpc_lro_async_client():
+    client = BatchControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_batch_path():
+    project = "squid"
+    location = "clam"
+    batch = "whelk"
+    expected = "projects/{project}/locations/{location}/batches/{batch}".format(
+        project=project,
+        location=location,
+        batch=batch,
+    )
+    actual = BatchControllerClient.batch_path(project, location, batch)
+    assert expected == actual
+
+
+def test_parse_batch_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "batch": "nudibranch",
+    }
+    path = BatchControllerClient.batch_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = BatchControllerClient.parse_batch_path(path)
+    assert expected == actual
+
+
+def test_service_path():
+    project = "cuttlefish"
+    location = "mussel"
+    service = "winkle"
+    expected = "projects/{project}/locations/{location}/services/{service}".format(
+        project=project,
+        location=location,
+        service=service,
+    )
+    actual = BatchControllerClient.service_path(project, location, service)
+    assert expected == actual
+
+
+def test_parse_service_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "service": "abalone",
+    }
+    path = BatchControllerClient.service_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = BatchControllerClient.parse_service_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = BatchControllerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = BatchControllerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BatchControllerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = BatchControllerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = BatchControllerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BatchControllerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = BatchControllerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = BatchControllerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BatchControllerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = BatchControllerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = BatchControllerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BatchControllerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = BatchControllerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = BatchControllerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BatchControllerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.BatchControllerTransport, "_prep_wrapped_messages" + ) as prep: + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.BatchControllerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = BatchControllerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
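+    # api-core maps the mocked 400 status to core_exceptions.BadRequest before
+    # it reaches the caller.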
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
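+        # delete_operation has an empty response message, so "{}" stands in for
+        # the JSON body.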
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
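+    # Only the HTTP session is mocked; transcoding of the request still runs
+    # against the real HTTP rules for the operations mixin.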
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
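+    # x-goog-request-params is the routing header built from the request's
+    # resource name.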
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
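+        # FakeUnaryUnaryCall makes the mocked stub awaitable, mirroring a real
+        # async gRPC call.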
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = BatchControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = BatchControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = BatchControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = BatchControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = BatchControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BatchControllerClient, transports.BatchControllerGrpcTransport), + (BatchControllerAsyncClient, transports.BatchControllerGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py new file mode 100644 index 000000000000..f9c925c835e7 --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -0,0 +1,8312 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import interval_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.dataproc_v1.services.cluster_controller import ( + ClusterControllerAsyncClient, + ClusterControllerClient, + pagers, + transports, +) +from google.cloud.dataproc_v1.types import clusters, operations, shared + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
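+# (With a localhost default endpoint there is no distinct mTLS variant to derive,
+# so the tests substitute "foo.googleapis.com" whenever "localhost" appears in
+# client.DEFAULT_ENDPOINT; otherwise the real default endpoint is kept as-is.)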
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ClusterControllerClient._get_default_mtls_endpoint(None) is None + assert ( + ClusterControllerClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ClusterControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ClusterControllerClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ClusterControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ClusterControllerClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ClusterControllerClient, "grpc"), + (ClusterControllerAsyncClient, "grpc_asyncio"), + (ClusterControllerClient, "rest"), + ], +) +def test_cluster_controller_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ClusterControllerGrpcTransport, "grpc"), + (transports.ClusterControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ClusterControllerRestTransport, "rest"), + ], +) +def test_cluster_controller_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ClusterControllerClient, "grpc"), + (ClusterControllerAsyncClient, "grpc_asyncio"), + (ClusterControllerClient, "rest"), + ], +) +def test_cluster_controller_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +def test_cluster_controller_client_get_transport_class(): + transport = ClusterControllerClient.get_transport_class() + available_transports = [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerRestTransport, + ] + assert transport in available_transports + + transport = ClusterControllerClient.get_transport_class("grpc") + assert transport == transports.ClusterControllerGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc"), + ( + ClusterControllerAsyncClient, + transports.ClusterControllerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ClusterControllerClient, transports.ClusterControllerRestTransport, "rest"), + ], +) +@mock.patch.object( + ClusterControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterControllerClient), +) +@mock.patch.object( + ClusterControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterControllerAsyncClient), +) +def test_cluster_controller_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ClusterControllerClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ClusterControllerClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            ClusterControllerClient,
+            transports.ClusterControllerGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            ClusterControllerAsyncClient,
+            transports.ClusterControllerGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            ClusterControllerClient,
+            transports.ClusterControllerGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            ClusterControllerAsyncClient,
+            transports.ClusterControllerGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            ClusterControllerClient,
+            transports.ClusterControllerRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            ClusterControllerClient,
+            transports.ClusterControllerRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    ClusterControllerClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ClusterControllerClient),
+)
+@mock.patch.object(
+    ClusterControllerAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(ClusterControllerAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_cluster_controller_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        options = client_options.ClientOptions(
+            client_cert_source=client_cert_source_callback
+        )
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=True,
+            ):
+                with mock.patch(
+                    "google.auth.transport.mtls.default_client_cert_source",
+                    return_value=client_cert_source_callback,
+                ):
+                    if use_client_cert_env == "false":
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
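+    # (With neither an explicit client_cert_source nor an ADC default client cert
+    # available, the client should fall back to the regular endpoint and pass
+    # client_cert_source_for_mtls=None, regardless of use_client_cert_env.)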
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [ClusterControllerClient, ClusterControllerAsyncClient] +) +@mock.patch.object( + ClusterControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterControllerClient), +) +@mock.patch.object( + ClusterControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ClusterControllerAsyncClient), +) +def test_cluster_controller_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
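+    # (Here get_mtls_endpoint_and_cert_source() should return the mTLS endpoint
+    # together with the mocked default client cert source.)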
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc"), + ( + ClusterControllerAsyncClient, + transports.ClusterControllerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ClusterControllerClient, transports.ClusterControllerRestTransport, "rest"), + ], +) +def test_cluster_controller_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ClusterControllerClient, + transports.ClusterControllerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ClusterControllerAsyncClient, + transports.ClusterControllerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + ClusterControllerClient, + transports.ClusterControllerRestTransport, + "rest", + None, + ), + ], +) +def test_cluster_controller_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cluster_controller_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ClusterControllerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ClusterControllerClient, + transports.ClusterControllerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ClusterControllerAsyncClient, + transports.ClusterControllerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cluster_controller_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
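+    # (The mocked grpc_helpers.create_channel below should receive the credentials
+    # loaded from "credentials.json" -- not ADC -- along with the Dataproc default
+    # scope and default host.)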
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.CreateClusterRequest, + dict, + ], +) +def test_create_cluster(request_type, transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.CreateClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + client.create_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.CreateClusterRequest() + + +@pytest.mark.asyncio +async def test_create_cluster_async( + transport: str = "grpc_asyncio", request_type=clusters.CreateClusterRequest +): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.CreateClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_async_from_dict():
+    await test_create_cluster_async(request_type=dict)
+
+
+def test_create_cluster_field_headers():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.CreateClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.create_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.CreateClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.create_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value",
+    ) in kw["metadata"]
+
+
+def test_create_cluster_flattened():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_cluster(
+            project_id="project_id_value",
+            region="region_value",
+            cluster=clusters.Cluster(project_id="project_id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
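+        # (The flattened keyword arguments above are merged into a single
+        # CreateClusterRequest, so each field can be checked on args[0].)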
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].region
+        mock_val = "region_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = clusters.Cluster(project_id="project_id_value")
+        assert arg == mock_val
+
+
+def test_create_cluster_flattened_error():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_cluster(
+            clusters.CreateClusterRequest(),
+            project_id="project_id_value",
+            region="region_value",
+            cluster=clusters.Cluster(project_id="project_id_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_cluster(
+            project_id="project_id_value",
+            region="region_value",
+            cluster=clusters.Cluster(project_id="project_id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].region
+        mock_val = "region_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = clusters.Cluster(project_id="project_id_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_cluster_flattened_error_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_cluster(
+            clusters.CreateClusterRequest(),
+            project_id="project_id_value",
+            region="region_value",
+            cluster=clusters.Cluster(project_id="project_id_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        clusters.UpdateClusterRequest,
+        dict,
+    ],
+)
+def test_update_cluster(request_type, transport: str = "grpc"):
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.update_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.UpdateClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_update_cluster_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        client.update_cluster()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.UpdateClusterRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_cluster_async(
+    transport: str = "grpc_asyncio", request_type=clusters.UpdateClusterRequest
+):
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        response = await client.update_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.UpdateClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_update_cluster_async_from_dict():
+    await test_update_cluster_async(request_type=dict)
+
+
+def test_update_cluster_field_headers():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.UpdateClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.update_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_cluster_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.UpdateClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.update_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+def test_update_cluster_flattened():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_cluster(
+            project_id="project_id_value",
+            region="region_value",
+            cluster_name="cluster_name_value",
+            cluster=clusters.Cluster(project_id="project_id_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].region
+        mock_val = "region_value"
+        assert arg == mock_val
+        arg = args[0].cluster_name
+        mock_val = "cluster_name_value"
+        assert arg == mock_val
+        arg = args[0].cluster
+        mock_val = clusters.Cluster(project_id="project_id_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_cluster_flattened_error():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_cluster(
+            clusters.UpdateClusterRequest(),
+            project_id="project_id_value",
+            region="region_value",
+            cluster_name="cluster_name_value",
+            cluster=clusters.Cluster(project_id="project_id_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_cluster_flattened_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.update_cluster( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + cluster=clusters.Cluster(project_id="project_id_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val + arg = args[0].cluster + mock_val = clusters.Cluster(project_id="project_id_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cluster_flattened_error_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cluster( + clusters.UpdateClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + cluster=clusters.Cluster(project_id="project_id_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.StopClusterRequest, + dict, + ], +) +def test_stop_cluster(request_type, transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.stop_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StopClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_stop_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + client.stop_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StopClusterRequest() + + +@pytest.mark.asyncio +async def test_stop_cluster_async( + transport: str = "grpc_asyncio", request_type=clusters.StopClusterRequest +): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        response = await client.stop_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.StopClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_stop_cluster_async_from_dict():
+    await test_stop_cluster_async(request_type=dict)
+
+
+def test_stop_cluster_field_headers():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.StopClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.stop_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_stop_cluster_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.StopClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.stop_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        clusters.StartClusterRequest,
+        dict,
+    ],
+)
+def test_start_cluster(request_type, transport: str = "grpc"):
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StartClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_start_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + client.start_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StartClusterRequest() + + +@pytest.mark.asyncio +async def test_start_cluster_async( + transport: str = "grpc_asyncio", request_type=clusters.StartClusterRequest +): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StartClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_start_cluster_async_from_dict(): + await test_start_cluster_async(request_type=dict) + + +def test_start_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.StartClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
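+    # The routing values travel in the "x-goog-request-params" metadata entry, encoded as "&"-joined key=value pairs.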
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_start_cluster_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.StartClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.start_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.start_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        clusters.DeleteClusterRequest,
+        dict,
+    ],
+)
+def test_delete_cluster(request_type, transport: str = "grpc"):
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.delete_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.DeleteClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_cluster_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        client.delete_cluster()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.DeleteClusterRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_async(
+    transport: str = "grpc_asyncio", request_type=clusters.DeleteClusterRequest
+):
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        response = await client.delete_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.DeleteClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_async_from_dict():
+    await test_delete_cluster_async(request_type=dict)
+
+
+def test_delete_cluster_field_headers():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.DeleteClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.delete_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_cluster_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.DeleteClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.delete_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_cluster_flattened():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ client.delete_cluster( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val + + +def test_delete_cluster_flattened_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cluster( + clusters.DeleteClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_cluster_flattened_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_cluster( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_cluster_flattened_error_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_cluster( + clusters.DeleteClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.GetClusterRequest, + dict, + ], +) +def test_get_cluster(request_type, transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
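+        # Unlike the LRO methods above, get_cluster returns a Cluster resource directly rather than an Operation.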
+ call.return_value = clusters.Cluster( + project_id="project_id_value", + cluster_name="cluster_name_value", + cluster_uuid="cluster_uuid_value", + ) + response = client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.GetClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clusters.Cluster) + assert response.project_id == "project_id_value" + assert response.cluster_name == "cluster_name_value" + assert response.cluster_uuid == "cluster_uuid_value" + + +def test_get_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + client.get_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.GetClusterRequest() + + +@pytest.mark.asyncio +async def test_get_cluster_async( + transport: str = "grpc_asyncio", request_type=clusters.GetClusterRequest +): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clusters.Cluster( + project_id="project_id_value", + cluster_name="cluster_name_value", + cluster_uuid="cluster_uuid_value", + ) + ) + response = await client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.GetClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clusters.Cluster) + assert response.project_id == "project_id_value" + assert response.cluster_name == "cluster_name_value" + assert response.cluster_uuid == "cluster_uuid_value" + + +@pytest.mark.asyncio +async def test_get_cluster_async_from_dict(): + await test_get_cluster_async(request_type=dict) + + +def test_get_cluster_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clusters.GetClusterRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.cluster_name = "cluster_name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: + call.return_value = clusters.Cluster() + client.get_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_cluster_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.GetClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.Cluster())
+        await client.get_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+def test_get_cluster_flattened():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = clusters.Cluster()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_cluster(
+            project_id="project_id_value",
+            region="region_value",
+            cluster_name="cluster_name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project_id
+        mock_val = "project_id_value"
+        assert arg == mock_val
+        arg = args[0].region
+        mock_val = "region_value"
+        assert arg == mock_val
+        arg = args[0].cluster_name
+        mock_val = "cluster_name_value"
+        assert arg == mock_val
+
+
+def test_get_cluster_flattened_error():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_cluster(
+            clusters.GetClusterRequest(),
+            project_id="project_id_value",
+            region="region_value",
+            cluster_name="cluster_name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_cluster_flattened_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = clusters.Cluster()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.Cluster())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.get_cluster( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_cluster_flattened_error_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_cluster( + clusters.GetClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.ListClustersRequest, + dict, + ], +) +def test_list_clusters(request_type, transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = clusters.ListClustersResponse( + next_page_token="next_page_token_value", + ) + response = client.list_clusters(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.ListClustersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_clusters_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + client.list_clusters() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.ListClustersRequest() + + +@pytest.mark.asyncio +async def test_list_clusters_async( + transport: str = "grpc_asyncio", request_type=clusters.ListClustersRequest +): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. 
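+        # FakeUnaryUnaryCall wraps the response in an awaitable object that behaves like a real grpc.aio unary-unary call.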
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            clusters.ListClustersResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = await client.list_clusters(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.ListClustersRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListClustersAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_async_from_dict():
+    await test_list_clusters_async(request_type=dict)
+
+
+def test_list_clusters_field_headers():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.ListClustersRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        call.return_value = clusters.ListClustersResponse()
+        client.list_clusters(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_clusters_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.ListClustersRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            clusters.ListClustersResponse()
+        )
+        await client.list_clusters(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value",
+    ) in kw["metadata"]
+
+
+def test_list_clusters_flattened():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_clusters), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = clusters.ListClustersResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_clusters(
+            project_id="project_id_value",
+            region="region_value",
+            filter="filter_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val + + +def test_list_clusters_flattened_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + clusters.ListClustersRequest(), + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_clusters_flattened_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = clusters.ListClustersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clusters.ListClustersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_clusters( + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_clusters_flattened_error_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_clusters( + clusters.ListClustersRequest(), + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + +def test_list_clusters_pager(transport_name: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Set the response to a series of pages. 
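+        # The trailing RuntimeError makes the mocked stub raise if the pager requests more pages than the four staged here.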
+ call.side_effect = ( + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + clusters.Cluster(), + ], + next_page_token="abc", + ), + clusters.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + ], + next_page_token="ghi", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", ""), + ("region", ""), + ) + ), + ) + pager = client.list_clusters(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clusters.Cluster) for i in results) + + +def test_list_clusters_pages(transport_name: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + clusters.Cluster(), + ], + next_page_token="abc", + ), + clusters.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + ], + next_page_token="ghi", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + ], + ), + RuntimeError, + ) + pages = list(client.list_clusters(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_clusters_async_pager(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + clusters.Cluster(), + ], + next_page_token="abc", + ), + clusters.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + ], + next_page_token="ghi", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_clusters( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, clusters.Cluster) for i in responses) + + +@pytest.mark.asyncio +async def test_list_clusters_async_pages(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_clusters), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + clusters.Cluster(), + ], + next_page_token="abc", + ), + clusters.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + ], + next_page_token="ghi", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_clusters(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.DiagnoseClusterRequest, + dict, + ], +) +def test_diagnose_cluster(request_type, transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.diagnose_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.DiagnoseClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_diagnose_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + client.diagnose_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.DiagnoseClusterRequest() + + +@pytest.mark.asyncio +async def test_diagnose_cluster_async( + transport: str = "grpc_asyncio", request_type=clusters.DiagnoseClusterRequest +): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.diagnose_cluster(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clusters.DiagnoseClusterRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_diagnose_cluster_async_from_dict():
+    await test_diagnose_cluster_async(request_type=dict)
+
+
+def test_diagnose_cluster_field_headers():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.DiagnoseClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        client.diagnose_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_diagnose_cluster_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = clusters.DiagnoseClusterRequest()
+
+    request.project_id = "project_id_value"
+    request.region = "region_value"
+    request.cluster_name = "cluster_name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.diagnose_cluster(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project_id=project_id_value&region=region_value&cluster_name=cluster_name_value",
+    ) in kw["metadata"]
+
+
+def test_diagnose_cluster_flattened():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.diagnose_cluster(
+            project_id="project_id_value",
+            region="region_value",
+            cluster_name="cluster_name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val + + +def test_diagnose_cluster_flattened_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.diagnose_cluster( + clusters.DiagnoseClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + +@pytest.mark.asyncio +async def test_diagnose_cluster_flattened_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.diagnose_cluster( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].cluster_name + mock_val = "cluster_name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_diagnose_cluster_flattened_error_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
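+    # Mixing the two is ambiguous, so the client raises ValueError rather than guessing which values take precedence.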
+ with pytest.raises(ValueError): + await client.diagnose_cluster( + clusters.DiagnoseClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.CreateClusterRequest, + dict, + ], +) +def test_create_cluster_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request_init["cluster"] = { + "project_id": "project_id_value", + "cluster_name": "cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": {"node_group_uri": "node_group_uri_value"}, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + }, + "master_config": { + "num_instances": 1399, + "instance_names": ["instance_names_value1", "instance_names_value2"], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + 
"kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {"seconds": 751, "nanos": 543}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": {"http_ports": {}, "enable_http_port_access": True}, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "virtual_cluster_config": { + "staging_bucket": "staging_bucket_value", + "kubernetes_cluster_config": { + "kubernetes_namespace": "kubernetes_namespace_value", + "gke_cluster_config": { + "gke_cluster_target": "gke_cluster_target_value", + "node_pool_target": [ + { + "node_pool": "node_pool_value", + "roles": [1], + "node_pool_config": { + "config": { + "machine_type": "machine_type_value", + "local_ssd_count": 1596, + "preemptible": True, + "accelerators": [ + { + "accelerator_count": 1805, + "accelerator_type": "accelerator_type_value", + "gpu_partition_size": "gpu_partition_size_value", + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "boot_disk_kms_key": "boot_disk_kms_key_value", + "spot": True, + }, + "locations": ["locations_value1", "locations_value2"], + "autoscaling": { + "min_node_count": 1489, + "max_node_count": 1491, + }, + }, + } + ], + }, + "kubernetes_software_config": { + "component_version": {}, + "properties": {}, + }, + }, + "auxiliary_services_config": { + "metastore_config": {}, + "spark_history_server_config": { + "dataproc_cluster": "dataproc_cluster_value" + }, + }, + }, + "labels": {}, + "status": { + "state": 1, + "detail": "detail_value", + "state_start_time": {}, + "substate": 1, + }, + "status_history": {}, + "cluster_uuid": "cluster_uuid_value", + "metrics": {"hdfs_metrics": {}, "yarn_metrics": {}}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_cluster(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_cluster_rest_required_fields( + request_type=clusters.CreateClusterRequest, +): + transport_class = transports.ClusterControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "action_on_failed_primary_workers", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
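+            # path_template.transcode normally derives the HTTP method, URI, body, and query params from the request; it is stubbed here with a fixed result.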
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cluster_rest_unset_required_fields(): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "actionOnFailedPrimaryWorkers", + "requestId", + ) + ) + & set( + ( + "projectId", + "region", + "cluster", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterControllerRestInterceptor(), + ) + client = ClusterControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_create_cluster" + ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "pre_create_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = clusters.CreateClusterRequest.pb(clusters.CreateClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = clusters.CreateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_cluster_rest_bad_request( + transport: str = "rest", request_type=clusters.CreateClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request_init["cluster"] = { + "project_id": "project_id_value", + "cluster_name": "cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + 
"service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": {"node_group_uri": "node_group_uri_value"}, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + }, + "master_config": { + "num_instances": 1399, + "instance_names": ["instance_names_value1", "instance_names_value2"], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {"seconds": 751, "nanos": 543}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": {"http_ports": {}, "enable_http_port_access": True}, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + 
"dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "virtual_cluster_config": { + "staging_bucket": "staging_bucket_value", + "kubernetes_cluster_config": { + "kubernetes_namespace": "kubernetes_namespace_value", + "gke_cluster_config": { + "gke_cluster_target": "gke_cluster_target_value", + "node_pool_target": [ + { + "node_pool": "node_pool_value", + "roles": [1], + "node_pool_config": { + "config": { + "machine_type": "machine_type_value", + "local_ssd_count": 1596, + "preemptible": True, + "accelerators": [ + { + "accelerator_count": 1805, + "accelerator_type": "accelerator_type_value", + "gpu_partition_size": "gpu_partition_size_value", + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "boot_disk_kms_key": "boot_disk_kms_key_value", + "spot": True, + }, + "locations": ["locations_value1", "locations_value2"], + "autoscaling": { + "min_node_count": 1489, + "max_node_count": 1491, + }, + }, + } + ], + }, + "kubernetes_software_config": { + "component_version": {}, + "properties": {}, + }, + }, + "auxiliary_services_config": { + "metastore_config": {}, + "spark_history_server_config": { + "dataproc_cluster": "dataproc_cluster_value" + }, + }, + }, + "labels": {}, + "status": { + "state": 1, + "detail": "detail_value", + "state_start_time": {}, + "substate": 1, + }, + "status_history": {}, + "cluster_uuid": "cluster_uuid_value", + "metrics": {"hdfs_metrics": {}, "yarn_metrics": {}}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_cluster(request) + + +def test_create_cluster_rest_flattened(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + cluster=clusters.Cluster(project_id="project_id_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/clusters" + % client.transport._host, + args[1], + ) + + +def test_create_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cluster( + clusters.CreateClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster=clusters.Cluster(project_id="project_id_value"), + ) + + +def test_create_cluster_rest_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.UpdateClusterRequest, + dict, + ], +) +def test_update_cluster_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request_init["cluster"] = { + "project_id": "project_id_value", + "cluster_name": "cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": {"node_group_uri": "node_group_uri_value"}, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + }, + "master_config": { + "num_instances": 1399, + "instance_names": ["instance_names_value1", "instance_names_value2"], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + 
"instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {"seconds": 751, "nanos": 543}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": {"http_ports": {}, "enable_http_port_access": True}, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "virtual_cluster_config": { + "staging_bucket": "staging_bucket_value", + "kubernetes_cluster_config": { + "kubernetes_namespace": "kubernetes_namespace_value", + "gke_cluster_config": { + "gke_cluster_target": "gke_cluster_target_value", + "node_pool_target": [ + { + "node_pool": "node_pool_value", + "roles": [1], + "node_pool_config": { + "config": { + "machine_type": "machine_type_value", + "local_ssd_count": 1596, + "preemptible": True, + "accelerators": [ + { + "accelerator_count": 1805, + "accelerator_type": "accelerator_type_value", + "gpu_partition_size": "gpu_partition_size_value", + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "boot_disk_kms_key": "boot_disk_kms_key_value", + "spot": True, + }, + "locations": ["locations_value1", "locations_value2"], + "autoscaling": { + "min_node_count": 1489, + "max_node_count": 1491, + }, + }, + } + ], + }, + "kubernetes_software_config": { + "component_version": {}, + "properties": {}, + }, + }, + "auxiliary_services_config": { + "metastore_config": {}, + "spark_history_server_config": { + "dataproc_cluster": "dataproc_cluster_value" + }, + }, + }, + "labels": {}, + "status": { + "state": 1, + "detail": "detail_value", + "state_start_time": {}, + "substate": 1, + }, + "status_history": {}, + "cluster_uuid": "cluster_uuid_value", + "metrics": {"hdfs_metrics": {}, "yarn_metrics": {}}, + } + request = request_type(**request_init) 
+ + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_cluster_rest_required_fields( + request_type=clusters.UpdateClusterRequest, +): + transport_class = transports.ClusterControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request_init["cluster_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + jsonified_request["clusterName"] = "cluster_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "graceful_decommission_timeout", + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "clusterName" in jsonified_request + assert jsonified_request["clusterName"] == "cluster_name_value" + + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_cluster_rest_unset_required_fields(): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "gracefulDecommissionTimeout", + "requestId", + "updateMask", + ) + ) + & set( + ( + "projectId", + "region", + "clusterName", + "cluster", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterControllerRestInterceptor(), + ) + client = ClusterControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_update_cluster" + ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "pre_update_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = clusters.UpdateClusterRequest.pb(clusters.UpdateClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = clusters.UpdateClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_cluster_rest_bad_request( + transport: str = "rest", request_type=clusters.UpdateClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request_init["cluster"] = { + "project_id": "project_id_value", + "cluster_name": "cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + 
"service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": {"node_group_uri": "node_group_uri_value"}, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": {"enable_confidential_compute": True}, + }, + "master_config": { + "num_instances": 1399, + "instance_names": ["instance_names_value1", "instance_names_value2"], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": {"gce_pd_kms_key_name": "gce_pd_kms_key_name_value"}, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {"seconds": 751, "nanos": 543}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": {"http_ports": {}, "enable_http_port_access": True}, + "metastore_config": 
{ + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "virtual_cluster_config": { + "staging_bucket": "staging_bucket_value", + "kubernetes_cluster_config": { + "kubernetes_namespace": "kubernetes_namespace_value", + "gke_cluster_config": { + "gke_cluster_target": "gke_cluster_target_value", + "node_pool_target": [ + { + "node_pool": "node_pool_value", + "roles": [1], + "node_pool_config": { + "config": { + "machine_type": "machine_type_value", + "local_ssd_count": 1596, + "preemptible": True, + "accelerators": [ + { + "accelerator_count": 1805, + "accelerator_type": "accelerator_type_value", + "gpu_partition_size": "gpu_partition_size_value", + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "boot_disk_kms_key": "boot_disk_kms_key_value", + "spot": True, + }, + "locations": ["locations_value1", "locations_value2"], + "autoscaling": { + "min_node_count": 1489, + "max_node_count": 1491, + }, + }, + } + ], + }, + "kubernetes_software_config": { + "component_version": {}, + "properties": {}, + }, + }, + "auxiliary_services_config": { + "metastore_config": {}, + "spark_history_server_config": { + "dataproc_cluster": "dataproc_cluster_value" + }, + }, + }, + "labels": {}, + "status": { + "state": 1, + "detail": "detail_value", + "state_start_time": {}, + "substate": 1, + }, + "status_history": {}, + "cluster_uuid": "cluster_uuid_value", + "metrics": {"hdfs_metrics": {}, "yarn_metrics": {}}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_cluster(request) + + +def test_update_cluster_rest_flattened(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + cluster=clusters.Cluster(project_id="project_id_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + % client.transport._host, + args[1], + ) + + +def test_update_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cluster( + clusters.UpdateClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + cluster=clusters.Cluster(project_id="project_id_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_cluster_rest_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.StopClusterRequest, + dict, + ], +) +def test_stop_cluster_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.stop_cluster(request) + + # Establish that the response is the type that we expect. 
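+ # stop_cluster likewise returns a long-running Operation; matching the
+ # operation name proves the mocked JSON payload survived the REST
+ # deserialization path.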
+ assert response.operation.name == "operations/spam" + + +def test_stop_cluster_rest_required_fields(request_type=clusters.StopClusterRequest): + transport_class = transports.ClusterControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request_init["cluster_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + jsonified_request["clusterName"] = "cluster_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stop_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "clusterName" in jsonified_request + assert jsonified_request["clusterName"] == "cluster_name_value" + + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.stop_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_stop_cluster_rest_unset_required_fields(): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stop_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "region", + "clusterName", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterControllerRestInterceptor(), + ) + client = ClusterControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_stop_cluster" + ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "pre_stop_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = clusters.StopClusterRequest.pb(clusters.StopClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = clusters.StopClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.stop_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_cluster_rest_bad_request( + transport: str = "rest", request_type=clusters.StopClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
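+ # api_core maps the mocked 400 status to core_exceptions.BadRequest
+ # before the client method returns, which is what pytest.raises()
+ # asserts here.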
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_cluster(request) + + +def test_stop_cluster_rest_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.StartClusterRequest, + dict, + ], +) +def test_start_cluster_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.start_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_start_cluster_rest_required_fields(request_type=clusters.StartClusterRequest): + transport_class = transports.ClusterControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request_init["cluster_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + jsonified_request["clusterName"] = "cluster_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).start_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "clusterName" in jsonified_request + assert jsonified_request["clusterName"] == "cluster_name_value" + + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
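+ # Session.request is patched on the requests.Session class itself
+ # (not on the client's session instance), so the stub applies however
+ # the transport constructs its session internally.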
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.start_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_start_cluster_rest_unset_required_fields(): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.start_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "region", + "clusterName", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterControllerRestInterceptor(), + ) + client = ClusterControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_start_cluster" + ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "pre_start_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = clusters.StartClusterRequest.pb(clusters.StartClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = clusters.StartClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.start_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_cluster_rest_bad_request( + transport: str = "rest", request_type=clusters.StartClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_cluster(request) + + +def test_start_cluster_rest_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.DeleteClusterRequest, + dict, + ], +) +def test_delete_cluster_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_cluster(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_delete_cluster_rest_required_fields( + request_type=clusters.DeleteClusterRequest, +): + transport_class = transports.ClusterControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request_init["cluster_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + jsonified_request["clusterName"] = "cluster_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cluster._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
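+ # The set difference below must be empty: every still-unset field has
+ # to be an optional query parameter, never a path component. For
+ # example, {"cluster_uuid"} - {"cluster_uuid", "request_id"} == set(),
+ # while a stray {"project_id"} would survive the subtraction and fail
+ # the assert.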
+ assert not set(unset_fields) - set( + ( + "cluster_uuid", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "clusterName" in jsonified_request + assert jsonified_request["clusterName"] == "cluster_name_value" + + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cluster_rest_unset_required_fields(): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterUuid", + "requestId", + ) + ) + & set( + ( + "projectId", + "region", + "clusterName", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterControllerRestInterceptor(), + ) + client = ClusterControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_delete_cluster" + ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "pre_delete_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = clusters.DeleteClusterRequest.pb(clusters.DeleteClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + 
operations_pb2.Operation() + ) + + request = clusters.DeleteClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_cluster_rest_bad_request( + transport: str = "rest", request_type=clusters.DeleteClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_cluster(request) + + +def test_delete_cluster_rest_flattened(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + % client.transport._host, + args[1], + ) + + +def test_delete_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
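+ # Supplying a request object together with flattened keyword arguments
+ # is ambiguous, so the generated client raises ValueError rather than
+ # guessing which values should win.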
+ with pytest.raises(ValueError): + client.delete_cluster( + clusters.DeleteClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + +def test_delete_cluster_rest_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.GetClusterRequest, + dict, + ], +) +def test_get_cluster_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = clusters.Cluster( + project_id="project_id_value", + cluster_name="cluster_name_value", + cluster_uuid="cluster_uuid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = clusters.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_cluster(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, clusters.Cluster) + assert response.project_id == "project_id_value" + assert response.cluster_name == "cluster_name_value" + assert response.cluster_uuid == "cluster_uuid_value" + + +def test_get_cluster_rest_required_fields(request_type=clusters.GetClusterRequest): + transport_class = transports.ClusterControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request_init["cluster_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + jsonified_request["clusterName"] = "cluster_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "clusterName" in jsonified_request + assert jsonified_request["clusterName"] == "cluster_name_value" + + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned 
response. + return_value = clusters.Cluster() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = clusters.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cluster_rest_unset_required_fields(): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "region", + "clusterName", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterControllerRestInterceptor(), + ) + client = ClusterControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_get_cluster" + ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "pre_get_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = clusters.GetClusterRequest.pb(clusters.GetClusterRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = clusters.Cluster.to_json(clusters.Cluster()) + + request = clusters.GetClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = clusters.Cluster() + + client.get_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cluster_rest_bad_request( + transport: str = "rest", request_type=clusters.GetClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and 
fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cluster(request) + + +def test_get_cluster_rest_flattened(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = clusters.Cluster() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = clusters.Cluster.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + % client.transport._host, + args[1], + ) + + +def test_get_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cluster( + clusters.GetClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + +def test_get_cluster_rest_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.ListClustersRequest, + dict, + ], +) +def test_list_clusters_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
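+ # Only next_page_token needs a value here: the assertions below check + # the pager type and the token rather than the (empty) clusters list.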
+ return_value = clusters.ListClustersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = clusters.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_clusters(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListClustersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_clusters_rest_required_fields(request_type=clusters.ListClustersRequest): + transport_class = transports.ClusterControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_clusters._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = clusters.ListClustersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
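+ # Hard-coding "v1/sample_method" below sidesteps real URI construction; + # with genuine transcoding the GET uri would roughly follow the http rule + # "/v1/projects/{project_id}/regions/{region}/clusters" exercised by the + # flattened test further down.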
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = clusters.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_clusters(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_clusters_rest_unset_required_fields(): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials() + ) + + unset_fields = transport.list_clusters._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "projectId", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_clusters_rest_interceptors(null_interceptor): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterControllerRestInterceptor(), + ) + client = ClusterControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_list_clusters" + ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "pre_list_clusters" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = clusters.ListClustersRequest.pb(clusters.ListClustersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = clusters.ListClustersResponse.to_json( + clusters.ListClustersResponse() + ) + + request = clusters.ListClustersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = clusters.ListClustersResponse() + + client.list_clusters( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_clusters_rest_bad_request( + transport: str = "rest", request_type=clusters.ListClustersRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_clusters(request) + + +def test_list_clusters_rest_flattened(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = clusters.ListClustersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = clusters.ListClustersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_clusters(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/clusters" + % client.transport._host, + args[1], + ) + + +def test_list_clusters_rest_flattened_error(transport: str = "rest"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_clusters( + clusters.ListClustersRequest(), + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + +def test_list_clusters_rest_pager(transport: str = "rest"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
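+ # The pages below are chained by next_page_token ("abc" -> "def" -> "ghi"); + # the final page carries no token, so iteration stops after 6 clusters.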
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + clusters.Cluster(), + ], + next_page_token="abc", + ), + clusters.ListClustersResponse( + clusters=[], + next_page_token="def", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + ], + next_page_token="ghi", + ), + clusters.ListClustersResponse( + clusters=[ + clusters.Cluster(), + clusters.Cluster(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(clusters.ListClustersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1", "region": "sample2"} + + pager = client.list_clusters(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clusters.Cluster) for i in results) + + pages = list(client.list_clusters(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + clusters.DiagnoseClusterRequest, + dict, + ], +) +def test_diagnose_cluster_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.diagnose_cluster(request) + + # Establish that the response is the type that we expect. 
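+ # diagnose_cluster is a long-running operation, so only the raw Operation + # name is asserted here; no LRO result polling happens against the mock.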
+ assert response.operation.name == "operations/spam" + + +def test_diagnose_cluster_rest_required_fields( + request_type=clusters.DiagnoseClusterRequest, +): + transport_class = transports.ClusterControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request_init["cluster_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).diagnose_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + jsonified_request["clusterName"] = "cluster_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).diagnose_cluster._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "clusterName" in jsonified_request + assert jsonified_request["clusterName"] == "cluster_name_value" + + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
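+ # DiagnoseCluster maps to an http POST, so unlike the GET cases above the + # transcode result below must also carry the request as a "body".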
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.diagnose_cluster(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_diagnose_cluster_rest_unset_required_fields(): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials() + ) + + unset_fields = transport.diagnose_cluster._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "region", + "clusterName", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_diagnose_cluster_rest_interceptors(null_interceptor): + transport = transports.ClusterControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ClusterControllerRestInterceptor(), + ) + client = ClusterControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_diagnose_cluster" + ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "pre_diagnose_cluster" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = clusters.DiagnoseClusterRequest.pb( + clusters.DiagnoseClusterRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = clusters.DiagnoseClusterRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.diagnose_cluster( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_diagnose_cluster_rest_bad_request( + transport: str = "rest", request_type=clusters.DiagnoseClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error.
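+ # The REST transport surfaces an http 400 status as + # core_exceptions.BadRequest, which pytest.raises captures below.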
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.diagnose_cluster(request) + + +def test_diagnose_cluster_rest_flattened(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "project_id": "sample1", + "region": "sample2", + "cluster_name": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.diagnose_cluster(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose" + % client.transport._host, + args[1], + ) + + +def test_diagnose_cluster_rest_flattened_error(transport: str = "rest"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.diagnose_cluster( + clusters.DiagnoseClusterRequest(), + project_id="project_id_value", + region="region_value", + cluster_name="cluster_name_value", + ) + + +def test_diagnose_cluster_rest_error(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ClusterControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ClusterControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterControllerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
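+ # Credential-bearing client_options (api_key, credentials_file, scopes) + # presumably conflict with an already-constructed transport, hence the + # ValueError assertions in this test.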
+ transport = transports.ClusterControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ClusterControllerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ClusterControllerClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ClusterControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ClusterControllerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ClusterControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ClusterControllerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ClusterControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ClusterControllerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + transports.ClusterControllerRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = ClusterControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ClusterControllerGrpcTransport, + ) + + +def test_cluster_controller_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ClusterControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cluster_controller_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ClusterControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
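+ # The base transport is abstract; the concrete gRPC/REST subclasses + # override these stubs, so each method here raises NotImplementedError.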
+ methods = ( + "create_cluster", + "update_cluster", + "stop_cluster", + "start_cluster", + "delete_cluster", + "get_cluster", + "list_clusters", + "diagnose_cluster", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cluster_controller_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ClusterControllerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cluster_controller_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ClusterControllerTransport() + adc.assert_called_once() + + +def test_cluster_controller_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ClusterControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +def test_cluster_controller_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
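+ # google.auth.default() resolves Application Default Credentials; it is + # patched so the test never reads the real environment.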
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + transports.ClusterControllerRestTransport, + ], +) +def test_cluster_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterControllerGrpcTransport, grpc_helpers), + (transports.ClusterControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cluster_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
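+ # client_cert_source_callback presumably yields a (cert, key) byte pair; + # the assertion below verifies those bytes reach grpc.ssl_channel_credentials.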
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cluster_controller_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ClusterControllerRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_cluster_controller_rest_lro_client(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cluster_controller_host_no_port(transport_name): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dataproc.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cluster_controller_host_with_port(transport_name): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dataproc.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dataproc.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cluster_controller_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ClusterControllerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ClusterControllerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_cluster._session + session2 = client2.transport.create_cluster._session + assert session1 != session2 + session1 = client1.transport.update_cluster._session + session2 = client2.transport.update_cluster._session + assert session1 != session2 + session1 = client1.transport.stop_cluster._session + session2 = client2.transport.stop_cluster._session + assert session1 != session2 + session1 = client1.transport.start_cluster._session + session2 = client2.transport.start_cluster._session + assert session1 != session2 + session1 = client1.transport.delete_cluster._session + session2 = 
client2.transport.delete_cluster._session + assert session1 != session2 + session1 = client1.transport.get_cluster._session + session2 = client2.transport.get_cluster._session + assert session1 != session2 + session1 = client1.transport.list_clusters._session + session2 = client2.transport.list_clusters._session + assert session1 != session2 + session1 = client1.transport.diagnose_cluster._session + session2 = client2.transport.diagnose_cluster._session + assert session1 != session2 + + +def test_cluster_controller_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ClusterControllerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_cluster_controller_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ClusterControllerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +def test_cluster_controller_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +def test_cluster_controller_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cluster_controller_grpc_lro_client(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cluster_controller_grpc_lro_async_client(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cluster_path(): + project = "squid" + location = "clam" + cluster = "whelk" + expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, + location=location, + cluster=cluster, + ) + actual = ClusterControllerClient.cluster_path(project, location, cluster) + assert expected == actual + + +def test_parse_cluster_path(): + expected = { + "project": "octopus", + "location": "oyster", + "cluster": "nudibranch", + } + path = ClusterControllerClient.cluster_path(**expected) + + # Check that the path construction is reversible.
+ actual = ClusterControllerClient.parse_cluster_path(path) + assert expected == actual + + +def test_node_group_path(): + project = "cuttlefish" + region = "mussel" + cluster = "winkle" + node_group = "nautilus" + expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format( + project=project, + region=region, + cluster=cluster, + node_group=node_group, + ) + actual = ClusterControllerClient.node_group_path( + project, region, cluster, node_group + ) + assert expected == actual + + +def test_parse_node_group_path(): + expected = { + "project": "scallop", + "region": "abalone", + "cluster": "squid", + "node_group": "clam", + } + path = ClusterControllerClient.node_group_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_node_group_path(path) + assert expected == actual + + +def test_service_path(): + project = "whelk" + location = "octopus" + service = "oyster" + expected = "projects/{project}/locations/{location}/services/{service}".format( + project=project, + location=location, + service=service, + ) + actual = ClusterControllerClient.service_path(project, location, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "service": "mussel", + } + path = ClusterControllerClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_service_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ClusterControllerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = ClusterControllerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ClusterControllerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = ClusterControllerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ClusterControllerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = ClusterControllerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ClusterControllerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = ClusterControllerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = ClusterControllerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ClusterControllerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = ClusterControllerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ClusterControllerTransport, "_prep_wrapped_messages" + ) as prep: + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ClusterControllerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ClusterControllerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
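+ # The IAM mixin methods address the cluster resource name parsed above; + # a mocked 400 response should again surface as core_exceptions.BadRequest.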
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
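+ # CancelOperation returns google.protobuf.Empty, surfaced as None by the + # client; the mocked http body is therefore the empty JSON object "{}".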
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
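+    # DeleteOperation responds with google.protobuf.Empty, which surfaces as None.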
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
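+        # (Passing a plain dict in place of a request message is supported; the
+        # client coerces it into operations_pb2.DeleteOperationRequest.)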
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
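+    # (The x-goog-request-params routing header is derived from request.name.)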
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
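+    # (FakeUnaryUnaryCall makes the mocked return value awaitable, mimicking a
+    # real async gRPC call object.)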
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
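+        # (As in the sync variant, the value is wrapped so it can be awaited.)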
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
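+    # (Any non-empty name works here; the test only checks that it is echoed
+    # into the request metadata.)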
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+def test_set_iam_policy_from_dict():
+    client = ClusterControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+
+        response = client.set_iam_policy(
+            request={
+                "resource": "resource_value",
+                "policy": policy_pb2.Policy(version=774),
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_from_dict_async():
+    client = ClusterControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
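+        # (The nested "policy" entry shows that full protobuf messages may be
+        # mixed into a dict request.)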
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
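+    # (For the IAM methods the routing header is keyed on `resource` rather
+    # than `name`.)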
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
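+    # TestIamPermissions returns the subset of requested permissions the caller
+    # holds; the mock simply echoes one permission back.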
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
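+    # (As with the other IAM calls, `resource` drives the routing header.)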
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
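+    # Exiting the `with client:` block must trigger transport.close().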
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ClusterControllerClient, transports.ClusterControllerGrpcTransport), + ( + ClusterControllerAsyncClient, + transports.ClusterControllerGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py new file mode 100644 index 000000000000..6720d7db548a --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -0,0 +1,7163 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.dataproc_v1.services.job_controller import ( + JobControllerAsyncClient, + JobControllerClient, + pagers, + transports, +) +from google.cloud.dataproc_v1.types import jobs + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
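+# (For example, a localhost default would be replaced by "foo.googleapis.com",
+# whose mTLS variant "foo.mtls.googleapis.com" differs from the plain endpoint.)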
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert JobControllerClient._get_default_mtls_endpoint(None) is None
+    assert (
+        JobControllerClient._get_default_mtls_endpoint(api_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        JobControllerClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        JobControllerClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        JobControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        JobControllerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+    )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (JobControllerClient, "grpc"),
+        (JobControllerAsyncClient, "grpc_asyncio"),
+        (JobControllerClient, "rest"),
+    ],
+)
+def test_job_controller_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "dataproc.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://dataproc.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.JobControllerGrpcTransport, "grpc"),
+        (transports.JobControllerGrpcAsyncIOTransport, "grpc_asyncio"),
+        (transports.JobControllerRestTransport, "rest"),
+    ],
+)
+def test_job_controller_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (JobControllerClient, "grpc"),
+        (JobControllerAsyncClient, "grpc_asyncio"),
+        (JobControllerClient, "rest"),
+    ],
+)
+def test_job_controller_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "dataproc.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://dataproc.googleapis.com"
+        )
+
+
+def test_job_controller_client_get_transport_class():
+    transport = JobControllerClient.get_transport_class()
+    available_transports = [
+        transports.JobControllerGrpcTransport,
+        transports.JobControllerRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = JobControllerClient.get_transport_class("grpc")
+    assert transport == transports.JobControllerGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (JobControllerClient, transports.JobControllerGrpcTransport, "grpc"),
+        (
+            JobControllerAsyncClient,
+            transports.JobControllerGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (JobControllerClient, transports.JobControllerRestTransport, "rest"),
+    ],
+)
+@mock.patch.object(
+    JobControllerClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(JobControllerClient),
+)
+@mock.patch.object(
+    JobControllerAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(JobControllerAsyncClient),
+)
+def test_job_controller_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(JobControllerClient, "get_transport_class") as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(JobControllerClient, "get_transport_class") as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
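+    # (With "always", the mTLS endpoint is used even when no client certificate
+    # is configured.)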
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (JobControllerClient, transports.JobControllerGrpcTransport, "grpc", "true"),
+        (
+            JobControllerAsyncClient,
+            transports.JobControllerGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (JobControllerClient, transports.JobControllerGrpcTransport, "grpc", "false"),
+        (
+            JobControllerAsyncClient,
+            transports.JobControllerGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (JobControllerClient, transports.JobControllerRestTransport, "rest", "true"),
+        (JobControllerClient, transports.JobControllerRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    JobControllerClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(JobControllerClient),
+)
+@mock.patch.object(
+    JobControllerAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(JobControllerAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_job_controller_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        options = client_options.ClientOptions(
+            client_cert_source=client_cert_source_callback
+        )
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=True,
+            ):
+                with mock.patch(
+                    "google.auth.transport.mtls.default_client_cert_source",
+                    return_value=client_cert_source_callback,
+                ):
+                    if use_client_cert_env == "false":
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
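+    # (With no certificate available from either source, the client is expected
+    # to stay on the regular endpoint.)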
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [JobControllerClient, JobControllerAsyncClient] +) +@mock.patch.object( + JobControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(JobControllerClient), +) +@mock.patch.object( + JobControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(JobControllerAsyncClient), +) +def test_job_controller_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
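+    # ("auto" only switches to the mTLS endpoint when a default client
+    # certificate can actually be found.)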
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (JobControllerClient, transports.JobControllerGrpcTransport, "grpc"), + ( + JobControllerAsyncClient, + transports.JobControllerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (JobControllerClient, transports.JobControllerRestTransport, "rest"), + ], +) +def test_job_controller_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + JobControllerClient, + transports.JobControllerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + JobControllerAsyncClient, + transports.JobControllerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (JobControllerClient, transports.JobControllerRestTransport, "rest", None), + ], +) +def test_job_controller_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_job_controller_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dataproc_v1.services.job_controller.transports.JobControllerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = JobControllerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + JobControllerClient, + transports.JobControllerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + JobControllerAsyncClient, + transports.JobControllerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_job_controller_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.SubmitJobRequest, + dict, + ], +) +def test_submit_job(request_type, transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + response = client.submit_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SubmitJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +def test_submit_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + client.submit_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SubmitJobRequest() + + +@pytest.mark.asyncio +async def test_submit_job_async( + transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest +): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + ) + response = await client.submit_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SubmitJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +@pytest.mark.asyncio +async def test_submit_job_async_from_dict(): + await test_submit_job_async(request_type=dict) + + +def test_submit_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + call.return_value = jobs.Job() + client.submit_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.submit_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +def test_submit_job_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method.
+ client.submit_job( + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job + mock_val = jobs.Job(reference=jobs.JobReference(project_id="project_id_value")) + assert arg == mock_val + + +def test_submit_job_flattened_error(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.submit_job( + jobs.SubmitJobRequest(), + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + +@pytest.mark.asyncio +async def test_submit_job_flattened_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.submit_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.submit_job( + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job + mock_val = jobs.Job(reference=jobs.JobReference(project_id="project_id_value")) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_submit_job_flattened_error_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.submit_job( + jobs.SubmitJobRequest(), + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.SubmitJobRequest, + dict, + ], +) +def test_submit_job_as_operation(request_type, transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.submit_job_as_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SubmitJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_submit_job_as_operation_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + client.submit_job_as_operation() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SubmitJobRequest() + + +@pytest.mark.asyncio +async def test_submit_job_as_operation_async( + transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest +): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.submit_job_as_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SubmitJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_submit_job_as_operation_async_from_dict(): + await test_submit_job_as_operation_async(request_type=dict) + + +def test_submit_job_as_operation_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.submit_job_as_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_submit_job_as_operation_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SubmitJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.submit_job_as_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +def test_submit_job_as_operation_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.submit_job_as_operation( + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job + mock_val = jobs.Job(reference=jobs.JobReference(project_id="project_id_value")) + assert arg == mock_val + + +def test_submit_job_as_operation_flattened_error(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.submit_job_as_operation( + jobs.SubmitJobRequest(), + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + +@pytest.mark.asyncio +async def test_submit_job_as_operation_flattened_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.submit_job_as_operation), "__call__" + ) as call: + # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.submit_job_as_operation( + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job + mock_val = jobs.Job(reference=jobs.JobReference(project_id="project_id_value")) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_submit_job_as_operation_flattened_error_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.submit_job_as_operation( + jobs.SubmitJobRequest(), + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.GetJobRequest, + dict, + ], +) +def test_get_job(request_type, transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + response = client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.GetJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +def test_get_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call: + client.get_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.GetJobRequest() + + +@pytest.mark.asyncio +async def test_get_job_async( + transport: str = "grpc_asyncio", request_type=jobs.GetJobRequest +): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + ) + response = await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.GetJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +@pytest.mark.asyncio +async def test_get_job_async_from_dict(): + await test_get_job_async(request_type=dict) + + +def test_get_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.GetJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + call.return_value = jobs.Job() + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.GetJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +def test_get_job_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" + assert arg == mock_val + + +def test_get_job_flattened_error(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + jobs.GetJobRequest(), + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + +@pytest.mark.asyncio +async def test_get_job_flattened_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_job_flattened_error_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error.
+ with pytest.raises(ValueError): + await client.get_job( + jobs.GetJobRequest(), + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.ListJobsRequest, + dict, + ], +) +def test_list_jobs(request_type, transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + + +@pytest.mark.asyncio +async def test_list_jobs_async( + transport: str = "grpc_asyncio", request_type=jobs.ListJobsRequest +): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_jobs_async_from_dict(): + await test_list_jobs_async(request_type=dict) + + +def test_list_jobs_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = jobs.ListJobsRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = jobs.ListJobsResponse() + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.ListJobsResponse() + ) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value", + ) in kw["metadata"] + + +def test_list_jobs_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.ListJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_jobs( + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val + + +def test_list_jobs_flattened_error(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + jobs.ListJobsRequest(), + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_jobs_flattened_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call.
+ call.return_value = jobs.ListJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.ListJobsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_jobs( + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_jobs_flattened_error_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_jobs( + jobs.ListJobsRequest(), + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + +def test_list_jobs_pager(transport_name: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project_id", ""), + ("region", ""), + ) + ), + ) + pager = client.list_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, jobs.Job) for i in results) + + +def test_list_jobs_pages(transport_name: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + pages = list(client.list_jobs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_jobs_async_pager(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_jobs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, jobs.Job) for i in responses) + + +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.UpdateJobRequest, + dict, + ], +) +def test_update_job(request_type, transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + response = client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.UpdateJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +def test_update_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + client.update_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.UpdateJobRequest() + + +@pytest.mark.asyncio +async def test_update_job_async( + transport: str = "grpc_asyncio", request_type=jobs.UpdateJobRequest +): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + ) + response = await client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.UpdateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +@pytest.mark.asyncio +async def test_update_job_async_from_dict(): + await test_update_job_async(request_type=dict) + + +def test_update_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.UpdateJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = jobs.Job() + client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.UpdateJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.CancelJobRequest, + dict, + ], +) +def test_cancel_job(request_type, transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + response = client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CancelJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +def test_cancel_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + client.cancel_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CancelJobRequest() + + +@pytest.mark.asyncio +async def test_cancel_job_async( + transport: str = "grpc_asyncio", request_type=jobs.CancelJobRequest +): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + ) + response = await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CancelJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +@pytest.mark.asyncio +async def test_cancel_job_async_from_dict(): + await test_cancel_job_async(request_type=dict) + + +def test_cancel_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.CancelJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = jobs.Job() + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_job_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.CancelJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project_id=project_id_value&region=region_value&job_id=job_id_value", + ) in kw["metadata"] + + +def test_cancel_job_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_job( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" + assert arg == mock_val + + +def test_cancel_job_flattened_error(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_job( + jobs.CancelJobRequest(), + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + +@pytest.mark.asyncio +async def test_cancel_job_flattened_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_job( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_job_flattened_error_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error.
+ with pytest.raises(ValueError): + await client.cancel_job( + jobs.CancelJobRequest(), + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.DeleteJobRequest, + dict, + ], +) +def test_delete_job(request_type, transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.DeleteJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + client.delete_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.DeleteJobRequest() + + +@pytest.mark.asyncio +async def test_delete_job_async( + transport: str = "grpc_asyncio", request_type=jobs.DeleteJobRequest +): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.DeleteJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_async_from_dict(): + await test_delete_job_async(request_type=dict) + + +def test_delete_job_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.DeleteJobRequest() + + request.project_id = "project_id_value" + request.region = "region_value" + request.job_id = "job_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + call.return_value = None + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. 
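+ # (Each mock_calls entry unpacks as (name, args, kwargs); the request
+ # object is args[0] and the metadata headers live in the kwargs.)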
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "project_id=project_id_value&region=region_value&job_id=job_id_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_job_field_headers_async():
+ client = JobControllerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = jobs.DeleteJobRequest()
+
+ request.project_id = "project_id_value"
+ request.region = "region_value"
+ request.job_id = "job_id_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "project_id=project_id_value&region=region_value&job_id=job_id_value",
+ ) in kw["metadata"]
+
+
+def test_delete_job_flattened():
+ client = JobControllerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_job(
+ project_id="project_id_value",
+ region="region_value",
+ job_id="job_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].project_id
+ mock_val = "project_id_value"
+ assert arg == mock_val
+ arg = args[0].region
+ mock_val = "region_value"
+ assert arg == mock_val
+ arg = args[0].job_id
+ mock_val = "job_id_value"
+ assert arg == mock_val
+
+
+def test_delete_job_flattened_error():
+ client = JobControllerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_job(
+ jobs.DeleteJobRequest(),
+ project_id="project_id_value",
+ region="region_value",
+ job_id="job_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_job_flattened_async():
+ client = JobControllerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_job(
+ project_id="project_id_value",
+ region="region_value",
+ job_id="job_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
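+ # (The flattened keyword arguments are coalesced into a single
+ # jobs.DeleteJobRequest, which the assertions below pick apart.)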
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].region + mock_val = "region_value" + assert arg == mock_val + arg = args[0].job_id + mock_val = "job_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_job_flattened_error_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_job( + jobs.DeleteJobRequest(), + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.SubmitJobRequest, + dict, + ], +) +def test_submit_job_rest(request_type): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.submit_job(request) + + # Establish that the response is the type that we expect. 
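+ # (The REST transport parses the mocked JSON payload back into a
+ # jobs.Job message, so the field values should round-trip unchanged.)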
+ assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +def test_submit_job_rest_required_fields(request_type=jobs.SubmitJobRequest): + transport_class = transports.JobControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = jobs.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
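+ # (transcode() is stubbed out with a fixed uri/method/body so that
+ # required-field handling can be exercised without real http_options.)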
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.submit_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_submit_job_rest_unset_required_fields(): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.submit_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "region", + "job", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_submit_job_rest_interceptors(null_interceptor): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobControllerRestInterceptor(), + ) + client = JobControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobControllerRestInterceptor, "post_submit_job" + ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "pre_submit_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.SubmitJobRequest.pb(jobs.SubmitJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = jobs.SubmitJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.submit_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_submit_job_rest_bad_request( + transport: str = "rest", request_type=jobs.SubmitJobRequest +): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.submit_job(request) + + +def test_submit_job_rest_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
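+ # (Beyond faking the response, this test checks that the flattened
+ # arguments expand into the expected URL via path_template.validate.)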
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.submit_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/jobs:submit" + % client.transport._host, + args[1], + ) + + +def test_submit_job_rest_flattened_error(transport: str = "rest"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.submit_job( + jobs.SubmitJobRequest(), + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + +def test_submit_job_rest_error(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.SubmitJobRequest, + dict, + ], +) +def test_submit_job_as_operation_rest(request_type): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.submit_job_as_operation(request) + + # Establish that the response is the type that we expect. 
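+ # (submit_job_as_operation returns an api_core operation future; its
+ # .operation attribute exposes the underlying Operation proto.)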
+ assert response.operation.name == "operations/spam" + + +def test_submit_job_as_operation_rest_required_fields( + request_type=jobs.SubmitJobRequest, +): + transport_class = transports.JobControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_job_as_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).submit_job_as_operation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.submit_job_as_operation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_submit_job_as_operation_rest_unset_required_fields(): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.submit_job_as_operation._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "region", + "job", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_submit_job_as_operation_rest_interceptors(null_interceptor): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobControllerRestInterceptor(), + ) + client = JobControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.JobControllerRestInterceptor, "post_submit_job_as_operation" + ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "pre_submit_job_as_operation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.SubmitJobRequest.pb(jobs.SubmitJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = jobs.SubmitJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.submit_job_as_operation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_submit_job_as_operation_rest_bad_request( + transport: str = "rest", request_type=jobs.SubmitJobRequest +): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
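+ # (A mocked 400 status is surfaced by api_core as
+ # core_exceptions.BadRequest, which pytest.raises captures.)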
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.submit_job_as_operation(request) + + +def test_submit_job_as_operation_rest_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.submit_job_as_operation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/jobs:submitAsOperation" + % client.transport._host, + args[1], + ) + + +def test_submit_job_as_operation_rest_flattened_error(transport: str = "rest"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.submit_job_as_operation( + jobs.SubmitJobRequest(), + project_id="project_id_value", + region="region_value", + job=jobs.Job(reference=jobs.JobReference(project_id="project_id_value")), + ) + + +def test_submit_job_as_operation_rest_error(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.GetJobRequest, + dict, + ], +) +def test_get_job_rest(request_type): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +def test_get_job_rest_required_fields(request_type=jobs.GetJobRequest): + transport_class = transports.JobControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request_init["job_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + jsonified_request["jobId"] = "job_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "jobId" in jsonified_request + assert jsonified_request["jobId"] == "job_id_value" + + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = jobs.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_job_rest_unset_required_fields(): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "region", + "jobId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_rest_interceptors(null_interceptor): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobControllerRestInterceptor(), + ) + client = JobControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobControllerRestInterceptor, "post_get_job" + ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "pre_get_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.GetJobRequest.pb(jobs.GetJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = jobs.GetJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.get_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_rest_bad_request( + transport: str = "rest", request_type=jobs.GetJobRequest +): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2", "job_id": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job(request) + + +def test_get_job_rest_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project_id": "sample1", + "region": "sample2", + "job_id": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + % client.transport._host, + args[1], + ) + + +def test_get_job_rest_flattened_error(transport: str = "rest"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + jobs.GetJobRequest(), + project_id="project_id_value", + region="region_value", + job_id="job_id_value", + ) + + +def test_get_job_rest_error(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.ListJobsRequest, + dict, + ], +) +def test_list_jobs_rest(request_type): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.ListJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_jobs(request) + + # Establish that the response is the type that we expect. 
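+ # (list_jobs wraps the raw ListJobsResponse in a pager that fetches
+ # follow-up pages lazily as the caller iterates.)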
+ assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_jobs_rest_required_fields(request_type=jobs.ListJobsRequest): + transport_class = transports.JobControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "cluster_name", + "filter", + "job_state_matcher", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = jobs.ListJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = jobs.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_jobs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_jobs_rest_unset_required_fields(): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "clusterName", + "filter", + "jobStateMatcher", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "projectId", + "region", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_jobs_rest_interceptors(null_interceptor): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobControllerRestInterceptor(), + ) + client = JobControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobControllerRestInterceptor, "post_list_jobs" + ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "pre_list_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.ListJobsResponse.to_json( + jobs.ListJobsResponse() + ) + + request = jobs.ListJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.ListJobsResponse() + + client.list_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_jobs_rest_bad_request( + transport: str = "rest", request_type=jobs.ListJobsRequest +): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_jobs(request) + + +def test_list_jobs_rest_flattened(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.ListJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/regions/{region}/jobs" + % client.transport._host, + args[1], + ) + + +def test_list_jobs_rest_flattened_error(transport: str = "rest"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + jobs.ListJobsRequest(), + project_id="project_id_value", + region="region_value", + filter="filter_value", + ) + + +def test_list_jobs_rest_pager(transport: str = "rest"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
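+ # (req.side_effect below feeds one canned response per request, letting
+ # the pager walk next_page_token across two full sets of pages.)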
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token="abc", + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token="ghi", + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(jobs.ListJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1", "region": "sample2"} + + pager = client.list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, jobs.Job) for i in results) + + pages = list(client.list_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + jobs.UpdateJobRequest, + dict, + ], +) +def test_update_job_rest(request_type): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2", "job_id": "sample3"} + request_init["job"] = { + "reference": {"project_id": "project_id_value", "job_id": "job_id_value"}, + "placement": { + "cluster_name": "cluster_name_value", + "cluster_uuid": "cluster_uuid_value", + "cluster_labels": {}, + }, + "hadoop_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {"driver_log_levels": {}}, + }, + "spark_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "pyspark_job": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": ["python_file_uris_value1", "python_file_uris_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "hive_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {"queries": ["queries_value1", "queries_value2"]}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "pig_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "script_variables": {}, + 
"properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "spark_r_job": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "spark_sql_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "presto_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "status": { + "state": 1, + "details": "details_value", + "state_start_time": {"seconds": 751, "nanos": 543}, + "substate": 1, + }, + "status_history": {}, + "yarn_applications": [ + { + "name": "name_value", + "state": 1, + "progress": 0.885, + "tracking_url": "tracking_url_value", + } + ], + "driver_output_resource_uri": "driver_output_resource_uri_value", + "driver_control_files_uri": "driver_control_files_uri_value", + "labels": {}, + "scheduling": {"max_failures_per_hour": 2243, "max_failures_total": 1923}, + "job_uuid": "job_uuid_value", + "done": True, + "driver_scheduling_config": {"memory_mb": 967, "vcores": 658}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = jobs.Job( + driver_output_resource_uri="driver_output_resource_uri_value", + driver_control_files_uri="driver_control_files_uri_value", + job_uuid="job_uuid_value", + done=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.driver_output_resource_uri == "driver_output_resource_uri_value" + assert response.driver_control_files_uri == "driver_control_files_uri_value" + assert response.job_uuid == "job_uuid_value" + assert response.done is True + + +def test_update_job_rest_required_fields(request_type=jobs.UpdateJobRequest): + transport_class = transports.JobControllerRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["region"] = "" + request_init["job_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["region"] = "region_value" + jsonified_request["jobId"] = "job_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "jobId" in jsonified_request + assert jsonified_request["jobId"] == "job_id_value" + + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = jobs.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = jobs.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_job_rest_unset_required_fields(): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "projectId", + "region", + "jobId", + "job", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_rest_interceptors(null_interceptor): + transport = transports.JobControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.JobControllerRestInterceptor(), + ) + client = JobControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.JobControllerRestInterceptor, "post_update_job" + ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "pre_update_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = jobs.UpdateJobRequest.pb(jobs.UpdateJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = jobs.Job.to_json(jobs.Job()) + + request = jobs.UpdateJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = jobs.Job() + + client.update_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_rest_bad_request( + transport: str = "rest", request_type=jobs.UpdateJobRequest +): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "region": "sample2", "job_id": "sample3"} + request_init["job"] = { + "reference": {"project_id": "project_id_value", "job_id": "job_id_value"}, + "placement": { + "cluster_name": "cluster_name_value", + "cluster_uuid": "cluster_uuid_value", + "cluster_labels": {}, + }, + "hadoop_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {"driver_log_levels": {}}, + }, + "spark_job": { + 
"main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "pyspark_job": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": ["python_file_uris_value1", "python_file_uris_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "hive_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {"queries": ["queries_value1", "queries_value2"]}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "pig_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "spark_r_job": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "spark_sql_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "presto_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "trino_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "status": { + "state": 1, + "details": "details_value", + "state_start_time": {"seconds": 751, "nanos": 543}, + "substate": 1, + }, + "status_history": {}, + "yarn_applications": [ + { + "name": "name_value", + "state": 1, + "progress": 0.885, + "tracking_url": "tracking_url_value", + } + ], + "driver_output_resource_uri": "driver_output_resource_uri_value", + "driver_control_files_uri": "driver_control_files_uri_value", + "labels": {}, + "scheduling": {"max_failures_per_hour": 2243, "max_failures_total": 1923}, + "job_uuid": "job_uuid_value", + "done": True, + "driver_scheduling_config": {"memory_mb": 967, "vcores": 658}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_job(request)
+
+
+def test_update_job_rest_error():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        jobs.CancelJobRequest,
+        dict,
+    ],
+)
+def test_cancel_job_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"project_id": "sample1", "region": "sample2", "job_id": "sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = jobs.Job(
+            driver_output_resource_uri="driver_output_resource_uri_value",
+            driver_control_files_uri="driver_control_files_uri_value",
+            job_uuid="job_uuid_value",
+            done=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = jobs.Job.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.cancel_job(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, jobs.Job)
+    assert response.driver_output_resource_uri == "driver_output_resource_uri_value"
+    assert response.driver_control_files_uri == "driver_control_files_uri_value"
+    assert response.job_uuid == "job_uuid_value"
+    assert response.done is True
+
+
+def test_cancel_job_rest_required_fields(request_type=jobs.CancelJobRequest):
+    transport_class = transports.JobControllerRestTransport
+
+    request_init = {}
+    request_init["project_id"] = ""
+    request_init["region"] = ""
+    request_init["job_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).cancel_job._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["projectId"] = "project_id_value"
+    jsonified_request["region"] = "region_value"
+    jsonified_request["jobId"] = "job_id_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).cancel_job._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "projectId" in jsonified_request
+    assert jsonified_request["projectId"] == "project_id_value"
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == "region_value"
+    assert "jobId" in jsonified_request
+    assert jsonified_request["jobId"] == "job_id_value"
+
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = jobs.Job()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = jobs.Job.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.cancel_job(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_cancel_job_rest_unset_required_fields():
+    transport = transports.JobControllerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.cancel_job._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "projectId",
+                "region",
+                "jobId",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_cancel_job_rest_interceptors(null_interceptor):
+    transport = transports.JobControllerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.JobControllerRestInterceptor(),
+    )
+    client = JobControllerClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.JobControllerRestInterceptor, "post_cancel_job"
+    ) as post, mock.patch.object(
+        transports.JobControllerRestInterceptor, "pre_cancel_job"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = jobs.CancelJobRequest.pb(jobs.CancelJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = jobs.Job.to_json(jobs.Job())
+
+        request = jobs.CancelJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = jobs.Job()
+
+        client.cancel_job(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_cancel_job_rest_bad_request(
+    transport: str = "rest", request_type=jobs.CancelJobRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"project_id": "sample1", "region": "sample2", "job_id": "sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.cancel_job(request)
+
+
+def test_cancel_job_rest_flattened():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = jobs.Job()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "project_id": "sample1",
+            "region": "sample2",
+            "job_id": "sample3",
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project_id="project_id_value",
+            region="region_value",
+            job_id="job_id_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = jobs.Job.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.cancel_job(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_cancel_job_rest_flattened_error(transport: str = "rest"):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.cancel_job(
+            jobs.CancelJobRequest(),
+            project_id="project_id_value",
+            region="region_value",
+            job_id="job_id_value",
+        )
+
+
+def test_cancel_job_rest_error():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        jobs.DeleteJobRequest,
+        dict,
+    ],
+)
+def test_delete_job_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"project_id": "sample1", "region": "sample2", "job_id": "sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ""
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.delete_job(request)
+
+    # Establish that the response is the type that we expect.
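+    # NOTE: DeleteJob returns google.protobuf.Empty on the wire, which the
+    # generated client surfaces as None; hence the empty JSON body mocked above.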
+    assert response is None
+
+
+def test_delete_job_rest_required_fields(request_type=jobs.DeleteJobRequest):
+    transport_class = transports.JobControllerRestTransport
+
+    request_init = {}
+    request_init["project_id"] = ""
+    request_init["region"] = ""
+    request_init["job_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).delete_job._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["projectId"] = "project_id_value"
+    jsonified_request["region"] = "region_value"
+    jsonified_request["jobId"] = "job_id_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).delete_job._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "projectId" in jsonified_request
+    assert jsonified_request["projectId"] == "project_id_value"
+    assert "region" in jsonified_request
+    assert jsonified_request["region"] == "region_value"
+    assert "jobId" in jsonified_request
+    assert jsonified_request["jobId"] == "job_id_value"
+
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
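+            # NOTE: a real path_template.transcode() call resolves the proto
+            # http annotation into a dict shaped roughly like
+            #     {"uri": ..., "method": "delete", "query_params": ...};
+            # the stub below fakes that dict directly (the values here are
+            # placeholders, not a real Dataproc URI).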
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ""
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_job(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_job_rest_unset_required_fields():
+    transport = transports.JobControllerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_job._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "projectId",
+                "region",
+                "jobId",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_job_rest_interceptors(null_interceptor):
+    transport = transports.JobControllerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.JobControllerRestInterceptor(),
+    )
+    client = JobControllerClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.JobControllerRestInterceptor, "pre_delete_job"
+    ) as pre:
+        pre.assert_not_called()
+        pb_message = jobs.DeleteJobRequest.pb(jobs.DeleteJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = jobs.DeleteJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_job(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+
+
+def test_delete_job_rest_bad_request(
+    transport: str = "rest", request_type=jobs.DeleteJobRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"project_id": "sample1", "region": "sample2", "job_id": "sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_job(request)
+
+
+def test_delete_job_rest_flattened():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "project_id": "sample1",
+            "region": "sample2",
+            "job_id": "sample3",
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project_id="project_id_value",
+            region="region_value",
+            job_id="job_id_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ""
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.delete_job(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/projects/{project_id}/regions/{region}/jobs/{job_id}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_delete_job_rest_flattened_error(transport: str = "rest"):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_job(
+            jobs.DeleteJobRequest(),
+            project_id="project_id_value",
+            region="region_value",
+            job_id="job_id_value",
+        )
+
+
+def test_delete_job_rest_error():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.JobControllerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = JobControllerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.JobControllerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = JobControllerClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.JobControllerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = JobControllerClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+    options = mock.Mock()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = JobControllerClient(
+            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.JobControllerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = JobControllerClient(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
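+    # NOTE: presumably this works because the client defers entirely to the
+    # supplied transport instead of building one from credentials/options,
+    # which is also why combining the two is rejected above.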
+    transport = transports.JobControllerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = JobControllerClient(transport=transport)
+    assert client.transport is transport
+
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.JobControllerGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.JobControllerGrpcAsyncIOTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.JobControllerGrpcTransport,
+        transports.JobControllerGrpcAsyncIOTransport,
+        transports.JobControllerRestTransport,
+    ],
+)
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "rest",
+    ],
+)
+def test_transport_kind(transport_name):
+    transport = JobControllerClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.JobControllerGrpcTransport,
+    )
+
+
+def test_job_controller_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.JobControllerTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json",
+        )
+
+
+def test_job_controller_base_transport():
+    # Instantiate the base transport.
+    with mock.patch(
+        "google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport.__init__"
+    ) as Transport:
+        Transport.return_value = None
+        transport = transports.JobControllerTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
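+    # NOTE: the base class is effectively an abstract interface; the concrete
+    # gRPC/REST transports are expected to override each stub listed below.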
+    methods = (
+        "submit_job",
+        "submit_job_as_operation",
+        "get_job",
+        "list_jobs",
+        "update_job",
+        "cancel_job",
+        "delete_job",
+        "set_iam_policy",
+        "get_iam_policy",
+        "test_iam_permissions",
+        "get_operation",
+        "cancel_operation",
+        "delete_operation",
+        "list_operations",
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Additionally, the LRO client (a property) should
+    # also raise NotImplementedError
+    with pytest.raises(NotImplementedError):
+        transport.operations_client
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        "kind",
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_job_controller_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.JobControllerTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+def test_job_controller_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+        "google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.JobControllerTransport()
+        adc.assert_called_once()
+
+
+def test_job_controller_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        JobControllerClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.JobControllerGrpcTransport,
+        transports.JobControllerGrpcAsyncIOTransport,
+    ],
+)
+def test_job_controller_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
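+    # NOTE: google.auth.default() returns a (credentials, project_id) tuple,
+    # which is why the mock below is primed with a 2-tuple.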
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.JobControllerGrpcTransport,
+        transports.JobControllerGrpcAsyncIOTransport,
+        transports.JobControllerRestTransport,
+    ],
+)
+def test_job_controller_transport_auth_gdch_credentials(transport_class):
+    host = "https://language.com"
+    api_audience_tests = [None, "https://language2.com"]
+    api_audience_expect = [host, "https://language2.com"]
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, "default", autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(
+                return_value=gdch_mock
+            )
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(e)
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.JobControllerGrpcTransport, grpc_helpers),
+        (transports.JobControllerGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+def test_job_controller_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "dataproc.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="dataproc.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.JobControllerGrpcTransport,
+        transports.JobControllerGrpcAsyncIOTransport,
+    ],
+)
+def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_class):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
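+    # NOTE: the assumed precedence: an explicit ssl_channel_credentials wins;
+    # otherwise the callback is invoked and its (cert, key) pair is passed to
+    # grpc.ssl_channel_credentials(), as asserted below.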
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_job_controller_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.JobControllerRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_job_controller_rest_lro_client():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_job_controller_host_no_port(transport_name):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataproc.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "dataproc.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://dataproc.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_job_controller_host_with_port(transport_name):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataproc.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "dataproc.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://dataproc.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_job_controller_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = JobControllerClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = JobControllerClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.submit_job._session
+    session2 = client2.transport.submit_job._session
+    assert session1 != session2
+    session1 = client1.transport.submit_job_as_operation._session
+    session2 = client2.transport.submit_job_as_operation._session
+    assert session1 != session2
+    session1 = client1.transport.get_job._session
+    session2 = client2.transport.get_job._session
+    assert session1 != session2
+    session1 = client1.transport.list_jobs._session
+    session2 = client2.transport.list_jobs._session
+    assert session1 != session2
+    session1 = client1.transport.update_job._session
+    session2 = client2.transport.update_job._session
+    assert session1 != session2
+    session1 = client1.transport.cancel_job._session
+    session2 = client2.transport.cancel_job._session
+    assert session1 != session2
+    session1 = client1.transport.delete_job._session
+    session2 = client2.transport.delete_job._session
+    assert session1 != session2
+
+
+def test_job_controller_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.JobControllerGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_job_controller_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.JobControllerGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.JobControllerGrpcTransport,
+        transports.JobControllerGrpcAsyncIOTransport,
+    ],
+)
+def test_job_controller_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
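+# NOTE: presumably these arguments were superseded by ssl_channel_credentials /
+# client_cert_source_for_mtls, hence the expected DeprecationWarning in both
+# mtls tests.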
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.JobControllerGrpcTransport,
+        transports.JobControllerGrpcAsyncIOTransport,
+    ],
+)
+def test_job_controller_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_job_controller_grpc_lro_client():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_job_controller_grpc_lro_async_client():
+    client = JobControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = JobControllerClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = JobControllerClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobControllerClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = JobControllerClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = JobControllerClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
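+    # NOTE: the parse_* helpers invert the template with a regex and return
+    # the captured segments as a dict, so the round trip should be lossless.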
+    actual = JobControllerClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "oyster"
+    expected = "organizations/{organization}".format(
+        organization=organization,
+    )
+    actual = JobControllerClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = JobControllerClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobControllerClient.parse_common_organization_path(path)
+    assert expected == actual
+
+
+def test_common_project_path():
+    project = "cuttlefish"
+    expected = "projects/{project}".format(
+        project=project,
+    )
+    actual = JobControllerClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = JobControllerClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobControllerClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(
+        project=project,
+        location=location,
+    )
+    actual = JobControllerClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = JobControllerClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobControllerClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.JobControllerTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = JobControllerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.JobControllerTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = JobControllerClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+    client = JobControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    with mock.patch.object(
+        type(getattr(client.transport, "grpc_channel")), "close"
+    ) as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_get_iam_policy_rest_bad_request(
+    transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    request = request_type()
+    request = json_format.ParseDict(
+        {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request
+    )
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_iam_policy(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        iam_policy_pb2.GetIamPolicyRequest,
+        dict,
+    ],
+)
+def test_get_iam_policy_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy()
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        response = client.get_iam_policy(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+
+def test_set_iam_policy_rest_bad_request(
+    transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    request = request_type()
+    request = json_format.ParseDict(
+        {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request
+    )
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.set_iam_policy(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        iam_policy_pb2.SetIamPolicyRequest,
+        dict,
+    ],
+)
+def test_set_iam_policy_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = policy_pb2.Policy()
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        response = client.set_iam_policy(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+
+def test_test_iam_permissions_rest_bad_request(
+    transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    request = request_type()
+    request = json_format.ParseDict(
+        {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request
+    )
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.test_iam_permissions(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        iam_policy_pb2.TestIamPermissionsRequest,
+        dict,
+    ],
+)
+def test_test_iam_permissions_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = iam_policy_pb2.TestIamPermissionsResponse()
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        response = client.test_iam_permissions(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+
+
+def test_cancel_operation_rest_bad_request(
+    transport: str = "rest", request_type=operations_pb2.CancelOperationRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    request = request_type()
+    request = json_format.ParseDict(
+        {"name": "projects/sample1/regions/sample2/operations/sample3"}, request
+    )
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.cancel_operation(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        operations_pb2.CancelOperationRequest,
+        dict,
+    ],
+)
+def test_cancel_operation_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
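+        # NOTE: CancelOperation comes from the google.longrunning operations
+        # mixin exposed on every transport; it returns an empty message
+        # (mocked below as a "{}" body) that the client surfaces as None.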
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = "{}"
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        response = client.cancel_operation(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_operation_rest_bad_request(
+    transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    request = request_type()
+    request = json_format.ParseDict(
+        {"name": "projects/sample1/regions/sample2/operations/sample3"}, request
+    )
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_operation(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        operations_pb2.DeleteOperationRequest,
+        dict,
+    ],
+)
+def test_delete_operation_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = "{}"
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        response = client.delete_operation(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_get_operation_rest_bad_request(
+    transport: str = "rest", request_type=operations_pb2.GetOperationRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    request = request_type()
+    request = json_format.ParseDict(
+        {"name": "projects/sample1/regions/sample2/operations/sample3"}, request
+    )
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_operation(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        operations_pb2.GetOperationRequest,
+        dict,
+    ],
+)
+def test_get_operation_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation()
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        response = client.get_operation(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+
+
+def test_list_operations_rest_bad_request(
+    transport: str = "rest", request_type=operations_pb2.ListOperationsRequest
+):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    request = request_type()
+    request = json_format.ParseDict(
+        {"name": "projects/sample1/regions/sample2/operations"}, request
+    )
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_operations(request)
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        operations_pb2.ListOperationsRequest,
+        dict,
+    ],
+)
+def test_list_operations_rest(request_type):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request_init = {"name": "projects/sample1/regions/sample2/operations"}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.ListOperationsResponse()
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        response = client.list_operations(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.ListOperationsResponse)
+
+
+def test_delete_operation(transport: str = "grpc"):
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.DeleteOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_operation_async(transport: str = "grpc"):
+    client = JobControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.DeleteOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_operation_field_headers():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.DeleteOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        call.return_value = None
+
+        client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_operation_field_headers_async():
+    client = JobControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = operations_pb2.DeleteOperationRequest()
+    request.name = "locations"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations",
+    ) in kw["metadata"]
+
+
+def test_delete_operation_from_dict():
+    client = JobControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
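+        # NOTE: a plain dict works here because the GAPIC layer presumably
+        # coerces it into the request message type before invoking the stub.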
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
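+ # (The client derives routing metadata from request fields; setting
+ # request.name above should produce a "name=locations" entry in the
+ # x-goog-request-params header checked below.)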
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
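+ # (grpc_helpers_async.FakeUnaryUnaryCall wraps the designated response in an
+ # awaitable fake call object, so the async client can await the mocked stub
+ # method just like a real unary-unary call.)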
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest()
+ request.name = "locations"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.ListOperationsResponse()
+ )
+ await client.list_operations(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=locations",
+ ) in kw["metadata"]
+
+
+def test_list_operations_from_dict():
+ client = JobControllerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.ListOperationsResponse()
+
+ response = client.list_operations(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_list_operations_from_dict_async():
+ client = JobControllerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.ListOperationsResponse()
+ )
+ response = await client.list_operations(
+ request={
+ "name": "locations",
+ }
+ )
+ call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+ client = JobControllerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy(
+ version=774,
+ etag=b"etag_blob",
+ )
+ response = client.set_iam_policy(request)
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+
+ assert response.version == 774
+
+ assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+ client = JobControllerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
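+ # (Note the request dict below mixes a plain string with an embedded
+ # policy_pb2.Policy message; both are expected to marshal into a single
+ # SetIamPolicyRequest.)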
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
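+ # (Repeated proto fields compare equal to plain Python lists, which is what
+ # the permissions assertion below relies on.)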
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = JobControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (JobControllerClient, transports.JobControllerGrpcTransport), + (JobControllerAsyncClient, transports.JobControllerGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py new file mode 100644 index 000000000000..de23658bbe65 --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py @@ -0,0 +1,4781 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.dataproc_v1.services.node_group_controller import ( + NodeGroupControllerAsyncClient, + NodeGroupControllerClient, + transports, +) +from google.cloud.dataproc_v1.types import clusters, node_groups, operations + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
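+# (modify_default_endpoint is patched in as DEFAULT_ENDPOINT in the tests
+# below so that DEFAULT_MTLS_ENDPOINT resolves to a distinct value even when
+# the real default endpoint would be localhost.)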
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert NodeGroupControllerClient._get_default_mtls_endpoint(None) is None + assert ( + NodeGroupControllerClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + NodeGroupControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + NodeGroupControllerClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + NodeGroupControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + NodeGroupControllerClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (NodeGroupControllerClient, "grpc"), + (NodeGroupControllerAsyncClient, "grpc_asyncio"), + (NodeGroupControllerClient, "rest"), + ], +) +def test_node_group_controller_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.NodeGroupControllerGrpcTransport, "grpc"), + (transports.NodeGroupControllerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.NodeGroupControllerRestTransport, "rest"), + ], +) +def test_node_group_controller_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (NodeGroupControllerClient, "grpc"), + (NodeGroupControllerAsyncClient, "grpc_asyncio"), + (NodeGroupControllerClient, "rest"), + ], +) +def test_node_group_controller_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) 
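+ # (from_service_account_json, exercised next, is an alias of
+ # from_service_account_file in generated clients, so the same patched
+ # factory serves both entry points.)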
+ + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +def test_node_group_controller_client_get_transport_class(): + transport = NodeGroupControllerClient.get_transport_class() + available_transports = [ + transports.NodeGroupControllerGrpcTransport, + transports.NodeGroupControllerRestTransport, + ] + assert transport in available_transports + + transport = NodeGroupControllerClient.get_transport_class("grpc") + assert transport == transports.NodeGroupControllerGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + NodeGroupControllerClient, + transports.NodeGroupControllerGrpcTransport, + "grpc", + ), + ( + NodeGroupControllerAsyncClient, + transports.NodeGroupControllerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + NodeGroupControllerClient, + transports.NodeGroupControllerRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + NodeGroupControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NodeGroupControllerClient), +) +@mock.patch.object( + NodeGroupControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NodeGroupControllerAsyncClient), +) +def test_node_group_controller_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(NodeGroupControllerClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(NodeGroupControllerClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
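+ # (GOOGLE_API_USE_MTLS_ENDPOINT accepts "never", "auto", and "always";
+ # "always" forces the mTLS endpoint regardless of client certificates.)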
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + NodeGroupControllerClient, + transports.NodeGroupControllerGrpcTransport, + "grpc", + "true", + ), + ( + NodeGroupControllerAsyncClient, + transports.NodeGroupControllerGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + NodeGroupControllerClient, + transports.NodeGroupControllerGrpcTransport, + "grpc", + "false", + ), + ( + NodeGroupControllerAsyncClient, + transports.NodeGroupControllerGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + NodeGroupControllerClient, + transports.NodeGroupControllerRestTransport, + "rest", + "true", + ), + ( + NodeGroupControllerClient, + transports.NodeGroupControllerRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + NodeGroupControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NodeGroupControllerClient), +) +@mock.patch.object( + NodeGroupControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NodeGroupControllerAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_node_group_controller_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This 
tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
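+ # (With no client certificate available from any source, "auto" should fall
+ # back to the regular endpoint with no mTLS configuration, as asserted below.)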
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [NodeGroupControllerClient, NodeGroupControllerAsyncClient] +) +@mock.patch.object( + NodeGroupControllerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NodeGroupControllerClient), +) +@mock.patch.object( + NodeGroupControllerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(NodeGroupControllerAsyncClient), +) +def test_node_group_controller_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + NodeGroupControllerClient, + transports.NodeGroupControllerGrpcTransport, + "grpc", + ), + ( + NodeGroupControllerAsyncClient, + transports.NodeGroupControllerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + NodeGroupControllerClient, + transports.NodeGroupControllerRestTransport, + "rest", + ), + ], +) +def test_node_group_controller_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + NodeGroupControllerClient, + transports.NodeGroupControllerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + NodeGroupControllerAsyncClient, + transports.NodeGroupControllerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + NodeGroupControllerClient, + transports.NodeGroupControllerRestTransport, + "rest", + None, + ), + ], +) +def test_node_group_controller_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_node_group_controller_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dataproc_v1.services.node_group_controller.transports.NodeGroupControllerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = NodeGroupControllerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + NodeGroupControllerClient, + transports.NodeGroupControllerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + NodeGroupControllerAsyncClient, + transports.NodeGroupControllerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_node_group_controller_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
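+ # (google.auth.load_credentials_from_file is patched so no real file I/O
+ # happens; the test then checks that create_channel received the file-based
+ # credentials together with the service's default cloud-platform scope.)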
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + node_groups.CreateNodeGroupRequest, + dict, + ], +) +def test_create_node_group(request_type, transport: str = "grpc"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_node_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.CreateNodeGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_node_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_node_group), "__call__" + ) as call: + client.create_node_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.CreateNodeGroupRequest() + + +@pytest.mark.asyncio +async def test_create_node_group_async( + transport: str = "grpc_asyncio", request_type=node_groups.CreateNodeGroupRequest +): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_node_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_node_group(request) + + # Establish that the underlying gRPC stub method was called. 
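+        # (A truthy length is asserted rather than an exact count; the
+        # awaited wrapper may record more than one entry on the mock.)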
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.CreateNodeGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_node_group_async_from_dict(): + await test_create_node_group_async(request_type=dict) + + +def test_create_node_group_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = node_groups.CreateNodeGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_node_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_node_group_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = node_groups.CreateNodeGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_node_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_node_group_flattened(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_node_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_node_group( + parent="parent_value", + node_group=clusters.NodeGroup(name="name_value"), + node_group_id="node_group_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
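+        # Each flattened keyword argument should have been copied into the
+        # matching field of the single request object handed to the stub.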
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].node_group
+        mock_val = clusters.NodeGroup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].node_group_id
+        mock_val = "node_group_id_value"
+        assert arg == mock_val
+
+
+def test_create_node_group_flattened_error():
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_node_group(
+            node_groups.CreateNodeGroupRequest(),
+            parent="parent_value",
+            node_group=clusters.NodeGroup(name="name_value"),
+            node_group_id="node_group_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_node_group_flattened_async():
+    client = NodeGroupControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_node_group), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_node_group(
+            parent="parent_value",
+            node_group=clusters.NodeGroup(name="name_value"),
+            node_group_id="node_group_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].node_group
+        mock_val = clusters.NodeGroup(name="name_value")
+        assert arg == mock_val
+        arg = args[0].node_group_id
+        mock_val = "node_group_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_node_group_flattened_error_async():
+    client = NodeGroupControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_node_group(
+            node_groups.CreateNodeGroupRequest(),
+            parent="parent_value",
+            node_group=clusters.NodeGroup(name="name_value"),
+            node_group_id="node_group_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        node_groups.ResizeNodeGroupRequest,
+        dict,
+    ],
+)
+def test_resize_node_group(request_type, transport: str = "grpc"):
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.resize_node_group), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.resize_node_group(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.ResizeNodeGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_resize_node_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resize_node_group), "__call__" + ) as call: + client.resize_node_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.ResizeNodeGroupRequest() + + +@pytest.mark.asyncio +async def test_resize_node_group_async( + transport: str = "grpc_asyncio", request_type=node_groups.ResizeNodeGroupRequest +): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resize_node_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.resize_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.ResizeNodeGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_resize_node_group_async_from_dict(): + await test_resize_node_group_async(request_type=dict) + + +def test_resize_node_group_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = node_groups.ResizeNodeGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resize_node_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.resize_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resize_node_group_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = node_groups.ResizeNodeGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
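+    # The metadata captured from the mocked stub below should carry the
+    # routing header derived from request.name.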
+    with mock.patch.object(
+        type(client.transport.resize_node_group), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.resize_node_group(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_resize_node_group_flattened():
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.resize_node_group), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.resize_node_group(
+            name="name_value",
+            size=443,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].size
+        mock_val = 443
+        assert arg == mock_val
+
+
+def test_resize_node_group_flattened_error():
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.resize_node_group(
+            node_groups.ResizeNodeGroupRequest(),
+            name="name_value",
+            size=443,
+        )
+
+
+@pytest.mark.asyncio
+async def test_resize_node_group_flattened_async():
+    client = NodeGroupControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.resize_node_group), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.resize_node_group(
+            name="name_value",
+            size=443,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].size
+        mock_val = 443
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_resize_node_group_flattened_error_async():
+    client = NodeGroupControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.resize_node_group( + node_groups.ResizeNodeGroupRequest(), + name="name_value", + size=443, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + node_groups.GetNodeGroupRequest, + dict, + ], +) +def test_get_node_group(request_type, transport: str = "grpc"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = clusters.NodeGroup( + name="name_value", + roles=[clusters.NodeGroup.Role.DRIVER], + ) + response = client.get_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.GetNodeGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clusters.NodeGroup) + assert response.name == "name_value" + assert response.roles == [clusters.NodeGroup.Role.DRIVER] + + +def test_get_node_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_group), "__call__") as call: + client.get_node_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.GetNodeGroupRequest() + + +@pytest.mark.asyncio +async def test_get_node_group_async( + transport: str = "grpc_asyncio", request_type=node_groups.GetNodeGroupRequest +): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clusters.NodeGroup( + name="name_value", + roles=[clusters.NodeGroup.Role.DRIVER], + ) + ) + response = await client.get_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == node_groups.GetNodeGroupRequest() + + # Establish that the response is the type that we expect. 
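+    # Unlike the LRO methods above, get_node_group returns the proto message
+    # directly, so the fields set on the designated value round-trip as-is.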
+ assert isinstance(response, clusters.NodeGroup) + assert response.name == "name_value" + assert response.roles == [clusters.NodeGroup.Role.DRIVER] + + +@pytest.mark.asyncio +async def test_get_node_group_async_from_dict(): + await test_get_node_group_async(request_type=dict) + + +def test_get_node_group_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = node_groups.GetNodeGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_group), "__call__") as call: + call.return_value = clusters.NodeGroup() + client.get_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_node_group_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = node_groups.GetNodeGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_group), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.NodeGroup()) + await client.get_node_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_node_group_flattened(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_node_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = clusters.NodeGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_node_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_node_group_flattened_error(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_node_group( + node_groups.GetNodeGroupRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_node_group_flattened_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_node_group), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.NodeGroup())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_node_group(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_node_group_flattened_error_async():
+    client = NodeGroupControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_node_group(
+            node_groups.GetNodeGroupRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        node_groups.CreateNodeGroupRequest,
+        dict,
+    ],
+)
+def test_create_node_group_rest(request_type):
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/regions/sample2/clusters/sample3"}
+    request_init["node_group"] = {
+        "name": "name_value",
+        "roles": [1],
+        "node_group_config": {
+            "num_instances": 1399,
+            "instance_names": ["instance_names_value1", "instance_names_value2"],
+            "instance_references": [
+                {
+                    "instance_name": "instance_name_value",
+                    "instance_id": "instance_id_value",
+                    "public_key": "public_key_value",
+                    "public_ecies_key": "public_ecies_key_value",
+                }
+            ],
+            "image_uri": "image_uri_value",
+            "machine_type_uri": "machine_type_uri_value",
+            "disk_config": {
+                "boot_disk_type": "boot_disk_type_value",
+                "boot_disk_size_gb": 1792,
+                "num_local_ssds": 1494,
+                "local_ssd_interface": "local_ssd_interface_value",
+            },
+            "is_preemptible": True,
+            "preemptibility": 1,
+            "managed_group_config": {
+                "instance_template_name": "instance_template_name_value",
+                "instance_group_manager_name": "instance_group_manager_name_value",
+                "instance_group_manager_uri": "instance_group_manager_uri_value",
+            },
+            "accelerators": [
+                {
+                    "accelerator_type_uri": "accelerator_type_uri_value",
+                    "accelerator_count": 1805,
+                }
+            ],
+            "min_cpu_platform": "min_cpu_platform_value",
+            "min_num_instances": 1818,
+            "instance_flexibility_policy": {
+                "instance_selection_list": [
+                    {
+                        "machine_types": [
+                            "machine_types_value1",
+                            "machine_types_value2",
+                        ],
+                        "rank": 428,
+                    }
+                ],
+                "instance_selection_results": [
+                    {"machine_type": "machine_type_value", "vm_count": 875}
+                ],
+            },
+        },
+        "labels": {},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
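+        # The REST tests fake the response one layer lower than the gRPC
+        # tests: the transport's requests.Session is patched, so the client
+        # still exercises its real transcoding and JSON parsing paths.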
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_node_group(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_node_group_rest_required_fields( + request_type=node_groups.CreateNodeGroupRequest, +): + transport_class = transports.NodeGroupControllerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_node_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_node_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "node_group_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
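+            # (The placeholder URI and body only need to satisfy the mocked
+            # transcode call; no real HTTP request ever leaves the client.)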
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_node_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_node_group_rest_unset_required_fields(): + transport = transports.NodeGroupControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_node_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "nodeGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "nodeGroup", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_node_group_rest_interceptors(null_interceptor): + transport = transports.NodeGroupControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NodeGroupControllerRestInterceptor(), + ) + client = NodeGroupControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NodeGroupControllerRestInterceptor, "post_create_node_group" + ) as post, mock.patch.object( + transports.NodeGroupControllerRestInterceptor, "pre_create_node_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = node_groups.CreateNodeGroupRequest.pb( + node_groups.CreateNodeGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = node_groups.CreateNodeGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_node_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_node_group_rest_bad_request( + transport: str = "rest", request_type=node_groups.CreateNodeGroupRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/regions/sample2/clusters/sample3"} + request_init["node_group"] = { + "name": "name_value", + "roles": [1], + "node_group_config": { + "num_instances": 1399, + "instance_names": ["instance_names_value1", "instance_names_value2"], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + 
"machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_node_group(request) + + +def test_create_node_group_rest_flattened(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/regions/sample2/clusters/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + node_group=clusters.NodeGroup(name="name_value"), + node_group_id="node_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_node_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/regions/*/clusters/*}/nodeGroups" + % client.transport._host, + args[1], + ) + + +def test_create_node_group_rest_flattened_error(transport: str = "rest"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_node_group( + node_groups.CreateNodeGroupRequest(), + parent="parent_value", + node_group=clusters.NodeGroup(name="name_value"), + node_group_id="node_group_id_value", + ) + + +def test_create_node_group_rest_error(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + node_groups.ResizeNodeGroupRequest, + dict, + ], +) +def test_resize_node_group_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resize_node_group(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_resize_node_group_rest_required_fields( + request_type=node_groups.ResizeNodeGroupRequest, +): + transport_class = transports.NodeGroupControllerRestTransport + + request_init = {} + request_init["name"] = "" + request_init["size"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resize_node_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["size"] = 443 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resize_node_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "size" in jsonified_request + assert jsonified_request["size"] == 443 + + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resize_node_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resize_node_group_rest_unset_required_fields(): + transport = transports.NodeGroupControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.resize_node_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "size", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resize_node_group_rest_interceptors(null_interceptor): + transport = transports.NodeGroupControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NodeGroupControllerRestInterceptor(), + ) + client = NodeGroupControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.NodeGroupControllerRestInterceptor, "post_resize_node_group" + ) as post, mock.patch.object( + transports.NodeGroupControllerRestInterceptor, "pre_resize_node_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = node_groups.ResizeNodeGroupRequest.pb( + node_groups.ResizeNodeGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = node_groups.ResizeNodeGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.resize_node_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resize_node_group_rest_bad_request( + transport: str = "rest", request_type=node_groups.ResizeNodeGroupRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
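+    # A 400 status on the mocked response is surfaced by the transport as
+    # google.api_core's BadRequest, which pytest.raises captures here.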
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize_node_group(request) + + +def test_resize_node_group_rest_flattened(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + size=443, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resize_node_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/regions/*/clusters/*/nodeGroups/*}:resize" + % client.transport._host, + args[1], + ) + + +def test_resize_node_group_rest_flattened_error(transport: str = "rest"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resize_node_group( + node_groups.ResizeNodeGroupRequest(), + name="name_value", + size=443, + ) + + +def test_resize_node_group_rest_error(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + node_groups.GetNodeGroupRequest, + dict, + ], +) +def test_get_node_group_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = clusters.NodeGroup( + name="name_value", + roles=[clusters.NodeGroup.Role.DRIVER], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = clusters.NodeGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_node_group(request) + + # Establish that the response is the type that we expect. 
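+    # The faked JSON payload should deserialize back into an equivalent
+    # NodeGroup message on the client side.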
+ assert isinstance(response, clusters.NodeGroup) + assert response.name == "name_value" + assert response.roles == [clusters.NodeGroup.Role.DRIVER] + + +def test_get_node_group_rest_required_fields( + request_type=node_groups.GetNodeGroupRequest, +): + transport_class = transports.NodeGroupControllerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_node_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_node_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = clusters.NodeGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
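+            # (GET carries no request body, so unlike the POST variants
+            # above the canned transcode result omits the body key.)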
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = clusters.NodeGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_node_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_node_group_rest_unset_required_fields(): + transport = transports.NodeGroupControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_node_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_node_group_rest_interceptors(null_interceptor): + transport = transports.NodeGroupControllerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NodeGroupControllerRestInterceptor(), + ) + client = NodeGroupControllerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NodeGroupControllerRestInterceptor, "post_get_node_group" + ) as post, mock.patch.object( + transports.NodeGroupControllerRestInterceptor, "pre_get_node_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = node_groups.GetNodeGroupRequest.pb( + node_groups.GetNodeGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = clusters.NodeGroup.to_json(clusters.NodeGroup()) + + request = node_groups.GetNodeGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = clusters.NodeGroup() + + client.get_node_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_node_group_rest_bad_request( + transport: str = "rest", request_type=node_groups.GetNodeGroupRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_node_group(request) + + +def test_get_node_group_rest_flattened(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = clusters.NodeGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = clusters.NodeGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_node_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/regions/*/clusters/*/nodeGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_node_group_rest_flattened_error(transport: str = "rest"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_node_group( + node_groups.GetNodeGroupRequest(), + name="name_value", + ) + + +def test_get_node_group_rest_error(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.NodeGroupControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.NodeGroupControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeGroupControllerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.NodeGroupControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NodeGroupControllerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
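+    # (An API key and explicit credentials are mutually exclusive ways to
+    # authenticate, so the client rejects the combination outright.)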
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = NodeGroupControllerClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.NodeGroupControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = NodeGroupControllerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.NodeGroupControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = NodeGroupControllerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.NodeGroupControllerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.NodeGroupControllerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NodeGroupControllerGrpcTransport, + transports.NodeGroupControllerGrpcAsyncIOTransport, + transports.NodeGroupControllerRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = NodeGroupControllerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.NodeGroupControllerGrpcTransport, + ) + + +def test_node_group_controller_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.NodeGroupControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_node_group_controller_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dataproc_v1.services.node_group_controller.transports.NodeGroupControllerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.NodeGroupControllerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
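+    # The base class only defines the interface; the concrete gRPC and REST
+    # transports override each of these with a working implementation.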
+ methods = ( + "create_node_group", + "resize_node_group", + "get_node_group", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_node_group_controller_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.node_group_controller.transports.NodeGroupControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NodeGroupControllerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_node_group_controller_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dataproc_v1.services.node_group_controller.transports.NodeGroupControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.NodeGroupControllerTransport() + adc.assert_called_once() + + +def test_node_group_controller_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + NodeGroupControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NodeGroupControllerGrpcTransport, + transports.NodeGroupControllerGrpcAsyncIOTransport, + ], +) +def test_node_group_controller_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
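+    # google.auth.default is patched below, so no real credentials are required.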
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NodeGroupControllerGrpcTransport, + transports.NodeGroupControllerGrpcAsyncIOTransport, + transports.NodeGroupControllerRestTransport, + ], +) +def test_node_group_controller_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.NodeGroupControllerGrpcTransport, grpc_helpers), + (transports.NodeGroupControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_node_group_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.NodeGroupControllerGrpcTransport, + transports.NodeGroupControllerGrpcAsyncIOTransport, + ], +) +def test_node_group_controller_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
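+    # The callback's cert/key pair should be forwarded to grpc.ssl_channel_credentials.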
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_node_group_controller_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.NodeGroupControllerRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_node_group_controller_rest_lro_client():
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_node_group_controller_host_no_port(transport_name):
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataproc.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "dataproc.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://dataproc.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_node_group_controller_host_with_port(transport_name):
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataproc.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "dataproc.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://dataproc.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_node_group_controller_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = NodeGroupControllerClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = NodeGroupControllerClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_node_group._session
+    session2 = client2.transport.create_node_group._session
+    assert session1 != session2
+    session1 = client1.transport.resize_node_group._session
+    session2 = client2.transport.resize_node_group._session
+    assert session1 != session2
+    session1 = client1.transport.get_node_group._session
+    session2 = client2.transport.get_node_group._session
+    assert session1 != session2
+
+
+def test_node_group_controller_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.NodeGroupControllerGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_node_group_controller_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.NodeGroupControllerGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.NodeGroupControllerGrpcTransport,
+        transports.NodeGroupControllerGrpcAsyncIOTransport,
+    ],
+)
+def test_node_group_controller_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
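+# Even with client_cert_source=None, api_mtls_endpoint should trigger the deprecated
+# mTLS path, with SSL credentials coming from ADC (see the test below).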
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.NodeGroupControllerGrpcTransport,
+        transports.NodeGroupControllerGrpcAsyncIOTransport,
+    ],
+)
+def test_node_group_controller_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_node_group_controller_grpc_lro_client():
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_node_group_controller_grpc_lro_async_client():
+    client = NodeGroupControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_node_group_path():
+    project = "squid"
+    region = "clam"
+    cluster = "whelk"
+    node_group = "octopus"
+    expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(
+        project=project,
+        region=region,
+        cluster=cluster,
+        node_group=node_group,
+    )
+    actual = NodeGroupControllerClient.node_group_path(
+        project, region, cluster, node_group
+    )
+    assert expected == actual
+
+
+def test_parse_node_group_path():
+    expected = {
+        "project": "oyster",
+        "region": "nudibranch",
+        "cluster": "cuttlefish",
+        "node_group": "mussel",
+    }
+    path = NodeGroupControllerClient.node_group_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = NodeGroupControllerClient.parse_node_group_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "winkle"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = NodeGroupControllerClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "nautilus",
+    }
+    path = NodeGroupControllerClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = NodeGroupControllerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = NodeGroupControllerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = NodeGroupControllerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = NodeGroupControllerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = NodeGroupControllerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = NodeGroupControllerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = NodeGroupControllerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = NodeGroupControllerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = NodeGroupControllerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = NodeGroupControllerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = NodeGroupControllerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = NodeGroupControllerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = NodeGroupControllerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.NodeGroupControllerTransport, "_prep_wrapped_messages" + ) as prep: + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.NodeGroupControllerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = NodeGroupControllerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
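+    # A 400 status from the mocked session should surface as core_exceptions.BadRequest.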
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
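+        # DeleteOperation returns Empty, so the faked payload is None and the JSON body is "{}".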
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
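+    # The REST transport parses the JSON-encoded ListOperationsResponse set up below.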
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
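+    # Routing headers are passed to the stub through the metadata kwarg.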
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
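+    # The stub returns a FakeUnaryUnaryCall, so awaiting the client call yields None.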
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
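+        # The plain-dict request below is converted to a CancelOperationRequest by the client.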
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
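+    # The request's name should be echoed back in the x-goog-request-params metadata.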
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.ListOperationsResponse()
+        )
+        response = await client.list_operations(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = NodeGroupControllerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = NodeGroupControllerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = NodeGroupControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
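+        # FakeUnaryUnaryCall yields the wrapped response when awaited, mimicking a real grpc.aio unary call.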
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = NodeGroupControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (NodeGroupControllerClient, transports.NodeGroupControllerGrpcTransport), + ( + NodeGroupControllerAsyncClient, + transports.NodeGroupControllerGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py new file mode 100644 index 000000000000..ccf45729ce71 --- /dev/null +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -0,0 +1,8791 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
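+#
+# Unit tests for the dataproc_v1 WorkflowTemplateService clients, transports, and mixins.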
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.dataproc_v1.services.workflow_template_service import ( + WorkflowTemplateServiceAsyncClient, + WorkflowTemplateServiceClient, + pagers, + transports, +) +from google.cloud.dataproc_v1.types import clusters, jobs, shared, workflow_templates + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert WorkflowTemplateServiceClient._get_default_mtls_endpoint(None) is None + assert ( + WorkflowTemplateServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + WorkflowTemplateServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + WorkflowTemplateServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + WorkflowTemplateServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + WorkflowTemplateServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (WorkflowTemplateServiceClient, "grpc"), + (WorkflowTemplateServiceAsyncClient, "grpc_asyncio"), + (WorkflowTemplateServiceClient, "rest"), + ], +) +def test_workflow_template_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.WorkflowTemplateServiceGrpcTransport, "grpc"), + (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.WorkflowTemplateServiceRestTransport, "rest"), + ], +) +def test_workflow_template_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (WorkflowTemplateServiceClient, "grpc"), + (WorkflowTemplateServiceAsyncClient, "grpc_asyncio"), + (WorkflowTemplateServiceClient, "rest"), + ], +) +def test_workflow_template_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataproc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataproc.googleapis.com" + ) + + +def test_workflow_template_service_client_get_transport_class(): + transport = WorkflowTemplateServiceClient.get_transport_class() + available_transports = [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceRestTransport, + ] + assert transport in available_transports + + transport = WorkflowTemplateServiceClient.get_transport_class("grpc") + assert transport == transports.WorkflowTemplateServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceGrpcTransport, + "grpc", + ), + ( + WorkflowTemplateServiceAsyncClient, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + WorkflowTemplateServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkflowTemplateServiceClient), +) +@mock.patch.object( + WorkflowTemplateServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkflowTemplateServiceAsyncClient), +) +def test_workflow_template_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(WorkflowTemplateServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(WorkflowTemplateServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
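+    # "always" selects the mTLS endpoint even though no client certificate is configured.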
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceGrpcTransport, + "grpc", + "true", + ), + ( + WorkflowTemplateServiceAsyncClient, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceGrpcTransport, + "grpc", + "false", + ), + ( + WorkflowTemplateServiceAsyncClient, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceRestTransport, + "rest", + "true", + ), + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + WorkflowTemplateServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkflowTemplateServiceClient), +) +@mock.patch.object( + WorkflowTemplateServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkflowTemplateServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_workflow_template_service_client_mtls_env_auto( + 
client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
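+    # With no certificate available anywhere, the client should fall back to the regular endpoint without mTLS.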
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient] +) +@mock.patch.object( + WorkflowTemplateServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkflowTemplateServiceClient), +) +@mock.patch.object( + WorkflowTemplateServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(WorkflowTemplateServiceAsyncClient), +) +def test_workflow_template_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
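+    # When a default cert can be discovered, "auto" yields the mTLS endpoint and the cert source.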
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceGrpcTransport, + "grpc", + ), + ( + WorkflowTemplateServiceAsyncClient, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceRestTransport, + "rest", + ), + ], +) +def test_workflow_template_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + WorkflowTemplateServiceAsyncClient, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceRestTransport, + "rest", + None, + ), + ], +) +def test_workflow_template_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
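+    # The path is forwarded to the transport as credentials_file; the transport loads the credentials itself.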
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_workflow_template_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = WorkflowTemplateServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + WorkflowTemplateServiceAsyncClient, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_workflow_template_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
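+    # Patch the file loader, ADC, and the channel factory to observe which credentials reach create_channel.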
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.CreateWorkflowTemplateRequest, + dict, + ], +) +def test_create_workflow_template(request_type, transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + response = client.create_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +def test_create_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workflow_template), "__call__" + ) as call: + client.create_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() + + +@pytest.mark.asyncio +async def test_create_workflow_template_async( + transport: str = "grpc_asyncio", + request_type=workflow_templates.CreateWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
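+    # The mocked async stub must return an awaitable, so the response below is wrapped in FakeUnaryUnaryCall.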
+ with mock.patch.object( + type(client.transport.create_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + ) + response = await client.create_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +@pytest.mark.asyncio +async def test_create_workflow_template_async_from_dict(): + await test_create_workflow_template_async(request_type=dict) + + +def test_create_workflow_template_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.CreateWorkflowTemplateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workflow_template), "__call__" + ) as call: + call.return_value = workflow_templates.WorkflowTemplate() + client.create_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_workflow_template_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.CreateWorkflowTemplateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workflow_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflow_templates.WorkflowTemplate() + ) + await client.create_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_workflow_template_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workflow_templates.WorkflowTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
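+        # The flattened kwargs are assembled into a CreateWorkflowTemplateRequest before the stub is invoked.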
+        client.create_workflow_template(
+            parent="parent_value",
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val
+
+
+def test_create_workflow_template_flattened_error():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_workflow_template(
+            workflow_templates.CreateWorkflowTemplateRequest(),
+            parent="parent_value",
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_workflow_template_flattened_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workflow_templates.WorkflowTemplate()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_workflow_template(
+            parent="parent_value",
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_workflow_template_flattened_error_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_workflow_template(
+            workflow_templates.CreateWorkflowTemplateRequest(),
+            parent="parent_value",
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workflow_templates.GetWorkflowTemplateRequest,
+        dict,
+    ],
+)
+def test_get_workflow_template(request_type, transport: str = "grpc"):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + response = client.get_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.GetWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +def test_get_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workflow_template), "__call__" + ) as call: + client.get_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.GetWorkflowTemplateRequest() + + +@pytest.mark.asyncio +async def test_get_workflow_template_async( + transport: str = "grpc_asyncio", + request_type=workflow_templates.GetWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + ) + response = await client.get_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.GetWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +@pytest.mark.asyncio +async def test_get_workflow_template_async_from_dict(): + await test_get_workflow_template_async(request_type=dict) + + +def test_get_workflow_template_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.GetWorkflowTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_workflow_template), "__call__" + ) as call: + call.return_value = workflow_templates.WorkflowTemplate() + client.get_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_workflow_template_field_headers_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = workflow_templates.GetWorkflowTemplateRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_workflow_template), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workflow_templates.WorkflowTemplate()
+        )
+        await client.get_workflow_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_workflow_template_flattened():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = workflow_templates.WorkflowTemplate()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_workflow_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_workflow_template_flattened_error():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_workflow_template(
+            workflow_templates.GetWorkflowTemplateRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_workflow_template_flattened_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workflow_templates.WorkflowTemplate()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_workflow_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
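+        # Flattened values should surface as fields on the request proto passed as the first positional arg.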
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_workflow_template_flattened_error_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_workflow_template( + workflow_templates.GetWorkflowTemplateRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.InstantiateWorkflowTemplateRequest, + dict, + ], +) +def test_instantiate_workflow_template(request_type, transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.instantiate_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_instantiate_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_workflow_template), "__call__" + ) as call: + client.instantiate_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() + + +@pytest.mark.asyncio +async def test_instantiate_workflow_template_async( + transport: str = "grpc_asyncio", + request_type=workflow_templates.InstantiateWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.instantiate_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_instantiate_workflow_template_async_from_dict(): + await test_instantiate_workflow_template_async(request_type=dict) + + +def test_instantiate_workflow_template_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.InstantiateWorkflowTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_workflow_template), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.instantiate_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_instantiate_workflow_template_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.InstantiateWorkflowTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_workflow_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.instantiate_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_instantiate_workflow_template_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.instantiate_workflow_template( + name="name_value", + parameters={"key_value": "value_value"}, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
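+        # parameters is a map field, so the flattened dict should round-trip entry-for-entry onto the request.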
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].parameters
+        mock_val = {"key_value": "value_value"}
+        assert arg == mock_val
+
+
+def test_instantiate_workflow_template_flattened_error():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.instantiate_workflow_template(
+            workflow_templates.InstantiateWorkflowTemplateRequest(),
+            name="name_value",
+            parameters={"key_value": "value_value"},
+        )
+
+
+@pytest.mark.asyncio
+async def test_instantiate_workflow_template_flattened_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.instantiate_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.instantiate_workflow_template(
+            name="name_value",
+            parameters={"key_value": "value_value"},
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].parameters
+        mock_val = {"key_value": "value_value"}
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_instantiate_workflow_template_flattened_error_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.instantiate_workflow_template(
+            workflow_templates.InstantiateWorkflowTemplateRequest(),
+            name="name_value",
+            parameters={"key_value": "value_value"},
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workflow_templates.InstantiateInlineWorkflowTemplateRequest,
+        dict,
+    ],
+)
+def test_instantiate_inline_workflow_template(request_type, transport: str = "grpc"):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.instantiate_inline_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.instantiate_inline_workflow_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest()
+
+    # Establish that the response is the type that we expect.
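+    # Long-running methods return an operation future wrapping the raw
+    # operations_pb2.Operation rather than the Operation itself.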
+ assert isinstance(response, future.Future) + + +def test_instantiate_inline_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_inline_workflow_template), "__call__" + ) as call: + client.instantiate_inline_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() + + +@pytest.mark.asyncio +async def test_instantiate_inline_workflow_template_async( + transport: str = "grpc_asyncio", + request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_inline_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.instantiate_inline_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_instantiate_inline_workflow_template_async_from_dict(): + await test_instantiate_inline_workflow_template_async(request_type=dict) + + +def test_instantiate_inline_workflow_template_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.instantiate_inline_workflow_template), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.instantiate_inline_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_instantiate_inline_workflow_template_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
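+    # (`parent` is a URI field, so the client is expected to mirror it into
+    # the x-goog-request-params routing header asserted below.)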
+    request = workflow_templates.InstantiateInlineWorkflowTemplateRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.instantiate_inline_workflow_template), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.instantiate_inline_workflow_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_instantiate_inline_workflow_template_flattened():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.instantiate_inline_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.instantiate_inline_workflow_template(
+            parent="parent_value",
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val
+
+
+def test_instantiate_inline_workflow_template_flattened_error():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.instantiate_inline_workflow_template(
+            workflow_templates.InstantiateInlineWorkflowTemplateRequest(),
+            parent="parent_value",
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_instantiate_inline_workflow_template_flattened_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.instantiate_inline_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.instantiate_inline_workflow_template(
+            parent="parent_value",
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].template + mock_val = workflow_templates.WorkflowTemplate(id="id_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_instantiate_inline_workflow_template_flattened_error_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.instantiate_inline_workflow_template( + workflow_templates.InstantiateInlineWorkflowTemplateRequest(), + parent="parent_value", + template=workflow_templates.WorkflowTemplate(id="id_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.UpdateWorkflowTemplateRequest, + dict, + ], +) +def test_update_workflow_template(request_type, transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + response = client.update_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +def test_update_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workflow_template), "__call__" + ) as call: + client.update_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() + + +@pytest.mark.asyncio +async def test_update_workflow_template_async( + transport: str = "grpc_asyncio", + request_type=workflow_templates.UpdateWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
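+        # FakeUnaryUnaryCall gives the mock an awaitable result, mimicking a
+        # real grpc.aio unary-unary call.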
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + ) + response = await client.update_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +@pytest.mark.asyncio +async def test_update_workflow_template_async_from_dict(): + await test_update_workflow_template_async(request_type=dict) + + +def test_update_workflow_template_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.UpdateWorkflowTemplateRequest() + + request.template.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workflow_template), "__call__" + ) as call: + call.return_value = workflow_templates.WorkflowTemplate() + client.update_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "template.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_workflow_template_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.UpdateWorkflowTemplateRequest() + + request.template.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workflow_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflow_templates.WorkflowTemplate() + ) + await client.update_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "template.name=name_value", + ) in kw["metadata"] + + +def test_update_workflow_template_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = workflow_templates.WorkflowTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
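+        # update_workflow_template exposes a single flattened field, `template`.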
+        client.update_workflow_template(
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val
+
+
+def test_update_workflow_template_flattened_error():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_workflow_template(
+            workflow_templates.UpdateWorkflowTemplateRequest(),
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_workflow_template_flattened_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workflow_templates.WorkflowTemplate()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_workflow_template(
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].template
+        mock_val = workflow_templates.WorkflowTemplate(id="id_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_workflow_template_flattened_error_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_workflow_template(
+            workflow_templates.UpdateWorkflowTemplateRequest(),
+            template=workflow_templates.WorkflowTemplate(id="id_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workflow_templates.ListWorkflowTemplatesRequest,
+        dict,
+    ],
+)
+def test_list_workflow_templates(request_type, transport: str = "grpc"):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflow_templates), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = workflow_templates.ListWorkflowTemplatesResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_workflow_templates(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkflowTemplatesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_workflow_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workflow_templates), "__call__" + ) as call: + client.list_workflow_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() + + +@pytest.mark.asyncio +async def test_list_workflow_templates_async( + transport: str = "grpc_asyncio", + request_type=workflow_templates.ListWorkflowTemplatesRequest, +): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workflow_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + workflow_templates.ListWorkflowTemplatesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_workflow_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkflowTemplatesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_workflow_templates_async_from_dict(): + await test_list_workflow_templates_async(request_type=dict) + + +def test_list_workflow_templates_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.ListWorkflowTemplatesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_workflow_templates), "__call__" + ) as call: + call.return_value = workflow_templates.ListWorkflowTemplatesResponse() + client.list_workflow_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
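+    # The routing header travels in the `metadata` kwarg of the stub call.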
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_workflow_templates_field_headers_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = workflow_templates.ListWorkflowTemplatesRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflow_templates), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workflow_templates.ListWorkflowTemplatesResponse()
+        )
+        await client.list_workflow_templates(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_workflow_templates_flattened():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflow_templates), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = workflow_templates.ListWorkflowTemplatesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_workflow_templates(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_workflow_templates_flattened_error():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_workflow_templates(
+            workflow_templates.ListWorkflowTemplatesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_workflow_templates_flattened_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflow_templates), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            workflow_templates.ListWorkflowTemplatesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_workflow_templates(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_workflow_templates_flattened_error_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_workflow_templates(
+            workflow_templates.ListWorkflowTemplatesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_workflow_templates_pager(transport_name: str = "grpc"):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflow_templates), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[],
+                next_page_token="def",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_workflow_templates(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, workflow_templates.WorkflowTemplate) for i in results)
+
+
+def test_list_workflow_templates_pages(transport_name: str = "grpc"):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflow_templates), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[],
+                next_page_token="def",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_workflow_templates(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_workflow_templates_async_pager():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
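+    # new_callable=mock.AsyncMock is needed here so the async pager can
+    # `await` each mocked page.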
+    with mock.patch.object(
+        type(client.transport.list_workflow_templates),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[],
+                next_page_token="def",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_workflow_templates(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, workflow_templates.WorkflowTemplate) for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_workflow_templates_async_pages():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_workflow_templates),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[],
+                next_page_token="def",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            workflow_templates.ListWorkflowTemplatesResponse(
+                templates=[
+                    workflow_templates.WorkflowTemplate(),
+                    workflow_templates.WorkflowTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_workflow_templates(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        workflow_templates.DeleteWorkflowTemplateRequest,
+        dict,
+    ],
+)
+def test_delete_workflow_template(request_type, transport: str = "grpc"):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
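+        # DeleteWorkflowTemplate returns google.protobuf.Empty, which the
+        # client surfaces as None.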
+ call.return_value = None + response = client.delete_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_workflow_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workflow_template), "__call__" + ) as call: + client.delete_workflow_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() + + +@pytest.mark.asyncio +async def test_delete_workflow_template_async( + transport: str = "grpc_asyncio", + request_type=workflow_templates.DeleteWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workflow_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_workflow_template_async_from_dict(): + await test_delete_workflow_template_async(request_type=dict) + + +def test_delete_workflow_template_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = workflow_templates.DeleteWorkflowTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_workflow_template), "__call__" + ) as call: + call.return_value = None + client.delete_workflow_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_workflow_template_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = workflow_templates.DeleteWorkflowTemplateRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workflow_template), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_workflow_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_workflow_template_flattened():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_workflow_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_workflow_template_flattened_error():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_workflow_template(
+            workflow_templates.DeleteWorkflowTemplateRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_workflow_template_flattened_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_workflow_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_workflow_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_workflow_template_flattened_error_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_workflow_template( + workflow_templates.DeleteWorkflowTemplateRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.CreateWorkflowTemplateRequest, + dict, + ], +) +def test_create_workflow_template_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["template"] = { + "id": "id_value", + "name": "name_value", + "version": 774, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "placement": { + "managed_cluster": { + "cluster_name": "cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": { + "node_group_uri": "node_group_uri_value" + }, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": { + "enable_confidential_compute": True + }, + }, + "master_config": { + "num_instances": 1399, + "instance_names": [ + "instance_names_value1", + "instance_names_value2", + ], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + }, + "autoscaling_config": {"policy_uri": 
"policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": { + "http_ports": {}, + "enable_http_port_access": True, + }, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "labels": {}, + }, + "cluster_selector": {"zone": "zone_value", "cluster_labels": {}}, + }, + "jobs": [ + { + "step_id": "step_id_value", + "hadoop_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {"driver_log_levels": {}}, + }, + "spark_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "pyspark_job": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": [ + "python_file_uris_value1", + "python_file_uris_value2", + ], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "hive_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {"queries": ["queries_value1", "queries_value2"]}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "pig_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "spark_r_job": { + "main_r_file_uri": 
"main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "spark_sql_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "presto_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "labels": {}, + "scheduling": { + "max_failures_per_hour": 2243, + "max_failures_total": 1923, + }, + "prerequisite_step_ids": [ + "prerequisite_step_ids_value1", + "prerequisite_step_ids_value2", + ], + } + ], + "parameters": [ + { + "name": "name_value", + "fields": ["fields_value1", "fields_value2"], + "description": "description_value", + "validation": { + "regex": {"regexes": ["regexes_value1", "regexes_value2"]}, + "values": {"values": ["values_value1", "values_value2"]}, + }, + } + ], + "dag_timeout": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_workflow_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +def test_create_workflow_template_rest_required_fields( + request_type=workflow_templates.CreateWorkflowTemplateRequest, +): + transport_class = transports.WorkflowTemplateServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
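+            # (path_template.transcode normally returns a dict with "uri",
+            # "method", "query_params" and, optionally, "body"; the stub
+            # below fabricates that shape directly.)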
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_workflow_template(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_workflow_template_rest_unset_required_fields():
+    transport = transports.WorkflowTemplateServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_workflow_template._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "template",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_workflow_template_rest_interceptors(null_interceptor):
+    transport = transports.WorkflowTemplateServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.WorkflowTemplateServiceRestInterceptor(),
+    )
+    client = WorkflowTemplateServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.WorkflowTemplateServiceRestInterceptor,
+        "post_create_workflow_template",
+    ) as post, mock.patch.object(
+        transports.WorkflowTemplateServiceRestInterceptor,
+        "pre_create_workflow_template",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = workflow_templates.CreateWorkflowTemplateRequest.pb(
+            workflow_templates.CreateWorkflowTemplateRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = workflow_templates.WorkflowTemplate.to_json(
+            workflow_templates.WorkflowTemplate()
+        )
+
+        request = workflow_templates.CreateWorkflowTemplateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = workflow_templates.WorkflowTemplate()
+
+        client.create_workflow_template(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_workflow_template_rest_bad_request(
+    transport: str = "rest",
+    request_type=workflow_templates.CreateWorkflowTemplateRequest,
+):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["template"] = {
+        "id": "id_value",
+        "name": "name_value",
+        "version": 774,
+        "create_time": {"seconds": 751, "nanos": 543},
+        "update_time": {},
+        "labels": {},
+        "placement": {
+            "managed_cluster": {
+                "cluster_name": "cluster_name_value",
+                "config": {
+                    "config_bucket":
"config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": { + "node_group_uri": "node_group_uri_value" + }, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": { + "enable_confidential_compute": True + }, + }, + "master_config": { + "num_instances": 1399, + "instance_names": [ + "instance_names_value1", + "instance_names_value2", + ], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + }, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": 
{"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": { + "http_ports": {}, + "enable_http_port_access": True, + }, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "labels": {}, + }, + "cluster_selector": {"zone": "zone_value", "cluster_labels": {}}, + }, + "jobs": [ + { + "step_id": "step_id_value", + "hadoop_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {"driver_log_levels": {}}, + }, + "spark_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "pyspark_job": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": [ + "python_file_uris_value1", + "python_file_uris_value2", + ], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "hive_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {"queries": ["queries_value1", "queries_value2"]}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "pig_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "spark_r_job": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "spark_sql_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "presto_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "labels": {}, + "scheduling": { + "max_failures_per_hour": 2243, + "max_failures_total": 1923, + }, + "prerequisite_step_ids": [ + 
"prerequisite_step_ids_value1", + "prerequisite_step_ids_value2", + ], + } + ], + "parameters": [ + { + "name": "name_value", + "fields": ["fields_value1", "fields_value2"], + "description": "description_value", + "validation": { + "regex": {"regexes": ["regexes_value1", "regexes_value2"]}, + "values": {"values": ["values_value1", "values_value2"]}, + }, + } + ], + "dag_timeout": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_workflow_template(request) + + +def test_create_workflow_template_rest_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + template=workflow_templates.WorkflowTemplate(id="id_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_workflow_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/workflowTemplates" + % client.transport._host, + args[1], + ) + + +def test_create_workflow_template_rest_flattened_error(transport: str = "rest"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_workflow_template( + workflow_templates.CreateWorkflowTemplateRequest(), + parent="parent_value", + template=workflow_templates.WorkflowTemplate(id="id_value"), + ) + + +def test_create_workflow_template_rest_error(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.GetWorkflowTemplateRequest, + dict, + ], +) +def test_get_workflow_template_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_workflow_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +def test_get_workflow_template_rest_required_fields( + request_type=workflow_templates.GetWorkflowTemplateRequest, +): + transport_class = transports.WorkflowTemplateServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_workflow_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("version",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
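+            # transcode() is stubbed with a hand-built result: a dummy URI, the
+            # HTTP verb, and the request itself as query_params.  A real result
+            # would look something like
+            #   {"uri": "v1/projects/p/.../workflowTemplates/t", "method": "get", ...}
+            # GET carries no request body, so no "body" key is added here.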
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_workflow_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_workflow_template_rest_unset_required_fields(): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_workflow_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(("version",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workflow_template_rest_interceptors(null_interceptor): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkflowTemplateServiceRestInterceptor(), + ) + client = WorkflowTemplateServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, "post_get_workflow_template" + ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, "pre_get_workflow_template" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workflow_templates.GetWorkflowTemplateRequest.pb( + workflow_templates.GetWorkflowTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workflow_templates.WorkflowTemplate.to_json( + workflow_templates.WorkflowTemplate() + ) + + request = workflow_templates.GetWorkflowTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workflow_templates.WorkflowTemplate() + + client.get_workflow_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_workflow_template_rest_bad_request( + transport: str = "rest", request_type=workflow_templates.GetWorkflowTemplateRequest +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
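+    # The 400 status on the canned Response must be translated by the REST
+    # transport into google.api_core.exceptions.BadRequest rather than
+    # surfacing as a raw requests/HTTP error.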
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_workflow_template(request) + + +def test_get_workflow_template_rest_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_workflow_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workflowTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_get_workflow_template_rest_flattened_error(transport: str = "rest"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_workflow_template( + workflow_templates.GetWorkflowTemplateRequest(), + name="name_value", + ) + + +def test_get_workflow_template_rest_error(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.InstantiateWorkflowTemplateRequest, + dict, + ], +) +def test_instantiate_workflow_template_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.instantiate_workflow_template(request) + + # Establish that the response is the type that we expect. 
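+    # instantiate_workflow_template returns a long-running operation, so the
+    # client wraps the returned operations_pb2.Operation and the raw name is
+    # reached via ``response.operation`` rather than on the response itself.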
+ assert response.operation.name == "operations/spam" + + +def test_instantiate_workflow_template_rest_required_fields( + request_type=workflow_templates.InstantiateWorkflowTemplateRequest, +): + transport_class = transports.WorkflowTemplateServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).instantiate_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).instantiate_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
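+            # Unlike the GET case earlier, instantiate is a POST, so the
+            # fabricated transcode result also carries the request as its
+            # "body" entry (added just below).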
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.instantiate_workflow_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_instantiate_workflow_template_rest_unset_required_fields(): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.instantiate_workflow_template._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_instantiate_workflow_template_rest_interceptors(null_interceptor): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkflowTemplateServiceRestInterceptor(), + ) + client = WorkflowTemplateServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_instantiate_workflow_template", + ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "pre_instantiate_workflow_template", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workflow_templates.InstantiateWorkflowTemplateRequest.pb( + workflow_templates.InstantiateWorkflowTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = workflow_templates.InstantiateWorkflowTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.instantiate_workflow_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_instantiate_workflow_template_rest_bad_request( + transport: str = "rest", + request_type=workflow_templates.InstantiateWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
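+    # As in the other bad-request tests, the canned 400 Response should be
+    # mapped to core_exceptions.BadRequest by the REST transport.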
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.instantiate_workflow_template(request) + + +def test_instantiate_workflow_template_rest_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + parameters={"key_value": "value_value"}, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.instantiate_workflow_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate" + % client.transport._host, + args[1], + ) + + +def test_instantiate_workflow_template_rest_flattened_error(transport: str = "rest"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
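+    # Supplying a fully-formed request object together with flattened keyword
+    # arguments is ambiguous, so the generated client raises ValueError before
+    # any HTTP call is attempted.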
+ with pytest.raises(ValueError): + client.instantiate_workflow_template( + workflow_templates.InstantiateWorkflowTemplateRequest(), + name="name_value", + parameters={"key_value": "value_value"}, + ) + + +def test_instantiate_workflow_template_rest_error(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.InstantiateInlineWorkflowTemplateRequest, + dict, + ], +) +def test_instantiate_inline_workflow_template_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["template"] = { + "id": "id_value", + "name": "name_value", + "version": 774, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "placement": { + "managed_cluster": { + "cluster_name": "cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": { + "node_group_uri": "node_group_uri_value" + }, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": { + "enable_confidential_compute": True + }, + }, + "master_config": { + "num_instances": 1399, + "instance_names": [ + "instance_names_value1", + "instance_names_value2", + ], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { 
+ "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + }, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": { + "http_ports": {}, + "enable_http_port_access": True, + }, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "labels": {}, + }, + "cluster_selector": {"zone": "zone_value", "cluster_labels": {}}, + }, + "jobs": [ + { + "step_id": "step_id_value", + "hadoop_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {"driver_log_levels": {}}, + }, + "spark_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "pyspark_job": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": [ + "python_file_uris_value1", + "python_file_uris_value2", + ], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "hive_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {"queries": ["queries_value1", "queries_value2"]}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "pig_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, 
+ "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "spark_r_job": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "spark_sql_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "presto_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "labels": {}, + "scheduling": { + "max_failures_per_hour": 2243, + "max_failures_total": 1923, + }, + "prerequisite_step_ids": [ + "prerequisite_step_ids_value1", + "prerequisite_step_ids_value2", + ], + } + ], + "parameters": [ + { + "name": "name_value", + "fields": ["fields_value1", "fields_value2"], + "description": "description_value", + "validation": { + "regex": {"regexes": ["regexes_value1", "regexes_value2"]}, + "values": {"values": ["values_value1", "values_value2"]}, + }, + } + ], + "dag_timeout": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.instantiate_inline_workflow_template(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_instantiate_inline_workflow_template_rest_required_fields( + request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest, +): + transport_class = transports.WorkflowTemplateServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).instantiate_inline_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).instantiate_inline_workflow_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.instantiate_inline_workflow_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_instantiate_inline_workflow_template_rest_unset_required_fields(): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.instantiate_inline_workflow_template._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "parent", + "template", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_instantiate_inline_workflow_template_rest_interceptors(null_interceptor): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkflowTemplateServiceRestInterceptor(), + ) + client = WorkflowTemplateServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_instantiate_inline_workflow_template", + ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "pre_instantiate_inline_workflow_template", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workflow_templates.InstantiateInlineWorkflowTemplateRequest.pb( + workflow_templates.InstantiateInlineWorkflowTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + 
operations_pb2.Operation() + ) + + request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.instantiate_inline_workflow_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_instantiate_inline_workflow_template_rest_bad_request( + transport: str = "rest", + request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["template"] = { + "id": "id_value", + "name": "name_value", + "version": 774, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "placement": { + "managed_cluster": { + "cluster_name": "cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": { + "node_group_uri": "node_group_uri_value" + }, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": { + "enable_confidential_compute": True + }, + }, + "master_config": { + "num_instances": 1399, + "instance_names": [ + "instance_names_value1", + "instance_names_value2", + ], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + 
"optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + }, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": { + "http_ports": {}, + "enable_http_port_access": True, + }, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "labels": {}, + }, + "cluster_selector": {"zone": "zone_value", "cluster_labels": {}}, + }, + "jobs": [ + { + "step_id": "step_id_value", + "hadoop_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {"driver_log_levels": {}}, + }, + "spark_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "pyspark_job": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": [ + "python_file_uris_value1", + "python_file_uris_value2", + ], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "hive_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {"queries": ["queries_value1", "queries_value2"]}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "pig_job": 
{ + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "spark_r_job": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "spark_sql_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "presto_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "labels": {}, + "scheduling": { + "max_failures_per_hour": 2243, + "max_failures_total": 1923, + }, + "prerequisite_step_ids": [ + "prerequisite_step_ids_value1", + "prerequisite_step_ids_value2", + ], + } + ], + "parameters": [ + { + "name": "name_value", + "fields": ["fields_value1", "fields_value2"], + "description": "description_value", + "validation": { + "regex": {"regexes": ["regexes_value1", "regexes_value2"]}, + "values": {"values": ["values_value1", "values_value2"]}, + }, + } + ], + "dag_timeout": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.instantiate_inline_workflow_template(request) + + +def test_instantiate_inline_workflow_template_rest_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + template=workflow_templates.WorkflowTemplate(id="id_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.instantiate_inline_workflow_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
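+    # path_template.validate checks that the single recorded call hit a URI
+    # matching the instantiateInline http rule, i.e. that the flattened
+    # arguments were expanded into the expected resource path.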
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" + % client.transport._host, + args[1], + ) + + +def test_instantiate_inline_workflow_template_rest_flattened_error( + transport: str = "rest", +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.instantiate_inline_workflow_template( + workflow_templates.InstantiateInlineWorkflowTemplateRequest(), + parent="parent_value", + template=workflow_templates.WorkflowTemplate(id="id_value"), + ) + + +def test_instantiate_inline_workflow_template_rest_error(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.UpdateWorkflowTemplateRequest, + dict, + ], +) +def test_update_workflow_template_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "template": { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + } + request_init["template"] = { + "id": "id_value", + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3", + "version": 774, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "placement": { + "managed_cluster": { + "cluster_name": "cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": { + "node_group_uri": "node_group_uri_value" + }, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": { + "enable_confidential_compute": True + }, + }, + "master_config": { + "num_instances": 1399, + "instance_names": [ + "instance_names_value1", + "instance_names_value2", + ], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + 
"accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + }, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, + "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": { + "http_ports": {}, + "enable_http_port_access": True, + }, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "labels": {}, + }, + "cluster_selector": {"zone": "zone_value", "cluster_labels": {}}, + }, + "jobs": [ + { + "step_id": "step_id_value", + "hadoop_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {"driver_log_levels": {}}, + }, + "spark_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "pyspark_job": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + 
"python_file_uris": [ + "python_file_uris_value1", + "python_file_uris_value2", + ], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "hive_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {"queries": ["queries_value1", "queries_value2"]}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "pig_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "spark_r_job": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "spark_sql_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "presto_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "labels": {}, + "scheduling": { + "max_failures_per_hour": 2243, + "max_failures_total": 1923, + }, + "prerequisite_step_ids": [ + "prerequisite_step_ids_value1", + "prerequisite_step_ids_value2", + ], + } + ], + "parameters": [ + { + "name": "name_value", + "fields": ["fields_value1", "fields_value2"], + "description": "description_value", + "validation": { + "regex": {"regexes": ["regexes_value1", "regexes_value2"]}, + "values": {"values": ["values_value1", "values_value2"]}, + }, + } + ], + "dag_timeout": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate( + id="id_value", + name="name_value", + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_workflow_template(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, workflow_templates.WorkflowTemplate) + assert response.id == "id_value" + assert response.name == "name_value" + assert response.version == 774 + + +def test_update_workflow_template_rest_required_fields( + request_type=workflow_templates.UpdateWorkflowTemplateRequest, +): + transport_class = transports.WorkflowTemplateServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_workflow_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_workflow_template_rest_unset_required_fields(): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_workflow_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("template",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_workflow_template_rest_interceptors(null_interceptor): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkflowTemplateServiceRestInterceptor(), + ) + client = WorkflowTemplateServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_update_workflow_template", + ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "pre_update_workflow_template", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workflow_templates.UpdateWorkflowTemplateRequest.pb( + workflow_templates.UpdateWorkflowTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = workflow_templates.WorkflowTemplate.to_json( + workflow_templates.WorkflowTemplate() + ) + + request = workflow_templates.UpdateWorkflowTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workflow_templates.WorkflowTemplate() + + client.update_workflow_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_workflow_template_rest_bad_request( + transport: str = "rest", + request_type=workflow_templates.UpdateWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "template": { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + } + request_init["template"] = { + "id": "id_value", + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3", + "version": 774, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "placement": { + "managed_cluster": { + "cluster_name": 
"cluster_name_value", + "config": { + "config_bucket": "config_bucket_value", + "temp_bucket": "temp_bucket_value", + "gce_cluster_config": { + "zone_uri": "zone_uri_value", + "network_uri": "network_uri_value", + "subnetwork_uri": "subnetwork_uri_value", + "internal_ip_only": True, + "private_ipv6_google_access": 1, + "service_account": "service_account_value", + "service_account_scopes": [ + "service_account_scopes_value1", + "service_account_scopes_value2", + ], + "tags": ["tags_value1", "tags_value2"], + "metadata": {}, + "reservation_affinity": { + "consume_reservation_type": 1, + "key": "key_value", + "values": ["values_value1", "values_value2"], + }, + "node_group_affinity": { + "node_group_uri": "node_group_uri_value" + }, + "shielded_instance_config": { + "enable_secure_boot": True, + "enable_vtpm": True, + "enable_integrity_monitoring": True, + }, + "confidential_instance_config": { + "enable_confidential_compute": True + }, + }, + "master_config": { + "num_instances": 1399, + "instance_names": [ + "instance_names_value1", + "instance_names_value2", + ], + "instance_references": [ + { + "instance_name": "instance_name_value", + "instance_id": "instance_id_value", + "public_key": "public_key_value", + "public_ecies_key": "public_ecies_key_value", + } + ], + "image_uri": "image_uri_value", + "machine_type_uri": "machine_type_uri_value", + "disk_config": { + "boot_disk_type": "boot_disk_type_value", + "boot_disk_size_gb": 1792, + "num_local_ssds": 1494, + "local_ssd_interface": "local_ssd_interface_value", + }, + "is_preemptible": True, + "preemptibility": 1, + "managed_group_config": { + "instance_template_name": "instance_template_name_value", + "instance_group_manager_name": "instance_group_manager_name_value", + "instance_group_manager_uri": "instance_group_manager_uri_value", + }, + "accelerators": [ + { + "accelerator_type_uri": "accelerator_type_uri_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "min_num_instances": 1818, + "instance_flexibility_policy": { + "instance_selection_list": [ + { + "machine_types": [ + "machine_types_value1", + "machine_types_value2", + ], + "rank": 428, + } + ], + "instance_selection_results": [ + {"machine_type": "machine_type_value", "vm_count": 875} + ], + }, + }, + "worker_config": {}, + "secondary_worker_config": {}, + "software_config": { + "image_version": "image_version_value", + "properties": {}, + "optional_components": [5], + }, + "initialization_actions": [ + { + "executable_file": "executable_file_value", + "execution_timeout": {"seconds": 751, "nanos": 543}, + } + ], + "encryption_config": { + "gce_pd_kms_key_name": "gce_pd_kms_key_name_value" + }, + "autoscaling_config": {"policy_uri": "policy_uri_value"}, + "security_config": { + "kerberos_config": { + "enable_kerberos": True, + "root_principal_password_uri": "root_principal_password_uri_value", + "kms_key_uri": "kms_key_uri_value", + "keystore_uri": "keystore_uri_value", + "truststore_uri": "truststore_uri_value", + "keystore_password_uri": "keystore_password_uri_value", + "key_password_uri": "key_password_uri_value", + "truststore_password_uri": "truststore_password_uri_value", + "cross_realm_trust_realm": "cross_realm_trust_realm_value", + "cross_realm_trust_kdc": "cross_realm_trust_kdc_value", + "cross_realm_trust_admin_server": "cross_realm_trust_admin_server_value", + "cross_realm_trust_shared_password_uri": "cross_realm_trust_shared_password_uri_value", + "kdc_db_key_uri": "kdc_db_key_uri_value", + "tgt_lifetime_hours": 1933, 
+ "realm": "realm_value", + }, + "identity_config": {"user_service_account_mapping": {}}, + }, + "lifecycle_config": { + "idle_delete_ttl": {}, + "auto_delete_time": {}, + "auto_delete_ttl": {}, + "idle_start_time": {}, + }, + "endpoint_config": { + "http_ports": {}, + "enable_http_port_access": True, + }, + "metastore_config": { + "dataproc_metastore_service": "dataproc_metastore_service_value" + }, + "dataproc_metric_config": { + "metrics": [ + { + "metric_source": 1, + "metric_overrides": [ + "metric_overrides_value1", + "metric_overrides_value2", + ], + } + ] + }, + "auxiliary_node_groups": [ + { + "node_group": { + "name": "name_value", + "roles": [1], + "node_group_config": {}, + "labels": {}, + }, + "node_group_id": "node_group_id_value", + } + ], + }, + "labels": {}, + }, + "cluster_selector": {"zone": "zone_value", "cluster_labels": {}}, + }, + "jobs": [ + { + "step_id": "step_id_value", + "hadoop_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {"driver_log_levels": {}}, + }, + "spark_job": { + "main_jar_file_uri": "main_jar_file_uri_value", + "main_class": "main_class_value", + "args": ["args_value1", "args_value2"], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "pyspark_job": { + "main_python_file_uri": "main_python_file_uri_value", + "args": ["args_value1", "args_value2"], + "python_file_uris": [ + "python_file_uris_value1", + "python_file_uris_value2", + ], + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "hive_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {"queries": ["queries_value1", "queries_value2"]}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + }, + "pig_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "spark_r_job": { + "main_r_file_uri": "main_r_file_uri_value", + "args": ["args_value1", "args_value2"], + "file_uris": ["file_uris_value1", "file_uris_value2"], + "archive_uris": ["archive_uris_value1", "archive_uris_value2"], + "properties": {}, + "logging_config": {}, + }, + "spark_sql_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "script_variables": {}, + "properties": {}, + "jar_file_uris": ["jar_file_uris_value1", "jar_file_uris_value2"], + "logging_config": {}, + }, + "presto_job": { + "query_file_uri": "query_file_uri_value", + "query_list": {}, + "continue_on_failure": True, + "output_format": "output_format_value", + "client_tags": ["client_tags_value1", "client_tags_value2"], + "properties": {}, + "logging_config": {}, + }, + "labels": {}, + "scheduling": { + "max_failures_per_hour": 2243, + "max_failures_total": 1923, 
+ }, + "prerequisite_step_ids": [ + "prerequisite_step_ids_value1", + "prerequisite_step_ids_value2", + ], + } + ], + "parameters": [ + { + "name": "name_value", + "fields": ["fields_value1", "fields_value2"], + "description": "description_value", + "validation": { + "regex": {"regexes": ["regexes_value1", "regexes_value2"]}, + "values": {"values": ["values_value1", "values_value2"]}, + }, + } + ], + "dag_timeout": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_workflow_template(request) + + +def test_update_workflow_template_rest_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workflow_templates.WorkflowTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = { + "template": { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + template=workflow_templates.WorkflowTemplate(id="id_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_workflow_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{template.name=projects/*/locations/*/workflowTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_update_workflow_template_rest_flattened_error(transport: str = "rest"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
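+    # The generated client treats the request object and flattened keyword
+    # arguments as mutually exclusive ways of building the same request, so
+    # passing both raises ValueError instead of silently merging them.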
+ with pytest.raises(ValueError): + client.update_workflow_template( + workflow_templates.UpdateWorkflowTemplateRequest(), + template=workflow_templates.WorkflowTemplate(id="id_value"), + ) + + +def test_update_workflow_template_rest_error(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.ListWorkflowTemplatesRequest, + dict, + ], +) +def test_list_workflow_templates_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workflow_templates.ListWorkflowTemplatesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workflow_templates.ListWorkflowTemplatesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_workflow_templates(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkflowTemplatesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_workflow_templates_rest_required_fields( + request_type=workflow_templates.ListWorkflowTemplatesRequest, +): + transport_class = transports.WorkflowTemplateServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workflow_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_workflow_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = workflow_templates.ListWorkflowTemplatesResponse() + # Mock the http request call within the method and fake a response. 
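+    # Patching requests.Session.request at the class level intercepts the
+    # call regardless of which Session instance the REST transport created.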
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = workflow_templates.ListWorkflowTemplatesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_workflow_templates(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_workflow_templates_rest_unset_required_fields(): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_workflow_templates._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workflow_templates_rest_interceptors(null_interceptor): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkflowTemplateServiceRestInterceptor(), + ) + client = WorkflowTemplateServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_list_workflow_templates", + ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, "pre_list_workflow_templates" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = workflow_templates.ListWorkflowTemplatesRequest.pb( + workflow_templates.ListWorkflowTemplatesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + workflow_templates.ListWorkflowTemplatesResponse.to_json( + workflow_templates.ListWorkflowTemplatesResponse() + ) + ) + + request = workflow_templates.ListWorkflowTemplatesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = workflow_templates.ListWorkflowTemplatesResponse() + + client.list_workflow_templates( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_workflow_templates_rest_bad_request( + transport: str = "rest", + request_type=workflow_templates.ListWorkflowTemplatesRequest, +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_workflow_templates(request) + + +def test_list_workflow_templates_rest_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = workflow_templates.ListWorkflowTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = workflow_templates.ListWorkflowTemplatesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_workflow_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/workflowTemplates" + % client.transport._host, + args[1], + ) + + +def test_list_workflow_templates_rest_flattened_error(transport: str = "rest"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_workflow_templates( + workflow_templates.ListWorkflowTemplatesRequest(), + parent="parent_value", + ) + + +def test_list_workflow_templates_rest_pager(transport: str = "rest"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
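+        # The pager keeps requesting while next_page_token is non-empty, so
+        # the four pages below ("abc", "def", "ghi", and then no token) yield
+        # six templates in total; req.side_effect serves one response per call.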
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + workflow_templates.ListWorkflowTemplatesResponse( + templates=[ + workflow_templates.WorkflowTemplate(), + workflow_templates.WorkflowTemplate(), + workflow_templates.WorkflowTemplate(), + ], + next_page_token="abc", + ), + workflow_templates.ListWorkflowTemplatesResponse( + templates=[], + next_page_token="def", + ), + workflow_templates.ListWorkflowTemplatesResponse( + templates=[ + workflow_templates.WorkflowTemplate(), + ], + next_page_token="ghi", + ), + workflow_templates.ListWorkflowTemplatesResponse( + templates=[ + workflow_templates.WorkflowTemplate(), + workflow_templates.WorkflowTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + workflow_templates.ListWorkflowTemplatesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_workflow_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, workflow_templates.WorkflowTemplate) for i in results) + + pages = list(client.list_workflow_templates(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + workflow_templates.DeleteWorkflowTemplateRequest, + dict, + ], +) +def test_delete_workflow_template_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_workflow_template(request) + + # Establish that the response is the type that we expect. 
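+    # DeleteWorkflowTemplate is declared to return google.protobuf.Empty, so
+    # the client surfaces no return value and the mocked empty body maps to None.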
+ assert response is None + + +def test_delete_workflow_template_rest_required_fields( + request_type=workflow_templates.DeleteWorkflowTemplateRequest, +): + transport_class = transports.WorkflowTemplateServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workflow_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_workflow_template._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("version",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_workflow_template(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_workflow_template_rest_unset_required_fields(): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_workflow_template._get_unset_required_fields({}) + assert set(unset_fields) == (set(("version",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_workflow_template_rest_interceptors(null_interceptor): + transport = transports.WorkflowTemplateServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.WorkflowTemplateServiceRestInterceptor(), + ) + client = WorkflowTemplateServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "pre_delete_workflow_template", + ) as pre: + pre.assert_not_called() + pb_message = workflow_templates.DeleteWorkflowTemplateRequest.pb( + workflow_templates.DeleteWorkflowTemplateRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = workflow_templates.DeleteWorkflowTemplateRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_workflow_template( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_workflow_template_rest_bad_request( + transport: str = "rest", + request_type=workflow_templates.DeleteWorkflowTemplateRequest, +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_workflow_template(request) + + +def test_delete_workflow_template_rest_flattened(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workflowTemplates/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_workflow_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workflowTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_workflow_template_rest_flattened_error(transport: str = "rest"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_workflow_template( + workflow_templates.DeleteWorkflowTemplateRequest(), + name="name_value", + ) + + +def test_delete_workflow_template_rest_error(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.WorkflowTemplateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.WorkflowTemplateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkflowTemplateServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.WorkflowTemplateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = WorkflowTemplateServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = WorkflowTemplateServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.WorkflowTemplateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = WorkflowTemplateServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
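+    # In that case the client adopts the transport as-is; combining it with
+    # separate credentials, scopes, or an api_key is rejected (see
+    # test_credentials_transport_error above).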
+ transport = transports.WorkflowTemplateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = WorkflowTemplateServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.WorkflowTemplateServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.WorkflowTemplateServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + transports.WorkflowTemplateServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = WorkflowTemplateServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.WorkflowTemplateServiceGrpcTransport, + ) + + +def test_workflow_template_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.WorkflowTemplateServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_workflow_template_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.WorkflowTemplateServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
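+    # The base transport only defines the RPC surface; the concrete gRPC and
+    # REST transports override each of these stubs with a real implementation.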
+ methods = ( + "create_workflow_template", + "get_workflow_template", + "instantiate_workflow_template", + "instantiate_inline_workflow_template", + "update_workflow_template", + "list_workflow_templates", + "delete_workflow_template", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_workflow_template_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.WorkflowTemplateServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_workflow_template_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.WorkflowTemplateServiceTransport() + adc.assert_called_once() + + +def test_workflow_template_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + WorkflowTemplateServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +def test_workflow_template_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + transports.WorkflowTemplateServiceRestTransport, + ], +) +def test_workflow_template_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkflowTemplateServiceGrpcTransport, grpc_helpers), + (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_workflow_template_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(
+        transport_class, "create_channel", return_value=mock.Mock()
+    ):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_workflow_template_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.WorkflowTemplateServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_workflow_template_service_rest_lro_client():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_workflow_template_service_host_no_port(transport_name):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataproc.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "dataproc.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://dataproc.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_workflow_template_service_host_with_port(transport_name):
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataproc.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "dataproc.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://dataproc.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_workflow_template_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = WorkflowTemplateServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = WorkflowTemplateServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_workflow_template._session
+    session2 = client2.transport.create_workflow_template._session
+    assert session1 != session2
+    session1 = client1.transport.get_workflow_template._session
+    session2 = client2.transport.get_workflow_template._session
+    assert session1 != session2
+    session1 = client1.transport.instantiate_workflow_template._session
+    session2 = client2.transport.instantiate_workflow_template._session
+    assert session1 != session2
+    session1 = client1.transport.instantiate_inline_workflow_template._session
+    session2 = client2.transport.instantiate_inline_workflow_template._session
+    assert session1 != session2
+    session1 = client1.transport.update_workflow_template._session
+    session2 = client2.transport.update_workflow_template._session
+    assert session1 != session2
+    session1 = client1.transport.list_workflow_templates._session
+    session2 = client2.transport.list_workflow_templates._session
+    assert session1 != session2
+    session1 = client1.transport.delete_workflow_template._session
+    session2 = client2.transport.delete_workflow_template._session
+    assert session1 != session2
+
+
+def test_workflow_template_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.WorkflowTemplateServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_workflow_template_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.WorkflowTemplateServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.WorkflowTemplateServiceGrpcTransport,
+        transports.WorkflowTemplateServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_workflow_template_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.WorkflowTemplateServiceGrpcTransport,
+        transports.WorkflowTemplateServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_workflow_template_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_workflow_template_service_grpc_lro_client():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_workflow_template_service_grpc_lro_async_client():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_node_group_path():
+    project = "squid"
+    region = "clam"
+    cluster = "whelk"
+    node_group = "octopus"
+    expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(
+        project=project,
+        region=region,
+        cluster=cluster,
+        node_group=node_group,
+    )
+    actual = WorkflowTemplateServiceClient.node_group_path(
+        project, region, cluster, node_group
+    )
+    assert expected == actual
+
+
+def test_parse_node_group_path():
+    expected = {
+        "project": "oyster",
+        "region": "nudibranch",
+        "cluster": "cuttlefish",
+        "node_group": "mussel",
+    }
+    path = WorkflowTemplateServiceClient.node_group_path(**expected)
+
+    # Check that the path construction is reversible.
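+    # e.g. node_group_path("oyster", "nudibranch", "cuttlefish", "mussel")
+    # -> "projects/oyster/regions/nudibranch/clusters/cuttlefish/nodeGroups/mussel",
+    # which parse_node_group_path() inverts back into the expected dict.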
+ actual = WorkflowTemplateServiceClient.parse_node_group_path(path) + assert expected == actual + + +def test_service_path(): + project = "winkle" + location = "nautilus" + service = "scallop" + expected = "projects/{project}/locations/{location}/services/{service}".format( + project=project, + location=location, + service=service, + ) + actual = WorkflowTemplateServiceClient.service_path(project, location, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "abalone", + "location": "squid", + "service": "clam", + } + path = WorkflowTemplateServiceClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowTemplateServiceClient.parse_service_path(path) + assert expected == actual + + +def test_workflow_template_path(): + project = "whelk" + region = "octopus" + workflow_template = "oyster" + expected = "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( + project=project, + region=region, + workflow_template=workflow_template, + ) + actual = WorkflowTemplateServiceClient.workflow_template_path( + project, region, workflow_template + ) + assert expected == actual + + +def test_parse_workflow_template_path(): + expected = { + "project": "nudibranch", + "region": "cuttlefish", + "workflow_template": "mussel", + } + path = WorkflowTemplateServiceClient.workflow_template_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowTemplateServiceClient.parse_workflow_template_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = WorkflowTemplateServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = WorkflowTemplateServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowTemplateServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = WorkflowTemplateServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = WorkflowTemplateServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowTemplateServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = WorkflowTemplateServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = WorkflowTemplateServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = WorkflowTemplateServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = WorkflowTemplateServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = WorkflowTemplateServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowTemplateServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = WorkflowTemplateServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = WorkflowTemplateServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowTemplateServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.WorkflowTemplateServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.WorkflowTemplateServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = WorkflowTemplateServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
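+    # Patching `Session.request` at the class level keeps this test offline:
+    # the faked 400 response below is surfaced to the caller as a
+    # core_exceptions.BadRequest.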
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/regions/sample2/clusters/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/regions/sample2/clusters/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
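+        # CancelOperation has an empty response message, so the faked HTTP
+        # body below is the empty JSON object and the client returns None.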
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
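+    # Note that the success-path REST tests patch the transport's own session
+    # (client.transport._session), whereas the bad-request tests above patch
+    # the requests Session class directly; both avoid real network traffic.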
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/regions/sample2/operations"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/regions/sample2/operations"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
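+    # The routing header travels with the request as gRPC metadata, so it is
+    # recorded in the keyword arguments of the mocked call.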
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
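+        # FakeUnaryUnaryCall wraps the response so that it can be awaited,
+        # mimicking the call object returned by a gRPC asyncio stub.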
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+def test_set_iam_policy_from_dict():
+    client = WorkflowTemplateServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+
+        response = client.set_iam_policy(
+            request={
+                "resource": "resource_value",
+                "policy": policy_pb2.Policy(version=774),
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_from_dict_async():
+    client = WorkflowTemplateServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = WorkflowTemplateServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = WorkflowTemplateServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
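+        # Entering and leaving the `with client:` block should trigger a
+        # close of the underlying transport.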
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + WorkflowTemplateServiceClient, + transports.WorkflowTemplateServiceGrpcTransport, + ), + ( + WorkflowTemplateServiceAsyncClient, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dialogflow-cx/CONTRIBUTING.rst b/packages/google-cloud-dialogflow-cx/CONTRIBUTING.rst index edca94cc39b8..7404d8b0a2cd 100644 --- a/packages/google-cloud-dialogflow-cx/CONTRIBUTING.rst +++ b/packages/google-cloud-dialogflow-cx/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system- -- -k + $ nox -s system-3.11 -- -k .. note:: - System tests are only configured to run under Python. + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/__init__.py index 5954cfbe8fe4..e4e9b452dd7a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/__init__.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/__init__.py @@ -196,10 +196,17 @@ from google.cloud.dialogflowcx_v3.types.gcs import GcsDestination from google.cloud.dialogflowcx_v3.types.generative_settings import GenerativeSettings from google.cloud.dialogflowcx_v3.types.import_strategy import ImportStrategy +from google.cloud.dialogflowcx_v3.types.inline import InlineDestination, InlineSource from google.cloud.dialogflowcx_v3.types.intent import ( CreateIntentRequest, DeleteIntentRequest, + ExportIntentsMetadata, + ExportIntentsRequest, + ExportIntentsResponse, GetIntentRequest, + ImportIntentsMetadata, + ImportIntentsRequest, + ImportIntentsResponse, Intent, IntentView, ListIntentsRequest, @@ -466,9 +473,17 @@ "GcsDestination", "GenerativeSettings", "ImportStrategy", + "InlineDestination", + "InlineSource", "CreateIntentRequest", "DeleteIntentRequest", + "ExportIntentsMetadata", + "ExportIntentsRequest", + "ExportIntentsResponse", "GetIntentRequest", + "ImportIntentsMetadata", + "ImportIntentsRequest", + "ImportIntentsResponse", "Intent", "ListIntentsRequest", "ListIntentsResponse", diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py index 6972597de6c2..360a0d13ebdd 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.27.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/__init__.py index ba7a88c5d5c9..8896c397d569 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/__init__.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/__init__.py @@ -154,10 +154,17 @@ from .types.gcs import GcsDestination from .types.generative_settings import GenerativeSettings from .types.import_strategy import ImportStrategy +from .types.inline import InlineDestination, InlineSource from .types.intent import ( CreateIntentRequest, DeleteIntentRequest, + ExportIntentsMetadata, + ExportIntentsRequest, + ExportIntentsResponse, GetIntentRequest, + ImportIntentsMetadata, + ImportIntentsRequest, + ImportIntentsResponse, Intent, IntentView, ListIntentsRequest, @@ -376,6 +383,9 @@ "ExportAgentResponse", "ExportFlowRequest", "ExportFlowResponse", + "ExportIntentsMetadata", + "ExportIntentsRequest", + "ExportIntentsResponse", "ExportTestCasesMetadata", "ExportTestCasesRequest", "ExportTestCasesResponse", @@ -410,10 +420,15 @@ "GetWebhookRequest", "ImportFlowRequest", "ImportFlowResponse", + "ImportIntentsMetadata", + "ImportIntentsRequest", + "ImportIntentsResponse", "ImportStrategy", "ImportTestCasesMetadata", "ImportTestCasesRequest", "ImportTestCasesResponse", + "InlineDestination", + "InlineSource", "InputAudioConfig", "Intent", "IntentCoverage", diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_metadata.json b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_metadata.json index 17c042151b36..a6d6e72ea387 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_metadata.json +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_metadata.json @@ -843,11 +843,21 @@ "delete_intent" ] }, + "ExportIntents": { + "methods": [ + "export_intents" + ] + }, "GetIntent": { "methods": [ "get_intent" ] }, + "ImportIntents": { + "methods": [ + "import_intents" + ] + }, "ListIntents": { "methods": [ "list_intents" @@ -873,11 +883,21 @@ "delete_intent" ] }, + "ExportIntents": { + "methods": [ + "export_intents" + ] + }, "GetIntent": { "methods": [ "get_intent" ] }, + "ImportIntents": { + "methods": [ + "import_intents" + ] + }, "ListIntents": { "methods": [ "list_intents" @@ -903,11 +923,21 @@ "delete_intent" ] }, + "ExportIntents": { + "methods": [ + "export_intents" + ] + }, "GetIntent": { "methods": [ "get_intent" ] }, + "ImportIntents": { + "methods": [ + "import_intents" + ] + }, "ListIntents": { "methods": [ "list_intents" diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py index 6972597de6c2..360a0d13ebdd 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.27.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py index a7bddb9405a8..0f539f37fdba 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/async_client.py @@ -52,6 +52,7 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.dialogflowcx_v3.services.flows import pagers +from google.cloud.dialogflowcx_v3.types import advanced_settings from google.cloud.dialogflowcx_v3.types import flow from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow from google.cloud.dialogflowcx_v3.types import page, validation_message diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py index 91eb1e763c0e..72e253b4382a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py @@ -56,6 +56,7 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.dialogflowcx_v3.services.flows import pagers +from google.cloud.dialogflowcx_v3.types import advanced_settings from google.cloud.dialogflowcx_v3.types import flow from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow from google.cloud.dialogflowcx_v3.types import page, validation_message diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py index b68cb35976f9..c67b7ccfe0f6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/async_client.py @@ -42,6 +42,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -780,6 +782,217 @@ async def sample_delete_intent(): metadata=metadata, ) + async def import_intents( + self, + request: Optional[Union[intent.ImportIntentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports the specified intents into the agent. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportIntentsMetadata][google.cloud.dialogflow.cx.v3.ImportIntentsMetadata] + - ``response``: + [ImportIntentsResponse][google.cloud.dialogflow.cx.v3.ImportIntentsResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_import_intents(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ImportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_intents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ImportIntentsRequest, dict]]): + The request object. The request message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3.Intents.ImportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ImportIntentsResponse` The response message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3.Intents.ImportIntents]. + + """ + # Create or coerce a protobuf request object. + request = intent.ImportIntentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_intents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intent.ImportIntentsResponse, + metadata_type=intent.ImportIntentsMetadata, + ) + + # Done; return the response. + return response + + async def export_intents( + self, + request: Optional[Union[intent.ExportIntentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports the selected intents. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportIntentsMetadata][google.cloud.dialogflow.cx.v3.ExportIntentsMetadata] + - ``response``: + [ExportIntentsResponse][google.cloud.dialogflow.cx.v3.ExportIntentsResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + async def sample_export_intents(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + intents=['intents_value1', 'intents_value2'], + ) + + # Make the request + operation = client.export_intents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3.types.ExportIntentsRequest, dict]]): + The request object. The request message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3.Intents.ExportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ExportIntentsResponse` The response message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3.Intents.ExportIntents]. + + """ + # Create or coerce a protobuf request object. + request = intent.ExportIntentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_intents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intent.ExportIntentsResponse, + metadata_type=intent.ExportIntentsMetadata, + ) + + # Done; return the response. 
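Both new RPCs come back as long-running operations, so the async surface resolves in two steps: awaiting the call yields the operation future, and awaiting ``result()`` yields the final response. A condensed sketch of that flow, assuming a placeholder agent path and Cloud Storage URI:

.. code-block:: python

    import asyncio

    from google.cloud import dialogflowcx_v3


    async def run_import() -> None:
        client = dialogflowcx_v3.IntentsAsyncClient()
        operation = await client.import_intents(
            request=dialogflowcx_v3.ImportIntentsRequest(
                parent="projects/my-project/locations/global/agents/my-agent",
                intents_uri="gs://my-bucket/intents",
            )
        )
        # result() resolves to an ImportIntentsResponse, or raises if the
        # operation finished with an error.
        response = await operation.result()
        print(list(response.intents))


    asyncio.run(run_import())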
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py index 138b88b1273e..981e6dbed62d 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py @@ -46,6 +46,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -1039,6 +1041,219 @@ def sample_delete_intent(): metadata=metadata, ) + def import_intents( + self, + request: Optional[Union[intent.ImportIntentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports the specified intents into the agent. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportIntentsMetadata][google.cloud.dialogflow.cx.v3.ImportIntentsMetadata] + - ``response``: + [ImportIntentsResponse][google.cloud.dialogflow.cx.v3.ImportIntentsResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_import_intents(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ImportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ImportIntentsRequest, dict]): + The request object. The request message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3.Intents.ImportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ImportIntentsResponse` The response message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3.Intents.ImportIntents]. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a intent.ImportIntentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, intent.ImportIntentsRequest): + request = intent.ImportIntentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_intents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intent.ImportIntentsResponse, + metadata_type=intent.ImportIntentsMetadata, + ) + + # Done; return the response. + return response + + def export_intents( + self, + request: Optional[Union[intent.ExportIntentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports the selected intents. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportIntentsMetadata][google.cloud.dialogflow.cx.v3.ExportIntentsMetadata] + - ``response``: + [ExportIntentsResponse][google.cloud.dialogflow.cx.v3.ExportIntentsResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3 + + def sample_export_intents(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + intents=['intents_value1', 'intents_value2'], + ) + + # Make the request + operation = client.export_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3.types.ExportIntentsRequest, dict]): + The request object. The request message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3.Intents.ExportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3.types.ExportIntentsResponse` The response message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3.Intents.ExportIntents]. + + """ + # Create or coerce a protobuf request object. 
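The synchronous client mirrors this surface. One way to exercise the ``intents_content`` branch of the request oneof, assuming serialized intent data is already in hand:

.. code-block:: python

    from google.cloud import dialogflowcx_v3

    client = dialogflowcx_v3.IntentsClient()

    # `serialized` stands in for bytes previously produced by ExportIntents.
    serialized = b"..."

    request = dialogflowcx_v3.ImportIntentsRequest(
        parent="projects/my-project/locations/global/agents/my-agent",
        intents_content=dialogflowcx_v3.InlineSource(content=serialized),
        merge_option=dialogflowcx_v3.ImportIntentsRequest.MergeOption.REPLACE,
    )
    operation = client.import_intents(request=request)
    response = operation.result()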
+ # Minor optimization to avoid making a copy if the user passes + # in a intent.ExportIntentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, intent.ExportIntentsRequest): + request = intent.ExportIntentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_intents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intent.ExportIntentsResponse, + metadata_type=intent.ExportIntentsMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "IntentsClient": return self diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/base.py index 964064444612..14c97de933bb 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/base.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/base.py @@ -18,7 +18,7 @@ import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -154,6 +154,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.import_intents: gapic_v1.method.wrap_method( + self.import_intents, + default_timeout=None, + client_info=client_info, + ), + self.export_intents: gapic_v1.method.wrap_method( + self.export_intents, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -165,6 +175,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def list_intents( self, @@ -208,6 +223,24 @@ def delete_intent( ]: raise NotImplementedError() + @property + def import_intents( + self, + ) -> Callable[ + [intent.ImportIntentsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_intents( + self, + ) -> Callable[ + [intent.ExportIntentsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc.py index 4ff2b2d2ecd9..7ab9c5d2c6e7 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc.py +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc.py @@ -16,7 +16,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -115,6 +115,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -233,6 +234,20 @@ def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service.""" return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + @property def list_intents( self, @@ -372,6 +387,78 @@ def delete_intent(self) -> Callable[[intent.DeleteIntentRequest], empty_pb2.Empt ) return self._stubs["delete_intent"] + @property + def import_intents( + self, + ) -> Callable[[intent.ImportIntentsRequest], operations_pb2.Operation]: + r"""Return a callable for the import intents method over gRPC. + + Imports the specified intents into the agent. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportIntentsMetadata][google.cloud.dialogflow.cx.v3.ImportIntentsMetadata] + - ``response``: + [ImportIntentsResponse][google.cloud.dialogflow.cx.v3.ImportIntentsResponse] + + Returns: + Callable[[~.ImportIntentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_intents" not in self._stubs: + self._stubs["import_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/ImportIntents", + request_serializer=intent.ImportIntentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_intents"] + + @property + def export_intents( + self, + ) -> Callable[[intent.ExportIntentsRequest], operations_pb2.Operation]: + r"""Return a callable for the export intents method over gRPC. + + Exports the selected intents. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportIntentsMetadata][google.cloud.dialogflow.cx.v3.ExportIntentsMetadata] + - ``response``: + [ExportIntentsResponse][google.cloud.dialogflow.cx.v3.ExportIntentsResponse] + + Returns: + Callable[[~.ExportIntentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_intents" not in self._stubs: + self._stubs["export_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/ExportIntents", + request_serializer=intent.ExportIntentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_intents"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc_asyncio.py index 54f3ce27c1c6..92231fe4aebc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc_asyncio.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/grpc_asyncio.py @@ -16,7 +16,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -161,6 +161,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -236,6 +237,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_intents( self, @@ -379,6 +396,78 @@ def delete_intent( ) return self._stubs["delete_intent"] + @property + def import_intents( + self, + ) -> Callable[[intent.ImportIntentsRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the import intents method over gRPC. + + Imports the specified intents into the agent. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportIntentsMetadata][google.cloud.dialogflow.cx.v3.ImportIntentsMetadata] + - ``response``: + [ImportIntentsResponse][google.cloud.dialogflow.cx.v3.ImportIntentsResponse] + + Returns: + Callable[[~.ImportIntentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_intents" not in self._stubs: + self._stubs["import_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/ImportIntents", + request_serializer=intent.ImportIntentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_intents"] + + @property + def export_intents( + self, + ) -> Callable[[intent.ExportIntentsRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the export intents method over gRPC. + + Exports the selected intents. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportIntentsMetadata][google.cloud.dialogflow.cx.v3.ExportIntentsMetadata] + - ``response``: + [ExportIntentsResponse][google.cloud.dialogflow.cx.v3.ExportIntentsResponse] + + Returns: + Callable[[~.ExportIntentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_intents" not in self._stubs: + self._stubs["export_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3.Intents/ExportIntents", + request_serializer=intent.ExportIntentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_intents"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py index f7a8306e7811..aebf7ad075ce 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py @@ -20,7 +20,13 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore @@ -80,6 +86,14 @@ def pre_delete_intent(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_export_intents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_intents(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_intent(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -88,6 +102,14 @@ def post_get_intent(self, response): logging.log(f"Received response: {response}") return response + def pre_import_intents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_intents(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_intents(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -141,6 +163,27 @@ def 
pre_delete_intent( """ return request, metadata + def pre_export_intents( + self, request: intent.ExportIntentsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[intent.ExportIntentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_intents + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_export_intents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_intents + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + def pre_get_intent( self, request: intent.GetIntentRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[intent.GetIntentRequest, Sequence[Tuple[str, str]]]: @@ -160,6 +203,27 @@ def post_get_intent(self, response: intent.Intent) -> intent.Intent: """ return response + def pre_import_intents( + self, request: intent.ImportIntentsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[intent.ImportIntentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_intents + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_import_intents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_intents + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + def pre_list_intents( self, request: intent.ListIntentsRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[intent.ListIntentsRequest, Sequence[Tuple[str, str]]]: @@ -408,11 +472,70 @@ def __init__( self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or IntentsRestInterceptor() self._prep_wrapped_messages(client_info) + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
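These interceptor hooks are the supported customization point for the REST transport: override the ``pre_*``/``post_*`` pairs in a subclass and pass an instance to the transport. A sketch of wiring that in; the logging behavior is illustrative:

.. code-block:: python

    import logging

    from google.cloud import dialogflowcx_v3
    from google.cloud.dialogflowcx_v3.services.intents.transports.rest import (
        IntentsRestInterceptor,
        IntentsRestTransport,
    )


    class LoggingIntentsInterceptor(IntentsRestInterceptor):
        def pre_export_intents(self, request, metadata):
            logging.info("ExportIntents request: %s", request)
            return request, metadata

        def post_export_intents(self, response):
            # `response` is the raw longrunning Operation proto.
            logging.info("ExportIntents operation: %s", response.name)
            return response


    transport = IntentsRestTransport(interceptor=LoggingIntentsInterceptor())
    client = dialogflowcx_v3.IntentsClient(transport=transport)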
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v3/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v3/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v3/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v3", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + class _CreateIntent(IntentsRestStub): def __hash__(self): return hash("CreateIntent") @@ -591,6 +714,103 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _ExportIntents(IntentsRestStub): + def __hash__(self): + return hash("ExportIntents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: intent.ExportIntentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export intents method over HTTP. + + Args: + request (~.intent.ExportIntentsRequest): + The request object. The request message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3.Intents.ExportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/intents:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_intents(request, metadata) + pb_request = intent.ExportIntentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_intents(resp) + return resp + class _GetIntent(IntentsRestStub): def __hash__(self): return hash("GetIntent") @@ -685,6 +905,103 @@ def __call__( resp = self._interceptor.post_get_intent(resp) return resp + class _ImportIntents(IntentsRestStub): + def __hash__(self): + return hash("ImportIntents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: intent.ImportIntentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import intents method over HTTP. + + Args: + request (~.intent.ImportIntentsRequest): + The request object. The request message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3.Intents.ImportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3/{parent=projects/*/locations/*/agents/*}/intents:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_intents(request, metadata) + pb_request = intent.ImportIntentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_intents(resp) + return resp + class _ListIntents(IntentsRestStub): def __hash__(self): return hash("ListIntents") @@ -891,12 +1208,28 @@ def delete_intent(self) -> Callable[[intent.DeleteIntentRequest], empty_pb2.Empt # In C++ this would require a dynamic_cast return self._DeleteIntent(self._session, self._host, self._interceptor) # type: ignore + @property + def export_intents( + self, + ) -> Callable[[intent.ExportIntentsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportIntents(self._session, self._host, self._interceptor) # type: ignore + @property def get_intent(self) -> Callable[[intent.GetIntentRequest], intent.Intent]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetIntent(self._session, self._host, self._interceptor) # type: ignore + @property + def import_intents( + self, + ) -> Callable[[intent.ImportIntentsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ImportIntents(self._session, self._host, self._interceptor) # type: ignore + @property def list_intents( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py index a78e97d96db0..65209968eff5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/async_client.py @@ -47,7 +47,7 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.cloud.dialogflowcx_v3.services.pages import pagers -from google.cloud.dialogflowcx_v3.types import fulfillment +from google.cloud.dialogflowcx_v3.types import advanced_settings, fulfillment from google.cloud.dialogflowcx_v3.types import page from google.cloud.dialogflowcx_v3.types import page as gcdc_page diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py index 0b21f84f1d24..4000f1bc8edb 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py @@ -51,7 +51,7 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.cloud.dialogflowcx_v3.services.pages import pagers -from google.cloud.dialogflowcx_v3.types import fulfillment +from google.cloud.dialogflowcx_v3.types import advanced_settings, fulfillment from google.cloud.dialogflowcx_v3.types import page from google.cloud.dialogflowcx_v3.types import page as gcdc_page diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/__init__.py index b72780f5f9f7..e0a0c814015a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/__init__.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/__init__.py @@ -123,10 +123,17 @@ from .fulfillment import Fulfillment from .gcs import GcsDestination from .generative_settings import GenerativeSettings +from .inline import InlineDestination, InlineSource from .intent import ( CreateIntentRequest, DeleteIntentRequest, + ExportIntentsMetadata, + ExportIntentsRequest, + ExportIntentsResponse, GetIntentRequest, + ImportIntentsMetadata, + ImportIntentsRequest, + ImportIntentsResponse, Intent, IntentView, ListIntentsRequest, @@ -358,9 +365,17 @@ "GcsDestination", "GenerativeSettings", "ImportStrategy", + "InlineDestination", + "InlineSource", "CreateIntentRequest", "DeleteIntentRequest", + "ExportIntentsMetadata", + "ExportIntentsRequest", + "ExportIntentsResponse", "GetIntentRequest", + "ImportIntentsMetadata", + "ImportIntentsRequest", + "ImportIntentsResponse", "Intent", "ListIntentsRequest", "ListIntentsResponse", diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py index 83e5ca206895..0e342928663c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/advanced_settings.py @@ -52,6 
+52,14 @@ class AdvancedSettings(proto.Message): - Agent level - Flow level + dtmf_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings.DtmfSettings): + Settings for DTMF. + Exposed at the following levels: + + - Agent level + - Flow level + - Page level + - Parameter level. logging_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings.LoggingSettings): Settings for logging. Settings for Dialogflow History, Contact Center @@ -61,6 +69,39 @@ class AdvancedSettings(proto.Message): - Agent level. """ + class DtmfSettings(proto.Message): + r"""Define behaviors for DTMF (dual tone multi frequency). + + Attributes: + enabled (bool): + If true, incoming audio is processed for DTMF + (dual tone multi frequency) events. For example, + if the caller presses a button on their + telephone keypad and DTMF processing is enabled, + Dialogflow will detect the event (e.g. a "3" was + pressed) in the incoming audio and pass the + event to the bot to drive business logic (e.g. + when 3 is pressed, return the account balance). + max_digits (int): + Max length of DTMF digits. + finish_digit (str): + The digit that terminates a DTMF digit + sequence. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + max_digits: int = proto.Field( + proto.INT32, + number=2, + ) + finish_digit: str = proto.Field( + proto.STRING, + number=3, + ) + class LoggingSettings(proto.Message): r"""Define behaviors on logging. @@ -87,6 +128,11 @@ class LoggingSettings(proto.Message): number=2, message=gcs.GcsDestination, ) + dtmf_settings: DtmfSettings = proto.Field( + proto.MESSAGE, + number=5, + message=DtmfSettings, + ) logging_settings: LoggingSettings = proto.Field( proto.MESSAGE, number=6, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py index 88755b20e9ba..a5efea2ae1c3 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/flow.py @@ -22,6 +22,9 @@ import proto # type: ignore from google.cloud.dialogflowcx_v3.types import import_strategy, page, validation_message +from google.cloud.dialogflowcx_v3.types import ( + advanced_settings as gcdc_advanced_settings, +) __protobuf__ = proto.module( package="google.cloud.dialogflow.cx.v3", @@ -198,6 +201,11 @@ class Flow(proto.Message): for agent-level groups. nlu_settings (google.cloud.dialogflowcx_v3.types.NluSettings): NLU related settings of the flow. + advanced_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings): + Hierarchical advanced settings for this flow. + The settings exposed at the lower level + overrides the settings exposed at the higher + level. knowledge_connector_settings (google.cloud.dialogflowcx_v3.types.KnowledgeConnectorSettings): Optional. Knowledge connector configuration. 
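The new ``DtmfSettings`` message nests under ``AdvancedSettings``, so telephone-keypad input handling can be configured at any of the levels listed in the field docs. A construction sketch; the values are illustrative:

.. code-block:: python

    from google.cloud import dialogflowcx_v3

    advanced = dialogflowcx_v3.AdvancedSettings(
        dtmf_settings=dialogflowcx_v3.AdvancedSettings.DtmfSettings(
            enabled=True,
            max_digits=4,      # e.g. a four-digit account PIN
            finish_digit="#",  # terminates the digit sequence early
        ),
    )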
""" @@ -233,6 +241,11 @@ class Flow(proto.Message): number=11, message="NluSettings", ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=14, + message=gcdc_advanced_settings.AdvancedSettings, + ) knowledge_connector_settings: page.KnowledgeConnectorSettings = proto.Field( proto.MESSAGE, number=18, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py index e477b5abc3f5..44b36a87fbee 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/fulfillment.py @@ -20,6 +20,9 @@ from google.protobuf import struct_pb2 # type: ignore import proto # type: ignore +from google.cloud.dialogflowcx_v3.types import ( + advanced_settings as gcdc_advanced_settings, +) from google.cloud.dialogflowcx_v3.types import response_message __protobuf__ = proto.module( @@ -85,6 +88,18 @@ class Fulfillment(proto.Message): webhook. conditional_cases (MutableSequence[google.cloud.dialogflowcx_v3.types.Fulfillment.ConditionalCases]): Conditional cases for this fulfillment. + advanced_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings): + Hierarchical advanced settings for this + fulfillment. The settings exposed at the lower + level overrides the settings exposed at the + higher level. + enable_generative_fallback (bool): + If the flag is true, the agent will utilize LLM to generate + a text response. If LLM generation fails, the defined + [responses][google.cloud.dialogflow.cx.v3.Fulfillment.messages] + in the fulfillment will be respected. This flag is only + useful for fulfillments associated with no-match event + handlers. """ class SetParameterAction(proto.Message): @@ -218,6 +233,15 @@ class CaseContent(proto.Message): number=5, message=ConditionalCases, ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=7, + message=gcdc_advanced_settings.AdvancedSettings, + ) + enable_generative_fallback: bool = proto.Field( + proto.BOOL, + number=12, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/inline.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/inline.py new file mode 100644 index 000000000000..ec169fec3a6f --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/inline.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3", + manifest={ + "InlineDestination", + "InlineSource", + }, +) + + +class InlineDestination(proto.Message): + r"""Inline destination for a Dialogflow operation that writes or exports + objects (e.g. [intents][google.cloud.dialogflow.cx.v3.Intent]) + outside of Dialogflow. + + Attributes: + content (bytes): + Output only. The uncompressed byte content + for the objects. Only populated in responses. + """ + + content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class InlineSource(proto.Message): + r"""Inline source for a Dialogflow operation that reads or imports + objects (e.g. [intents][google.cloud.dialogflow.cx.v3.Intent]) into + Dialogflow. + + Attributes: + content (bytes): + The uncompressed byte content for the + objects. + """ + + content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py index 3cae590ad252..0b4159716a98 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/intent.py @@ -20,6 +20,8 @@ from google.protobuf import field_mask_pb2 # type: ignore import proto # type: ignore +from google.cloud.dialogflowcx_v3.types import inline + __protobuf__ = proto.module( package="google.cloud.dialogflow.cx.v3", manifest={ @@ -31,6 +33,12 @@ "CreateIntentRequest", "UpdateIntentRequest", "DeleteIntentRequest", + "ImportIntentsRequest", + "ImportIntentsResponse", + "ImportIntentsMetadata", + "ExportIntentsRequest", + "ExportIntentsResponse", + "ExportIntentsMetadata", }, ) @@ -484,4 +492,289 @@ class DeleteIntentRequest(proto.Message): ) +class ImportIntentsRequest(proto.Message): + r"""The request message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3.Intents.ImportIntents]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The agent to import the intents into. Format: + ``projects//locations//agents/``. + intents_uri (str): + The `Google Cloud + Storage `__ URI to + import intents from. The format of this URI must be + ``gs:///``. + + Dialogflow performs a read operation for the Cloud Storage + object on the caller's behalf, so your request + authentication must have read permissions for the object. + For more information, see `Dialogflow access + control `__. + + This field is a member of `oneof`_ ``intents``. + intents_content (google.cloud.dialogflowcx_v3.types.InlineSource): + Uncompressed byte content of intents. + + This field is a member of `oneof`_ ``intents``. + merge_option (google.cloud.dialogflowcx_v3.types.ImportIntentsRequest.MergeOption): + Merge option for importing intents. If not specified, + ``REJECT`` is assumed. + """ + + class MergeOption(proto.Enum): + r"""Merge option when display name conflicts exist during import. + + Values: + MERGE_OPTION_UNSPECIFIED (0): + Unspecified. 
Should not be used. + REJECT (1): + DEPRECATED: Please use + [REPORT_CONFLICT][ImportIntentsRequest.REPORT_CONFLICT] + instead. Fail the request if there are intents whose display + names conflict with the display names of intents in the + agent. + REPLACE (2): + Replace the original intent in the agent with + the new intent when display name conflicts + exist. + MERGE (3): + Merge the original intent with the new intent + when display name conflicts exist. + RENAME (4): + Create new intents with new display names to + differentiate them from the existing intents + when display name conflicts exist. + REPORT_CONFLICT (5): + Report conflict information if display names + conflict is detected. Otherwise, import intents. + KEEP (6): + Keep the original intent and discard the + conflicting new intent when display name + conflicts exist. + """ + MERGE_OPTION_UNSPECIFIED = 0 + REJECT = 1 + REPLACE = 2 + MERGE = 3 + RENAME = 4 + REPORT_CONFLICT = 5 + KEEP = 6 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intents_uri: str = proto.Field( + proto.STRING, + number=2, + oneof="intents", + ) + intents_content: inline.InlineSource = proto.Field( + proto.MESSAGE, + number=3, + oneof="intents", + message=inline.InlineSource, + ) + merge_option: MergeOption = proto.Field( + proto.ENUM, + number=4, + enum=MergeOption, + ) + + +class ImportIntentsResponse(proto.Message): + r"""The response message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3.Intents.ImportIntents]. + + Attributes: + intents (MutableSequence[str]): + The unique identifier of the imported intents. Format: + ``projects//locations//agents//intents/``. + conflicting_resources (google.cloud.dialogflowcx_v3.types.ImportIntentsResponse.ConflictingResources): + Info which resources have conflicts when + [REPORT_CONFLICT][ImportIntentsResponse.REPORT_CONFLICT] + merge_option is set in ImportIntentsRequest. + """ + + class ConflictingResources(proto.Message): + r"""Conflicting resources detected during the import process. Only + filled when [REPORT_CONFLICT][ImportIntentsResponse.REPORT_CONFLICT] + is set in the request and there are conflicts in the display names. + + Attributes: + intent_display_names (MutableSequence[str]): + Display names of conflicting intents. + entity_display_names (MutableSequence[str]): + Display names of conflicting entities. + """ + + intent_display_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + entity_display_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + intents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + conflicting_resources: ConflictingResources = proto.Field( + proto.MESSAGE, + number=2, + message=ConflictingResources, + ) + + +class ImportIntentsMetadata(proto.Message): + r"""Metadata returned for the + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3.Intents.ImportIntents] + long running operation. + + """ + + +class ExportIntentsRequest(proto.Message): + r"""The request message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3.Intents.ExportIntents]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. 
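With ``REPORT_CONFLICT``, the operation reports display-name collisions instead of importing over them, and the response's ``conflicting_resources`` can be inspected before retrying with a decisive merge option. A sketch, with placeholder resource names:

.. code-block:: python

    from google.cloud import dialogflowcx_v3

    client = dialogflowcx_v3.IntentsClient()
    MergeOption = dialogflowcx_v3.ImportIntentsRequest.MergeOption

    request = dialogflowcx_v3.ImportIntentsRequest(
        parent="projects/my-project/locations/global/agents/my-agent",
        intents_uri="gs://my-bucket/intents",
        merge_option=MergeOption.REPORT_CONFLICT,
    )
    response = client.import_intents(request=request).result()

    conflicts = response.conflicting_resources.intent_display_names
    if conflicts:
        # Choose REPLACE, MERGE, RENAME, or KEEP and re-run the import.
        print("Conflicting intents:", list(conflicts))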
The name of the parent agent to export intents. + Format: + ``projects//locations//agents/``. + intents (MutableSequence[str]): + Required. The name of the intents to export. Format: + ``projects//locations//agents//intents/``. + intents_uri (str): + Optional. The `Google Cloud + Storage `__ URI to + export the intents to. The format of this URI must be + ``gs:///``. + + Dialogflow performs a write operation for the Cloud Storage + object on the caller's behalf, so your request + authentication must have write permissions for the object. + For more information, see `Dialogflow access + control `__. + + This field is a member of `oneof`_ ``destination``. + intents_content_inline (bool): + Optional. The option to return the serialized + intents inline. + + This field is a member of `oneof`_ ``destination``. + data_format (google.cloud.dialogflowcx_v3.types.ExportIntentsRequest.DataFormat): + Optional. The data format of the exported intents. If not + specified, ``BLOB`` is assumed. + """ + + class DataFormat(proto.Enum): + r"""Data format of the exported intents. + + Values: + DATA_FORMAT_UNSPECIFIED (0): + Unspecified format. Treated as ``BLOB``. + BLOB (1): + Intents will be exported as raw bytes. + JSON (2): + Intents will be exported in JSON format. + CSV (3): + Intents will be exported in CSV format. + """ + DATA_FORMAT_UNSPECIFIED = 0 + BLOB = 1 + JSON = 2 + CSV = 3 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + intents_uri: str = proto.Field( + proto.STRING, + number=3, + oneof="destination", + ) + intents_content_inline: bool = proto.Field( + proto.BOOL, + number=4, + oneof="destination", + ) + data_format: DataFormat = proto.Field( + proto.ENUM, + number=5, + enum=DataFormat, + ) + + +class ExportIntentsResponse(proto.Message): + r"""The response message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3.Intents.ExportIntents]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + intents_uri (str): + The URI to a file containing the exported intents. This + field is populated only if ``intents_uri`` is specified in + [ExportIntentsRequest][google.cloud.dialogflow.cx.v3.ExportIntentsRequest]. + + This field is a member of `oneof`_ ``intents``. + intents_content (google.cloud.dialogflowcx_v3.types.InlineDestination): + Uncompressed byte content for intents. This field is + populated only if ``intents_content_inline`` is set to true + in + [ExportIntentsRequest][google.cloud.dialogflow.cx.v3.ExportIntentsRequest]. + + This field is a member of `oneof`_ ``intents``. + """ + + intents_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="intents", + ) + intents_content: inline.InlineDestination = proto.Field( + proto.MESSAGE, + number=2, + oneof="intents", + message=inline.InlineDestination, + ) + + +class ExportIntentsMetadata(proto.Message): + r"""Metadata returned for the + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3.Intents.ExportIntents] + long running operation. 
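+
+    A sketch of reading this metadata off the returned operation
+    (illustrative only, not generated code; ``request`` is assumed to be a
+    prepared ExportIntentsRequest):
+
+    .. code-block:: python
+
+        operation = client.export_intents(request=request)
+        metadata = operation.metadata  # ExportIntentsMetadata
+        response = operation.result()  # ExportIntentsResponse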
+ + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py index 323551e02028..0ba9da1be3bc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/page.py @@ -22,6 +22,9 @@ import proto # type: ignore from google.cloud.dialogflowcx_v3.types import data_store_connection, fulfillment +from google.cloud.dialogflowcx_v3.types import ( + advanced_settings as gcdc_advanced_settings, +) __protobuf__ = proto.module( package="google.cloud.dialogflow.cx.v3", @@ -127,6 +130,11 @@ class Page(proto.Message): Handlers associated with the page to handle events such as webhook errors, no match or no input. + advanced_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings): + Hierarchical advanced settings for this page. + The settings exposed at the lower level + overrides the settings exposed at the higher + level. knowledge_connector_settings (google.cloud.dialogflowcx_v3.types.KnowledgeConnectorSettings): Optional. Knowledge connector configuration. """ @@ -163,6 +171,11 @@ class Page(proto.Message): number=10, message="EventHandler", ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=13, + message=gcdc_advanced_settings.AdvancedSettings, + ) knowledge_connector_settings: "KnowledgeConnectorSettings" = proto.Field( proto.MESSAGE, number=18, @@ -223,6 +236,11 @@ class Parameter(proto.Message): parameter level redaction or [entity type level redaction][google.cloud.dialogflow.cx.v3.EntityType.redact] is enabled. + advanced_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings): + Hierarchical advanced settings for this + parameter. The settings exposed at the lower + level overrides the settings exposed at the + higher level. """ class FillBehavior(proto.Message): @@ -317,6 +335,11 @@ class FillBehavior(proto.Message): proto.BOOL, number=11, ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=12, + message=gcdc_advanced_settings.AdvancedSettings, + ) parameters: MutableSequence[Parameter] = proto.RepeatedField( proto.MESSAGE, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/session.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/session.py index 37545db5a8d8..252ff7f13679 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/session.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/types/session.py @@ -28,6 +28,9 @@ response_message, session_entity_type, ) +from google.cloud.dialogflowcx_v3.types import ( + advanced_settings as gcdc_advanced_settings, +) from google.cloud.dialogflowcx_v3.types import audio_config from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent @@ -1071,6 +1074,16 @@ class QueryResult(proto.Message): [``analyze_query_text_sentiment``] [google.cloud.dialogflow.cx.v3.QueryParameters.analyze_query_text_sentiment], specified in the request. + advanced_settings (google.cloud.dialogflowcx_v3.types.AdvancedSettings): + Returns the current advanced settings + including IVR settings. Even though the + operations configured by these settings are + performed by Dialogflow, the client may need to + perform special logic at the moment. 
For + example, if Dialogflow exports audio to Google + Cloud Storage, then the client may need to wait + for the resulting object to appear in the bucket + before proceeding. """ text: str = proto.Field( @@ -1154,6 +1167,11 @@ class QueryResult(proto.Message): number=17, message="SentimentAnalysisResult", ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=21, + message=gcdc_advanced_settings.AdvancedSettings, + ) class TextInput(proto.Message): diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/__init__.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/__init__.py index 772ad9d4aeda..248882ae6c8a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/__init__.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/__init__.py @@ -154,10 +154,17 @@ from .types.gcs import GcsDestination from .types.generative_settings import GenerativeSettings from .types.import_strategy import ImportStrategy +from .types.inline import InlineDestination, InlineSource from .types.intent import ( CreateIntentRequest, DeleteIntentRequest, + ExportIntentsMetadata, + ExportIntentsRequest, + ExportIntentsResponse, GetIntentRequest, + ImportIntentsMetadata, + ImportIntentsRequest, + ImportIntentsResponse, Intent, IntentView, ListIntentsRequest, @@ -376,6 +383,9 @@ "ExportAgentResponse", "ExportFlowRequest", "ExportFlowResponse", + "ExportIntentsMetadata", + "ExportIntentsRequest", + "ExportIntentsResponse", "ExportTestCasesMetadata", "ExportTestCasesRequest", "ExportTestCasesResponse", @@ -410,10 +420,15 @@ "GetWebhookRequest", "ImportFlowRequest", "ImportFlowResponse", + "ImportIntentsMetadata", + "ImportIntentsRequest", + "ImportIntentsResponse", "ImportStrategy", "ImportTestCasesMetadata", "ImportTestCasesRequest", "ImportTestCasesResponse", + "InlineDestination", + "InlineSource", "InputAudioConfig", "Intent", "IntentCoverage", diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_metadata.json b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_metadata.json index 9c839facf225..aed58c497bdb 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_metadata.json +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_metadata.json @@ -843,11 +843,21 @@ "delete_intent" ] }, + "ExportIntents": { + "methods": [ + "export_intents" + ] + }, "GetIntent": { "methods": [ "get_intent" ] }, + "ImportIntents": { + "methods": [ + "import_intents" + ] + }, "ListIntents": { "methods": [ "list_intents" @@ -873,11 +883,21 @@ "delete_intent" ] }, + "ExportIntents": { + "methods": [ + "export_intents" + ] + }, "GetIntent": { "methods": [ "get_intent" ] }, + "ImportIntents": { + "methods": [ + "import_intents" + ] + }, "ListIntents": { "methods": [ "list_intents" @@ -903,11 +923,21 @@ "delete_intent" ] }, + "ExportIntents": { + "methods": [ + "export_intents" + ] + }, "GetIntent": { "methods": [ "get_intent" ] }, + "ImportIntents": { + "methods": [ + "import_intents" + ] + }, "ListIntents": { "methods": [ "list_intents" diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py index 6972597de6c2..360a0d13ebdd 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.27.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/async_client.py index bbece6914630..fb1a66abd5ac 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/async_client.py @@ -52,6 +52,7 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.dialogflowcx_v3beta1.services.flows import pagers +from google.cloud.dialogflowcx_v3beta1.types import advanced_settings from google.cloud.dialogflowcx_v3beta1.types import flow from google.cloud.dialogflowcx_v3beta1.types import flow as gcdc_flow from google.cloud.dialogflowcx_v3beta1.types import page, validation_message diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py index 348c66c13077..99b6def70a66 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py @@ -56,6 +56,7 @@ from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.dialogflowcx_v3beta1.services.flows import pagers +from google.cloud.dialogflowcx_v3beta1.types import advanced_settings from google.cloud.dialogflowcx_v3beta1.types import flow from google.cloud.dialogflowcx_v3beta1.types import flow as gcdc_flow from google.cloud.dialogflowcx_v3beta1.types import page, validation_message diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/async_client.py index dbc38d9b5217..563d1da4ada7 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/async_client.py @@ -42,6 +42,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -780,6 +782,217 @@ async def sample_delete_intent(): metadata=metadata, ) + async def import_intents( + self, + request: Optional[Union[intent.ImportIntentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports the specified intents into the agent. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportIntentsMetadata][google.cloud.dialogflow.cx.v3beta1.ImportIntentsMetadata] + - ``response``: + [ImportIntentsResponse][google.cloud.dialogflow.cx.v3beta1.ImportIntentsResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3beta1 + + async def sample_import_intents(): + # Create a client + client = dialogflowcx_v3beta1.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3beta1.ImportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_intents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3beta1.types.ImportIntentsRequest, dict]]): + The request object. The request message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3beta1.types.ImportIntentsResponse` The response message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents]. + + """ + # Create or coerce a protobuf request object. + request = intent.ImportIntentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_intents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intent.ImportIntentsResponse, + metadata_type=intent.ImportIntentsMetadata, + ) + + # Done; return the response. + return response + + async def export_intents( + self, + request: Optional[Union[intent.ExportIntentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports the selected intents. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportIntentsMetadata][google.cloud.dialogflow.cx.v3beta1.ExportIntentsMetadata] + - ``response``: + [ExportIntentsResponse][google.cloud.dialogflow.cx.v3beta1.ExportIntentsResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3beta1 + + async def sample_export_intents(): + # Create a client + client = dialogflowcx_v3beta1.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3beta1.ExportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + intents=['intents_value1', 'intents_value2'], + ) + + # Make the request + operation = client.export_intents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflowcx_v3beta1.types.ExportIntentsRequest, dict]]): + The request object. The request message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3beta1.types.ExportIntentsResponse` The response message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents]. + + """ + # Create or coerce a protobuf request object. + request = intent.ExportIntentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_intents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + intent.ExportIntentsResponse, + metadata_type=intent.ExportIntentsMetadata, + ) + + # Done; return the response. 
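+        #
+        # Illustrative caller-side sketch (not generated code; the resource
+        # paths are hypothetical) of consuming the AsyncOperation returned
+        # below:
+        #
+        #     operation = await client.export_intents(request={
+        #         "parent": "projects/my-project/locations/global/agents/my-agent",
+        #         "intents": ["projects/my-project/locations/global/agents/my-agent/intents/my-intent"],
+        #         "intents_content_inline": True,
+        #     })
+        #     response = await operation.result()
+        #     print(response.intents_content.content)  # exported bytes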
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/client.py index ea9e503b9e15..e790305b3e36 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/client.py @@ -46,6 +46,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -1039,6 +1041,219 @@ def sample_delete_intent(): metadata=metadata, ) + def import_intents( + self, + request: Optional[Union[intent.ImportIntentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports the specified intents into the agent. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportIntentsMetadata][google.cloud.dialogflow.cx.v3beta1.ImportIntentsMetadata] + - ``response``: + [ImportIntentsResponse][google.cloud.dialogflow.cx.v3beta1.ImportIntentsResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3beta1 + + def sample_import_intents(): + # Create a client + client = dialogflowcx_v3beta1.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3beta1.ImportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3beta1.types.ImportIntentsRequest, dict]): + The request object. The request message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3beta1.types.ImportIntentsResponse` The response message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents]. + + """ + # Create or coerce a protobuf request object. 
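+        # A plain dict is also accepted and coerced below. Illustrative call
+        # (values hypothetical); note that ``intents_uri`` and
+        # ``intents_content`` form a oneof, so set exactly one of them:
+        #
+        #     client.import_intents(request={
+        #         "parent": "projects/my-project/locations/global/agents/my-agent",
+        #         "intents_uri": "gs://my-bucket/intents.json",
+        #         "merge_option": "REPORT_CONFLICT",
+        #     })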
+ # Minor optimization to avoid making a copy if the user passes + # in a intent.ImportIntentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, intent.ImportIntentsRequest): + request = intent.ImportIntentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_intents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intent.ImportIntentsResponse, + metadata_type=intent.ImportIntentsMetadata, + ) + + # Done; return the response. + return response + + def export_intents( + self, + request: Optional[Union[intent.ExportIntentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports the selected intents. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportIntentsMetadata][google.cloud.dialogflow.cx.v3beta1.ExportIntentsMetadata] + - ``response``: + [ExportIntentsResponse][google.cloud.dialogflow.cx.v3beta1.ExportIntentsResponse] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflowcx_v3beta1 + + def sample_export_intents(): + # Create a client + client = dialogflowcx_v3beta1.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3beta1.ExportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + intents=['intents_value1', 'intents_value2'], + ) + + # Make the request + operation = client.export_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflowcx_v3beta1.types.ExportIntentsRequest, dict]): + The request object. The request message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dialogflowcx_v3beta1.types.ExportIntentsResponse` The response message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents]. 
+ + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a intent.ExportIntentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, intent.ExportIntentsRequest): + request = intent.ExportIntentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_intents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + intent.ExportIntentsResponse, + metadata_type=intent.ExportIntentsMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "IntentsClient": return self diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/base.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/base.py index e88dbc0a0148..400ba3f5f4d4 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/base.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/base.py @@ -18,7 +18,7 @@ import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -154,6 +154,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.import_intents: gapic_v1.method.wrap_method( + self.import_intents, + default_timeout=None, + client_info=client_info, + ), + self.export_intents: gapic_v1.method.wrap_method( + self.export_intents, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -165,6 +175,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def list_intents( self, @@ -208,6 +223,24 @@ def delete_intent( ]: raise NotImplementedError() + @property + def import_intents( + self, + ) -> Callable[ + [intent.ImportIntentsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_intents( + self, + ) -> Callable[ + [intent.ExportIntentsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/grpc.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/grpc.py index 39cdbd690906..f355df54da12 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/grpc.py +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/grpc.py @@ -16,7 +16,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -115,6 +115,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -233,6 +234,20 @@ def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service.""" return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + @property def list_intents( self, @@ -372,6 +387,78 @@ def delete_intent(self) -> Callable[[intent.DeleteIntentRequest], empty_pb2.Empt ) return self._stubs["delete_intent"] + @property + def import_intents( + self, + ) -> Callable[[intent.ImportIntentsRequest], operations_pb2.Operation]: + r"""Return a callable for the import intents method over gRPC. + + Imports the specified intents into the agent. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportIntentsMetadata][google.cloud.dialogflow.cx.v3beta1.ImportIntentsMetadata] + - ``response``: + [ImportIntentsResponse][google.cloud.dialogflow.cx.v3beta1.ImportIntentsResponse] + + Returns: + Callable[[~.ImportIntentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_intents" not in self._stubs: + self._stubs["import_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3beta1.Intents/ImportIntents", + request_serializer=intent.ImportIntentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_intents"] + + @property + def export_intents( + self, + ) -> Callable[[intent.ExportIntentsRequest], operations_pb2.Operation]: + r"""Return a callable for the export intents method over gRPC. + + Exports the selected intents. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportIntentsMetadata][google.cloud.dialogflow.cx.v3beta1.ExportIntentsMetadata] + - ``response``: + [ExportIntentsResponse][google.cloud.dialogflow.cx.v3beta1.ExportIntentsResponse] + + Returns: + Callable[[~.ExportIntentsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_intents" not in self._stubs: + self._stubs["export_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3beta1.Intents/ExportIntents", + request_serializer=intent.ExportIntentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_intents"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/grpc_asyncio.py index 5da41716dfcb..7f76d42b0ca1 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/grpc_asyncio.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/grpc_asyncio.py @@ -16,7 +16,7 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -161,6 +161,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -236,6 +237,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_intents( self, @@ -379,6 +396,78 @@ def delete_intent( ) return self._stubs["delete_intent"] + @property + def import_intents( + self, + ) -> Callable[[intent.ImportIntentsRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the import intents method over gRPC. + + Imports the specified intents into the agent. + + This method is a `long-running + operation `__. 
+ The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ImportIntentsMetadata][google.cloud.dialogflow.cx.v3beta1.ImportIntentsMetadata] + - ``response``: + [ImportIntentsResponse][google.cloud.dialogflow.cx.v3beta1.ImportIntentsResponse] + + Returns: + Callable[[~.ImportIntentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_intents" not in self._stubs: + self._stubs["import_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3beta1.Intents/ImportIntents", + request_serializer=intent.ImportIntentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_intents"] + + @property + def export_intents( + self, + ) -> Callable[[intent.ExportIntentsRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the export intents method over gRPC. + + Exports the selected intents. + + This method is a `long-running + operation `__. + The returned ``Operation`` type has the following + method-specific fields: + + - ``metadata``: + [ExportIntentsMetadata][google.cloud.dialogflow.cx.v3beta1.ExportIntentsMetadata] + - ``response``: + [ExportIntentsResponse][google.cloud.dialogflow.cx.v3beta1.ExportIntentsResponse] + + Returns: + Callable[[~.ExportIntentsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
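+        # Caching note: the stub is created once below and stored in
+        # self._stubs, so repeated property access returns the same callable,
+        # e.g. (illustrative):
+        #
+        #     transport.export_intents is transport.export_intents  # -> True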
+ if "export_intents" not in self._stubs: + self._stubs["export_intents"] = self.grpc_channel.unary_unary( + "/google.cloud.dialogflow.cx.v3beta1.Intents/ExportIntents", + request_serializer=intent.ExportIntentsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_intents"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/rest.py index 1174d82bd7c5..a903f40f6537 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/rest.py @@ -20,7 +20,13 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore @@ -80,6 +86,14 @@ def pre_delete_intent(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_export_intents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_intents(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_intent(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -88,6 +102,14 @@ def post_get_intent(self, response): logging.log(f"Received response: {response}") return response + def pre_import_intents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_intents(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_intents(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -141,6 +163,27 @@ def pre_delete_intent( """ return request, metadata + def pre_export_intents( + self, request: intent.ExportIntentsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[intent.ExportIntentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_intents + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_export_intents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_intents + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. 
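+
+        A minimal subclass sketch (illustrative), in the spirit of the
+        logging example in this class's docstring:
+
+        .. code-block:: python
+
+            class MyInterceptor(IntentsRestInterceptor):
+                def post_export_intents(self, response):
+                    logging.log(f"export operation: {response.name}")
+                    return response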
+ """ + return response + def pre_get_intent( self, request: intent.GetIntentRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[intent.GetIntentRequest, Sequence[Tuple[str, str]]]: @@ -160,6 +203,27 @@ def post_get_intent(self, response: intent.Intent) -> intent.Intent: """ return response + def pre_import_intents( + self, request: intent.ImportIntentsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[intent.ImportIntentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_intents + + Override in a subclass to manipulate the request or metadata + before they are sent to the Intents server. + """ + return request, metadata + + def post_import_intents( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for import_intents + + Override in a subclass to manipulate the response + after it is returned by the Intents server but before + it is returned to user code. + """ + return response + def pre_list_intents( self, request: intent.ListIntentsRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[intent.ListIntentsRequest, Sequence[Tuple[str, str]]]: @@ -408,11 +472,70 @@ def __init__( self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or IntentsRestInterceptor() self._prep_wrapped_messages(client_info) + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v3beta1/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v3beta1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v3beta1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3beta1/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v3beta1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v3beta1/{name=projects/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v3beta1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
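+        #
+        # Illustrative use of the cached client (the operation name is
+        # hypothetical):
+        #
+        #     ops = transport.operations_client
+        #     op = ops.get_operation(name="projects/p/locations/l/operations/123")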
+ return self._operations_client + class _CreateIntent(IntentsRestStub): def __hash__(self): return hash("CreateIntent") @@ -591,6 +714,103 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _ExportIntents(IntentsRestStub): + def __hash__(self): + return hash("ExportIntents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: intent.ExportIntentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the export intents method over HTTP. + + Args: + request (~.intent.ExportIntentsRequest): + The request object. The request message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3beta1/{parent=projects/*/locations/*/agents/*}/intents:export", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_export_intents(request, metadata) + pb_request = intent.ExportIntentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
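+            # (For example, a 404 response surfaces as
+            # core_exceptions.NotFound and a 409 as core_exceptions.Conflict.)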
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_intents(resp) + return resp + class _GetIntent(IntentsRestStub): def __hash__(self): return hash("GetIntent") @@ -685,6 +905,103 @@ def __call__( resp = self._interceptor.post_get_intent(resp) return resp + class _ImportIntents(IntentsRestStub): + def __hash__(self): + return hash("ImportIntents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: intent.ImportIntentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the import intents method over HTTP. + + Args: + request (~.intent.ImportIntentsRequest): + The request object. The request message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3beta1/{parent=projects/*/locations/*/agents/*}/intents:import", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_import_intents(request, metadata) + pb_request = intent.ImportIntentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
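+            # On success the body is a google.longrunning.Operation JSON
+            # document, e.g. (illustrative):
+            #
+            #     {"name": "projects/p/locations/l/operations/123", "done": false}
+            #
+            # which is parsed into operations_pb2.Operation below.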
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_intents(resp) + return resp + class _ListIntents(IntentsRestStub): def __hash__(self): return hash("ListIntents") @@ -891,12 +1208,28 @@ def delete_intent(self) -> Callable[[intent.DeleteIntentRequest], empty_pb2.Empt # In C++ this would require a dynamic_cast return self._DeleteIntent(self._session, self._host, self._interceptor) # type: ignore + @property + def export_intents( + self, + ) -> Callable[[intent.ExportIntentsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportIntents(self._session, self._host, self._interceptor) # type: ignore + @property def get_intent(self) -> Callable[[intent.GetIntentRequest], intent.Intent]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetIntent(self._session, self._host, self._interceptor) # type: ignore + @property + def import_intents( + self, + ) -> Callable[[intent.ImportIntentsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ImportIntents(self._session, self._host, self._interceptor) # type: ignore + @property def list_intents( self, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py index e074ed3624c4..6430d5ed26ae 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/async_client.py @@ -47,7 +47,7 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.cloud.dialogflowcx_v3beta1.services.pages import pagers -from google.cloud.dialogflowcx_v3beta1.types import fulfillment +from google.cloud.dialogflowcx_v3beta1.types import advanced_settings, fulfillment from google.cloud.dialogflowcx_v3beta1.types import page from google.cloud.dialogflowcx_v3beta1.types import page as gcdc_page diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py index 1d0159dd1dae..23e6c82ea146 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py @@ -51,7 +51,7 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.cloud.dialogflowcx_v3beta1.services.pages import pagers -from google.cloud.dialogflowcx_v3beta1.types import fulfillment +from google.cloud.dialogflowcx_v3beta1.types import advanced_settings, fulfillment from google.cloud.dialogflowcx_v3beta1.types import page from google.cloud.dialogflowcx_v3beta1.types import page as gcdc_page diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/__init__.py 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/__init__.py index b72780f5f9f7..e0a0c814015a 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/__init__.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/__init__.py @@ -123,10 +123,17 @@ from .fulfillment import Fulfillment from .gcs import GcsDestination from .generative_settings import GenerativeSettings +from .inline import InlineDestination, InlineSource from .intent import ( CreateIntentRequest, DeleteIntentRequest, + ExportIntentsMetadata, + ExportIntentsRequest, + ExportIntentsResponse, GetIntentRequest, + ImportIntentsMetadata, + ImportIntentsRequest, + ImportIntentsResponse, Intent, IntentView, ListIntentsRequest, @@ -358,9 +365,17 @@ "GcsDestination", "GenerativeSettings", "ImportStrategy", + "InlineDestination", + "InlineSource", "CreateIntentRequest", "DeleteIntentRequest", + "ExportIntentsMetadata", + "ExportIntentsRequest", + "ExportIntentsResponse", "GetIntentRequest", + "ImportIntentsMetadata", + "ImportIntentsRequest", + "ImportIntentsResponse", "Intent", "ListIntentsRequest", "ListIntentsResponse", diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/advanced_settings.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/advanced_settings.py index 5a4e23cc4a97..d46db404496e 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/advanced_settings.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/advanced_settings.py @@ -52,6 +52,14 @@ class AdvancedSettings(proto.Message): - Agent level - Flow level + dtmf_settings (google.cloud.dialogflowcx_v3beta1.types.AdvancedSettings.DtmfSettings): + Settings for DTMF. + Exposed at the following levels: + + - Agent level + - Flow level + - Page level + - Parameter level. logging_settings (google.cloud.dialogflowcx_v3beta1.types.AdvancedSettings.LoggingSettings): Settings for logging. Settings for Dialogflow History, Contact Center @@ -61,6 +69,39 @@ class AdvancedSettings(proto.Message): - Agent level. """ + class DtmfSettings(proto.Message): + r"""Define behaviors for DTMF (dual tone multi frequency). + + Attributes: + enabled (bool): + If true, incoming audio is processed for DTMF + (dual tone multi frequency) events. For example, + if the caller presses a button on their + telephone keypad and DTMF processing is enabled, + Dialogflow will detect the event (e.g. a "3" was + pressed) in the incoming audio and pass the + event to the bot to drive business logic (e.g. + when 3 is pressed, return the account balance). + max_digits (int): + Max length of DTMF digits. + finish_digit (str): + The digit that terminates a DTMF digit + sequence. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + max_digits: int = proto.Field( + proto.INT32, + number=2, + ) + finish_digit: str = proto.Field( + proto.STRING, + number=3, + ) + class LoggingSettings(proto.Message): r"""Define behaviors on logging. 
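A quick illustration of the DTMF settings added in the hunk above (a sketch,
not part of the generated diff; all field values are hypothetical):

.. code-block:: python

    from google.cloud import dialogflowcx_v3beta1

    settings = dialogflowcx_v3beta1.AdvancedSettings(
        dtmf_settings=dialogflowcx_v3beta1.AdvancedSettings.DtmfSettings(
            enabled=True,      # process incoming audio for keypad events
            max_digits=4,      # accept at most four digits
            finish_digit="#",  # "#" terminates the digit sequence
        ),
    )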
@@ -87,6 +128,11 @@ class LoggingSettings(proto.Message): number=2, message=gcs.GcsDestination, ) + dtmf_settings: DtmfSettings = proto.Field( + proto.MESSAGE, + number=5, + message=DtmfSettings, + ) logging_settings: LoggingSettings = proto.Field( proto.MESSAGE, number=6, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/flow.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/flow.py index 7aa7d7bed9d6..ed1a9faa7c4c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/flow.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/flow.py @@ -26,6 +26,9 @@ page, validation_message, ) +from google.cloud.dialogflowcx_v3beta1.types import ( + advanced_settings as gcdc_advanced_settings, +) __protobuf__ = proto.module( package="google.cloud.dialogflow.cx.v3beta1", @@ -202,6 +205,11 @@ class Flow(proto.Message): for agent-level groups. nlu_settings (google.cloud.dialogflowcx_v3beta1.types.NluSettings): NLU related settings of the flow. + advanced_settings (google.cloud.dialogflowcx_v3beta1.types.AdvancedSettings): + Hierarchical advanced settings for this flow. + The settings exposed at the lower level + override the settings exposed at the higher + level. knowledge_connector_settings (google.cloud.dialogflowcx_v3beta1.types.KnowledgeConnectorSettings): Optional. Knowledge connector configuration. """ @@ -237,6 +245,11 @@ number=11, message="NluSettings", ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=14, + message=gcdc_advanced_settings.AdvancedSettings, + ) knowledge_connector_settings: page.KnowledgeConnectorSettings = proto.Field( proto.MESSAGE, number=18, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/fulfillment.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/fulfillment.py index 2bb10c27a29a..1cf2b0956b31 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/fulfillment.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/fulfillment.py @@ -20,6 +20,9 @@ from google.protobuf import struct_pb2 # type: ignore import proto # type: ignore +from google.cloud.dialogflowcx_v3beta1.types import ( + advanced_settings as gcdc_advanced_settings, +) from google.cloud.dialogflowcx_v3beta1.types import response_message __protobuf__ = proto.module( @@ -85,6 +88,18 @@ class Fulfillment(proto.Message): webhook. conditional_cases (MutableSequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases]): Conditional cases for this fulfillment. + advanced_settings (google.cloud.dialogflowcx_v3beta1.types.AdvancedSettings): + Hierarchical advanced settings for this + fulfillment. The settings exposed at the lower + level override the settings exposed at the + higher level. + enable_generative_fallback (bool): + If the flag is true, the agent will utilize an LLM to generate + a text response. If LLM generation fails, the defined + [responses][google.cloud.dialogflow.cx.v3beta1.Fulfillment.messages] + in the fulfillment will be respected. This flag is only + useful for fulfillments associated with no-match event + handlers.
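A hedged sketch of how the two new Fulfillment fields compose, using only types from this package; the response text is a placeholder:

from google.cloud import dialogflowcx_v3beta1

# A no-match fulfillment that falls back to LLM-generated text when the
# defined responses do not apply.
fulfillment = dialogflowcx_v3beta1.Fulfillment(
    messages=[
        dialogflowcx_v3beta1.ResponseMessage(
            text=dialogflowcx_v3beta1.ResponseMessage.Text(
                text=["Sorry, could you say that again?"]
            )
        )
    ],
    enable_generative_fallback=True,
)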
""" class SetParameterAction(proto.Message): @@ -218,6 +233,15 @@ class CaseContent(proto.Message): number=5, message=ConditionalCases, ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=7, + message=gcdc_advanced_settings.AdvancedSettings, + ) + enable_generative_fallback: bool = proto.Field( + proto.BOOL, + number=12, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/inline.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/inline.py new file mode 100644 index 000000000000..e90282739927 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/inline.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.cx.v3beta1", + manifest={ + "InlineDestination", + "InlineSource", + }, +) + + +class InlineDestination(proto.Message): + r"""Inline destination for a Dialogflow operation that writes or exports + objects (e.g. [intents][google.cloud.dialogflow.cx.v3beta1.Intent]) + outside of Dialogflow. + + Attributes: + content (bytes): + Output only. The uncompressed byte content + for the objects. Only populated in responses. + """ + + content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class InlineSource(proto.Message): + r"""Inline source for a Dialogflow operation that reads or imports + objects (e.g. [intents][google.cloud.dialogflow.cx.v3beta1.Intent]) + into Dialogflow. + + Attributes: + content (bytes): + The uncompressed byte content for the + objects. 
+ """ + + content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/intent.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/intent.py index 7434430f10c6..436d5cf2d481 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/intent.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/intent.py @@ -20,6 +20,8 @@ from google.protobuf import field_mask_pb2 # type: ignore import proto # type: ignore +from google.cloud.dialogflowcx_v3beta1.types import inline + __protobuf__ = proto.module( package="google.cloud.dialogflow.cx.v3beta1", manifest={ @@ -31,6 +33,12 @@ "CreateIntentRequest", "UpdateIntentRequest", "DeleteIntentRequest", + "ImportIntentsRequest", + "ImportIntentsResponse", + "ImportIntentsMetadata", + "ExportIntentsRequest", + "ExportIntentsResponse", + "ExportIntentsMetadata", }, ) @@ -484,4 +492,289 @@ class DeleteIntentRequest(proto.Message): ) +class ImportIntentsRequest(proto.Message): + r"""The request message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The agent to import the intents into. Format: + ``projects//locations//agents/``. + intents_uri (str): + The `Google Cloud + Storage `__ URI to + import intents from. The format of this URI must be + ``gs:///``. + + Dialogflow performs a read operation for the Cloud Storage + object on the caller's behalf, so your request + authentication must have read permissions for the object. + For more information, see `Dialogflow access + control `__. + + This field is a member of `oneof`_ ``intents``. + intents_content (google.cloud.dialogflowcx_v3beta1.types.InlineSource): + Uncompressed byte content of intents. + + This field is a member of `oneof`_ ``intents``. + merge_option (google.cloud.dialogflowcx_v3beta1.types.ImportIntentsRequest.MergeOption): + Merge option for importing intents. If not specified, + ``REJECT`` is assumed. + """ + + class MergeOption(proto.Enum): + r"""Merge option when display name conflicts exist during import. + + Values: + MERGE_OPTION_UNSPECIFIED (0): + Unspecified. Should not be used. + REJECT (1): + DEPRECATED: Please use + [REPORT_CONFLICT][ImportIntentsRequest.REPORT_CONFLICT] + instead. Fail the request if there are intents whose display + names conflict with the display names of intents in the + agent. + REPLACE (2): + Replace the original intent in the agent with + the new intent when display name conflicts + exist. + MERGE (3): + Merge the original intent with the new intent + when display name conflicts exist. + RENAME (4): + Create new intents with new display names to + differentiate them from the existing intents + when display name conflicts exist. + REPORT_CONFLICT (5): + Report conflict information if display names + conflict is detected. Otherwise, import intents. + KEEP (6): + Keep the original intent and discard the + conflicting new intent when display name + conflicts exist. 
+ """ + MERGE_OPTION_UNSPECIFIED = 0 + REJECT = 1 + REPLACE = 2 + MERGE = 3 + RENAME = 4 + REPORT_CONFLICT = 5 + KEEP = 6 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intents_uri: str = proto.Field( + proto.STRING, + number=2, + oneof="intents", + ) + intents_content: inline.InlineSource = proto.Field( + proto.MESSAGE, + number=3, + oneof="intents", + message=inline.InlineSource, + ) + merge_option: MergeOption = proto.Field( + proto.ENUM, + number=4, + enum=MergeOption, + ) + + +class ImportIntentsResponse(proto.Message): + r"""The response message for + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents]. + + Attributes: + intents (MutableSequence[str]): + The unique identifier of the imported intents. Format: + ``projects//locations//agents//intents/``. + conflicting_resources (google.cloud.dialogflowcx_v3beta1.types.ImportIntentsResponse.ConflictingResources): + Info which resources have conflicts when + [REPORT_CONFLICT][ImportIntentsResponse.REPORT_CONFLICT] + merge_option is set in ImportIntentsRequest. + """ + + class ConflictingResources(proto.Message): + r"""Conflicting resources detected during the import process. Only + filled when [REPORT_CONFLICT][ImportIntentsResponse.REPORT_CONFLICT] + is set in the request and there are conflicts in the display names. + + Attributes: + intent_display_names (MutableSequence[str]): + Display names of conflicting intents. + entity_display_names (MutableSequence[str]): + Display names of conflicting entities. + """ + + intent_display_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + entity_display_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + intents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + conflicting_resources: ConflictingResources = proto.Field( + proto.MESSAGE, + number=2, + message=ConflictingResources, + ) + + +class ImportIntentsMetadata(proto.Message): + r"""Metadata returned for the + [Intents.ImportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents] + long running operation. + + """ + + +class ExportIntentsRequest(proto.Message): + r"""The request message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The name of the parent agent to export intents. + Format: + ``projects//locations//agents/``. + intents (MutableSequence[str]): + Required. The name of the intents to export. Format: + ``projects//locations//agents//intents/``. + intents_uri (str): + Optional. The `Google Cloud + Storage `__ URI to + export the intents to. The format of this URI must be + ``gs:///``. + + Dialogflow performs a write operation for the Cloud Storage + object on the caller's behalf, so your request + authentication must have write permissions for the object. + For more information, see `Dialogflow access + control `__. + + This field is a member of `oneof`_ ``destination``. + intents_content_inline (bool): + Optional. The option to return the serialized + intents inline. + + This field is a member of `oneof`_ ``destination``. 
+ data_format (google.cloud.dialogflowcx_v3beta1.types.ExportIntentsRequest.DataFormat): + Optional. The data format of the exported intents. If not + specified, ``BLOB`` is assumed. + """ + + class DataFormat(proto.Enum): + r"""Data format of the exported intents. + + Values: + DATA_FORMAT_UNSPECIFIED (0): + Unspecified format. Treated as ``BLOB``. + BLOB (1): + Intents will be exported as raw bytes. + JSON (2): + Intents will be exported in JSON format. + CSV (3): + Intents will be exported in CSV format. + """ + DATA_FORMAT_UNSPECIFIED = 0 + BLOB = 1 + JSON = 2 + CSV = 3 + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + intents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + intents_uri: str = proto.Field( + proto.STRING, + number=3, + oneof="destination", + ) + intents_content_inline: bool = proto.Field( + proto.BOOL, + number=4, + oneof="destination", + ) + data_format: DataFormat = proto.Field( + proto.ENUM, + number=5, + enum=DataFormat, + ) + + +class ExportIntentsResponse(proto.Message): + r"""The response message for + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + intents_uri (str): + The URI to a file containing the exported intents. This + field is populated only if ``intents_uri`` is specified in + [ExportIntentsRequest][google.cloud.dialogflow.cx.v3beta1.ExportIntentsRequest]. + + This field is a member of `oneof`_ ``intents``. + intents_content (google.cloud.dialogflowcx_v3beta1.types.InlineDestination): + Uncompressed byte content for intents. This field is + populated only if ``intents_content_inline`` is set to true + in + [ExportIntentsRequest][google.cloud.dialogflow.cx.v3beta1.ExportIntentsRequest]. + + This field is a member of `oneof`_ ``intents``. + """ + + intents_uri: str = proto.Field( + proto.STRING, + number=1, + oneof="intents", + ) + intents_content: inline.InlineDestination = proto.Field( + proto.MESSAGE, + number=2, + oneof="intents", + message=inline.InlineDestination, + ) + + +class ExportIntentsMetadata(proto.Message): + r"""Metadata returned for the + [Intents.ExportIntents][google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents] + long running operation. + + """ + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/page.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/page.py index f81d160d8ec9..96fc1327fbe2 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/page.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/page.py @@ -22,6 +22,9 @@ import proto # type: ignore from google.cloud.dialogflowcx_v3beta1.types import data_store_connection, fulfillment +from google.cloud.dialogflowcx_v3beta1.types import ( + advanced_settings as gcdc_advanced_settings, +) __protobuf__ = proto.module( package="google.cloud.dialogflow.cx.v3beta1", @@ -127,6 +130,11 @@ class Page(proto.Message): Handlers associated with the page to handle events such as webhook errors, no match or no input. 
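The generated samples later in this change export to a Cloud Storage URI; the inline alternative sketched here sets intents_content_inline and picks a data_format instead (resource names are placeholders):

from google.cloud import dialogflowcx_v3beta1

client = dialogflowcx_v3beta1.IntentsClient()

request = dialogflowcx_v3beta1.ExportIntentsRequest(
    parent="projects/my-project/locations/global/agents/my-agent",  # placeholder
    intents=["projects/my-project/locations/global/agents/my-agent/intents/my-intent"],  # placeholder
    intents_content_inline=True,  # return bytes instead of writing to GCS
    data_format=dialogflowcx_v3beta1.ExportIntentsRequest.DataFormat.JSON,
)

operation = client.export_intents(request=request)
response = operation.result()  # ExportIntentsResponse

# With inline export, the serialized intents come back on the response.
exported_bytes = response.intents_content.content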
+ advanced_settings (google.cloud.dialogflowcx_v3beta1.types.AdvancedSettings): + Hierarchical advanced settings for this page. + The settings exposed at the lower level + override the settings exposed at the higher + level. knowledge_connector_settings (google.cloud.dialogflowcx_v3beta1.types.KnowledgeConnectorSettings): Optional. Knowledge connector configuration. """ @@ -163,6 +171,11 @@ class Page(proto.Message): number=10, message="EventHandler", ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=13, + message=gcdc_advanced_settings.AdvancedSettings, + ) knowledge_connector_settings: "KnowledgeConnectorSettings" = proto.Field( proto.MESSAGE, number=18, @@ -223,6 +236,11 @@ class Parameter(proto.Message): parameter level redaction or [entity type level redaction][google.cloud.dialogflow.cx.v3beta1.EntityType.redact] is enabled. + advanced_settings (google.cloud.dialogflowcx_v3beta1.types.AdvancedSettings): + Hierarchical advanced settings for this + parameter. The settings exposed at the lower + level override the settings exposed at the + higher level. """ class FillBehavior(proto.Message): @@ -317,6 +335,11 @@ class FillBehavior(proto.Message): proto.BOOL, number=11, ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=12, + message=gcdc_advanced_settings.AdvancedSettings, + ) parameters: MutableSequence[Parameter] = proto.RepeatedField( proto.MESSAGE, diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py index 7e8a33397b69..24ed960b8c9f 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/types/session.py @@ -28,6 +28,9 @@ response_message, session_entity_type, ) +from google.cloud.dialogflowcx_v3beta1.types import ( + advanced_settings as gcdc_advanced_settings, +) from google.cloud.dialogflowcx_v3beta1.types import audio_config from google.cloud.dialogflowcx_v3beta1.types import intent as gcdc_intent @@ -1073,6 +1076,16 @@ class QueryResult(proto.Message): [``analyze_query_text_sentiment``] [google.cloud.dialogflow.cx.v3beta1.QueryParameters.analyze_query_text_sentiment], specified in the request. + advanced_settings (google.cloud.dialogflowcx_v3beta1.types.AdvancedSettings): + Returns the current advanced settings + including IVR settings. Even though the + operations configured by these settings are + performed by Dialogflow, the client may need to + perform special logic at that moment. For + example, if Dialogflow exports audio to Google + Cloud Storage, then the client may need to wait + for the resulting object to appear in the bucket + before proceeding.
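A sketch of the client-side handling the QueryResult docstring above describes, assuming audio export has been configured on the agent (the session name and query text are placeholders):

from google.cloud import dialogflowcx_v3beta1

sessions = dialogflowcx_v3beta1.SessionsClient()

request = dialogflowcx_v3beta1.DetectIntentRequest(
    session="projects/my-project/locations/global/agents/my-agent/sessions/my-session",  # placeholder
    query_input=dialogflowcx_v3beta1.QueryInput(
        text=dialogflowcx_v3beta1.TextInput(text="hello"),
        language_code="en",
    ),
)
response = sessions.detect_intent(request=request)

settings = response.query_result.advanced_settings
if settings.audio_export_gcs_destination.uri:
    # Dialogflow writes the exported audio asynchronously; the caller may
    # need to wait for the object to appear in the bucket before reading it.
    print("audio will land at", settings.audio_export_gcs_destination.uri)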
""" text: str = proto.Field( @@ -1156,6 +1169,11 @@ class QueryResult(proto.Message): number=17, message="SentimentAnalysisResult", ) + advanced_settings: gcdc_advanced_settings.AdvancedSettings = proto.Field( + proto.MESSAGE, + number=21, + message=gcdc_advanced_settings.AdvancedSettings, + ) class TextInput(proto.Message): diff --git a/packages/google-cloud-dialogflow-cx/noxfile.py b/packages/google-cloud-dialogflow-cx/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-dialogflow-cx/noxfile.py +++ b/packages/google-cloud-dialogflow-cx/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. - if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_export_intents_async.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_export_intents_async.py new file mode 100644 index 000000000000..c626f785ae22 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_export_intents_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow-cx + + +# [START dialogflow_v3_generated_Intents_ExportIntents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflowcx_v3 + + +async def sample_export_intents(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + intents=['intents_value1', 'intents_value2'], + ) + + # Make the request + operation = client.export_intents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v3_generated_Intents_ExportIntents_async] diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_export_intents_sync.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_export_intents_sync.py new file mode 100644 index 000000000000..ae1813e80b3e --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_export_intents_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow-cx + + +# [START dialogflow_v3_generated_Intents_ExportIntents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflowcx_v3 + + +def sample_export_intents(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ExportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + intents=['intents_value1', 'intents_value2'], + ) + + # Make the request + operation = client.export_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v3_generated_Intents_ExportIntents_sync] diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_import_intents_async.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_import_intents_async.py new file mode 100644 index 000000000000..0aa85a8896bb --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_import_intents_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow-cx + + +# [START dialogflow_v3_generated_Intents_ImportIntents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflowcx_v3 + + +async def sample_import_intents(): + # Create a client + client = dialogflowcx_v3.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ImportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_intents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v3_generated_Intents_ImportIntents_async] diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_import_intents_sync.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_import_intents_sync.py new file mode 100644 index 000000000000..4eba2ac58198 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_intents_import_intents_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow-cx + + +# [START dialogflow_v3_generated_Intents_ImportIntents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflowcx_v3 + + +def sample_import_intents(): + # Create a client + client = dialogflowcx_v3.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3.ImportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v3_generated_Intents_ImportIntents_sync] diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_export_intents_async.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_export_intents_async.py new file mode 100644 index 000000000000..42255cc58257 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_export_intents_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow-cx + + +# [START dialogflow_v3beta1_generated_Intents_ExportIntents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflowcx_v3beta1 + + +async def sample_export_intents(): + # Create a client + client = dialogflowcx_v3beta1.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3beta1.ExportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + intents=['intents_value1', 'intents_value2'], + ) + + # Make the request + operation = client.export_intents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v3beta1_generated_Intents_ExportIntents_async] diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_export_intents_sync.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_export_intents_sync.py new file mode 100644 index 000000000000..68da6becfa75 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_export_intents_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow-cx + + +# [START dialogflow_v3beta1_generated_Intents_ExportIntents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflowcx_v3beta1 + + +def sample_export_intents(): + # Create a client + client = dialogflowcx_v3beta1.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3beta1.ExportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + intents=['intents_value1', 'intents_value2'], + ) + + # Make the request + operation = client.export_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v3beta1_generated_Intents_ExportIntents_sync] diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_import_intents_async.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_import_intents_async.py new file mode 100644 index 000000000000..65447b5b14f7 --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_import_intents_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow-cx + + +# [START dialogflow_v3beta1_generated_Intents_ImportIntents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflowcx_v3beta1 + + +async def sample_import_intents(): + # Create a client + client = dialogflowcx_v3beta1.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflowcx_v3beta1.ImportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_intents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v3beta1_generated_Intents_ImportIntents_async] diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_import_intents_sync.py b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_import_intents_sync.py new file mode 100644 index 000000000000..20eafe101d3b --- /dev/null +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3beta1_generated_intents_import_intents_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow-cx + + +# [START dialogflow_v3beta1_generated_Intents_ImportIntents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflowcx_v3beta1 + + +def sample_import_intents(): + # Create a client + client = dialogflowcx_v3beta1.IntentsClient() + + # Initialize request argument(s) + request = dialogflowcx_v3beta1.ImportIntentsRequest( + intents_uri="intents_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v3beta1_generated_Intents_ImportIntents_sync] diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json index 2ee987e2ff77..078d3ee16252 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.27.1" + "version": "0.1.0" }, "snippets": [ { @@ -7743,6 +7743,159 @@ ], "title": "dialogflow_v3_generated_intents_delete_intent_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflowcx_v3.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflowcx_v3.IntentsAsyncClient.export_intents", + "method": { + "fullName": "google.cloud.dialogflow.cx.v3.Intents.ExportIntents", + "service": { + "fullName": "google.cloud.dialogflow.cx.v3.Intents", + "shortName": "Intents" + }, + "shortName": "ExportIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflowcx_v3.types.ExportIntentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_intents" + }, + "description": "Sample for ExportIntents", + "file": "dialogflow_v3_generated_intents_export_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v3_generated_Intents_ExportIntents_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v3_generated_intents_export_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflowcx_v3.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflowcx_v3.IntentsClient.export_intents", + "method": { + "fullName": "google.cloud.dialogflow.cx.v3.Intents.ExportIntents", + "service": { + "fullName": "google.cloud.dialogflow.cx.v3.Intents", + "shortName": "Intents" + }, + "shortName": 
"ExportIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflowcx_v3.types.ExportIntentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_intents" + }, + "description": "Sample for ExportIntents", + "file": "dialogflow_v3_generated_intents_export_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v3_generated_Intents_ExportIntents_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v3_generated_intents_export_intents_sync.py" + }, { "canonical": true, "clientMethod": { @@ -7904,6 +8057,159 @@ ], "title": "dialogflow_v3_generated_intents_get_intent_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflowcx_v3.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflowcx_v3.IntentsAsyncClient.import_intents", + "method": { + "fullName": "google.cloud.dialogflow.cx.v3.Intents.ImportIntents", + "service": { + "fullName": "google.cloud.dialogflow.cx.v3.Intents", + "shortName": "Intents" + }, + "shortName": "ImportIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflowcx_v3.types.ImportIntentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_intents" + }, + "description": "Sample for ImportIntents", + "file": "dialogflow_v3_generated_intents_import_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v3_generated_Intents_ImportIntents_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v3_generated_intents_import_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflowcx_v3.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflowcx_v3.IntentsClient.import_intents", + "method": { + "fullName": "google.cloud.dialogflow.cx.v3.Intents.ImportIntents", + "service": { + "fullName": "google.cloud.dialogflow.cx.v3.Intents", + "shortName": "Intents" + }, + "shortName": "ImportIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflowcx_v3.types.ImportIntentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_intents" + }, + "description": "Sample for ImportIntents", + "file": "dialogflow_v3_generated_intents_import_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v3_generated_Intents_ImportIntents_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v3_generated_intents_import_intents_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json index 758da063a8e9..c268e8afeef3 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.27.1" + "version": "0.1.0" }, "snippets": [ { @@ -7743,6 +7743,159 @@ ], "title": "dialogflow_v3beta1_generated_intents_delete_intent_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflowcx_v3beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflowcx_v3beta1.IntentsAsyncClient.export_intents", + "method": { + "fullName": "google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents", + "service": { + "fullName": "google.cloud.dialogflow.cx.v3beta1.Intents", + "shortName": "Intents" + }, + "shortName": "ExportIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflowcx_v3beta1.types.ExportIntentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_intents" + }, + "description": "Sample for ExportIntents", + "file": "dialogflow_v3beta1_generated_intents_export_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v3beta1_generated_Intents_ExportIntents_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v3beta1_generated_intents_export_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflowcx_v3beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": 
"google.cloud.dialogflowcx_v3beta1.IntentsClient.export_intents", + "method": { + "fullName": "google.cloud.dialogflow.cx.v3beta1.Intents.ExportIntents", + "service": { + "fullName": "google.cloud.dialogflow.cx.v3beta1.Intents", + "shortName": "Intents" + }, + "shortName": "ExportIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflowcx_v3beta1.types.ExportIntentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_intents" + }, + "description": "Sample for ExportIntents", + "file": "dialogflow_v3beta1_generated_intents_export_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v3beta1_generated_Intents_ExportIntents_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v3beta1_generated_intents_export_intents_sync.py" + }, { "canonical": true, "clientMethod": { @@ -7904,6 +8057,159 @@ ], "title": "dialogflow_v3beta1_generated_intents_get_intent_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflowcx_v3beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflowcx_v3beta1.IntentsAsyncClient.import_intents", + "method": { + "fullName": "google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents", + "service": { + "fullName": "google.cloud.dialogflow.cx.v3beta1.Intents", + "shortName": "Intents" + }, + "shortName": "ImportIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflowcx_v3beta1.types.ImportIntentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_intents" + }, + "description": "Sample for ImportIntents", + "file": "dialogflow_v3beta1_generated_intents_import_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v3beta1_generated_Intents_ImportIntents_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v3beta1_generated_intents_import_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflowcx_v3beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflowcx_v3beta1.IntentsClient.import_intents", + "method": { + "fullName": "google.cloud.dialogflow.cx.v3beta1.Intents.ImportIntents", + "service": { + 
"fullName": "google.cloud.dialogflow.cx.v3beta1.Intents", + "shortName": "Intents" + }, + "shortName": "ImportIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflowcx_v3beta1.types.ImportIntentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_intents" + }, + "description": "Sample for ImportIntents", + "file": "dialogflow_v3beta1_generated_intents_import_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v3beta1_generated_Intents_ImportIntents_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v3beta1_generated_intents_import_intents_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-dialogflow-cx/scripts/fixup_dialogflowcx_v3_keywords.py b/packages/google-cloud-dialogflow-cx/scripts/fixup_dialogflowcx_v3_keywords.py index fd5c1515eb6f..2f631a4ede1d 100644 --- a/packages/google-cloud-dialogflow-cx/scripts/fixup_dialogflowcx_v3_keywords.py +++ b/packages/google-cloud-dialogflow-cx/scripts/fixup_dialogflowcx_v3_keywords.py @@ -72,6 +72,7 @@ class dialogflowcxCallTransformer(cst.CSTTransformer): 'detect_intent': ('session', 'query_input', 'query_params', 'output_audio_config', ), 'export_agent': ('name', 'agent_uri', 'data_format', 'environment', 'git_destination', 'include_bigquery_export_settings', ), 'export_flow': ('name', 'flow_uri', 'include_referenced_flows', ), + 'export_intents': ('parent', 'intents', 'intents_uri', 'intents_content_inline', 'data_format', ), 'export_test_cases': ('parent', 'gcs_uri', 'data_format', 'filter', ), 'fulfill_intent': ('match_intent_request', 'match', 'output_audio_config', ), 'get_agent': ('name', ), @@ -94,6 +95,7 @@ class dialogflowcxCallTransformer(cst.CSTTransformer): 'get_version': ('name', ), 'get_webhook': ('name', ), 'import_flow': ('parent', 'flow_uri', 'flow_content', 'import_option', 'flow_import_strategy', ), + 'import_intents': ('parent', 'intents_uri', 'intents_content', 'merge_option', ), 'import_test_cases': ('parent', 'gcs_uri', 'content', ), 'list_agents': ('parent', 'page_size', 'page_token', ), 'list_changelogs': ('parent', 'filter', 'page_size', 'page_token', ), diff --git a/packages/google-cloud-dialogflow-cx/scripts/fixup_dialogflowcx_v3beta1_keywords.py b/packages/google-cloud-dialogflow-cx/scripts/fixup_dialogflowcx_v3beta1_keywords.py index fd5c1515eb6f..2f631a4ede1d 100644 --- a/packages/google-cloud-dialogflow-cx/scripts/fixup_dialogflowcx_v3beta1_keywords.py +++ b/packages/google-cloud-dialogflow-cx/scripts/fixup_dialogflowcx_v3beta1_keywords.py @@ -72,6 +72,7 @@ class dialogflowcxCallTransformer(cst.CSTTransformer): 'detect_intent': ('session', 'query_input', 'query_params', 'output_audio_config', ), 'export_agent': ('name', 'agent_uri', 'data_format', 'environment', 'git_destination', 'include_bigquery_export_settings', ), 'export_flow': ('name', 'flow_uri', 'include_referenced_flows', ), + 
'export_intents': ('parent', 'intents', 'intents_uri', 'intents_content_inline', 'data_format', ), 'export_test_cases': ('parent', 'gcs_uri', 'data_format', 'filter', ), 'fulfill_intent': ('match_intent_request', 'match', 'output_audio_config', ), 'get_agent': ('name', ), @@ -94,6 +95,7 @@ class dialogflowcxCallTransformer(cst.CSTTransformer): 'get_version': ('name', ), 'get_webhook': ('name', ), 'import_flow': ('parent', 'flow_uri', 'flow_content', 'import_option', 'flow_import_strategy', ), + 'import_intents': ('parent', 'intents_uri', 'intents_content', 'merge_option', ), 'import_test_cases': ('parent', 'gcs_uri', 'content', ), 'list_agents': ('parent', 'page_size', 'page_token', ), 'list_changelogs': ('parent', 'filter', 'page_size', 'page_token', ), diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py index 0ddbd755c4d2..79aa06c42bfa 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py @@ -4019,6 +4019,11 @@ def test_create_agent_rest(request_type): "locked": True, "advanced_settings": { "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, "logging_settings": { "enable_stackdriver_logging": True, "enable_interaction_logging": True, @@ -4255,6 +4260,11 @@ def test_create_agent_rest_bad_request( "locked": True, "advanced_settings": { "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, "logging_settings": { "enable_stackdriver_logging": True, "enable_interaction_logging": True, @@ -4385,6 +4395,11 @@ def test_update_agent_rest(request_type): "locked": True, "advanced_settings": { "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, "logging_settings": { "enable_stackdriver_logging": True, "enable_interaction_logging": True, @@ -4612,6 +4627,11 @@ def test_update_agent_rest_bad_request( "locked": True, "advanced_settings": { "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, "logging_settings": { "enable_stackdriver_logging": True, "enable_interaction_logging": True, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py index 75597d73169b..e1fdbb8723e7 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py @@ -63,14 +63,15 @@ pagers, transports, ) +from google.cloud.dialogflowcx_v3.types import advanced_settings, data_store_connection from google.cloud.dialogflowcx_v3.types import ( fulfillment, + gcs, import_strategy, page, response_message, validation_message, ) -from google.cloud.dialogflowcx_v3.types import data_store_connection from google.cloud.dialogflowcx_v3.types import flow from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow @@ -3067,6 +3068,19 @@ def test_create_flow_rest(request_type): ] } ], + "advanced_settings": { + 
"audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3090,6 +3104,7 @@ def test_create_flow_rest(request_type): "classification_threshold": 0.25520000000000004, "model_training_mode": 1, }, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -3361,6 +3376,19 @@ def test_create_flow_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3384,6 +3412,7 @@ def test_create_flow_rest_bad_request( "classification_threshold": 0.25520000000000004, "model_training_mode": 1, }, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -4429,6 +4458,19 @@ def test_update_flow_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -4452,6 +4494,7 @@ def test_update_flow_rest(request_type): "classification_threshold": 0.25520000000000004, "model_training_mode": 1, }, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -4727,6 +4770,19 @@ def test_update_flow_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -4750,6 +4806,7 @@ def test_update_flow_rest_bad_request( "classification_threshold": 0.25520000000000004, "model_training_mode": 1, }, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py index 24644e6af410..4e52eb65f8cc 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py @@ -26,9 +26,18 @@ import json import math -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import 
exceptions as core_exceptions +from google.api_core import operation_async # type: ignore import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -51,6 +60,7 @@ pagers, transports, ) +from google.cloud.dialogflowcx_v3.types import inline from google.cloud.dialogflowcx_v3.types import intent from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent @@ -2059,6 +2069,294 @@ async def test_delete_intent_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + intent.ImportIntentsRequest, + dict, + ], +) +def test_import_intents(request_type, transport: str = "grpc"): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ImportIntentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_intents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + client.import_intents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ImportIntentsRequest() + + +@pytest.mark.asyncio +async def test_import_intents_async( + transport: str = "grpc_asyncio", request_type=intent.ImportIntentsRequest +): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ImportIntentsRequest() + + # Establish that the response is the type that we expect. 
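The gRPC tests above stub the transport, so they only verify that the client wraps the returned operations_pb2.Operation in an api-core future. Against a live backend, import_intents starts a long-running operation that is resolved with result(). A minimal usage sketch; the agent path and bucket URI are placeholders, not values from this change:

from google.cloud import dialogflowcx_v3


def run_import_intents() -> None:
    client = dialogflowcx_v3.IntentsClient()
    request = dialogflowcx_v3.ImportIntentsRequest(
        # Placeholder resource names.
        parent="projects/my-project/locations/global/agents/my-agent",
        intents_uri="gs://my-bucket/intents.zip",
    )
    # Returns a google.api_core.operation.Operation; result() polls the
    # server-side LRO until it completes and yields the import result.
    operation = client.import_intents(request=request)
    print(operation.result(timeout=300))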
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_intents_async_from_dict(): + await test_import_intents_async(request_type=dict) + + +def test_import_intents_field_headers(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intent.ImportIntentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_intents_field_headers_async(): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intent.ImportIntentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + intent.ExportIntentsRequest, + dict, + ], +) +def test_export_intents(request_type, transport: str = "grpc"): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ExportIntentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_intents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + client.export_intents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ExportIntentsRequest() + + +@pytest.mark.asyncio +async def test_export_intents_async( + transport: str = "grpc_asyncio", request_type=intent.ExportIntentsRequest +): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ExportIntentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_intents_async_from_dict(): + await test_export_intents_async(request_type=dict) + + +def test_export_intents_field_headers(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intent.ExportIntentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_intents_field_headers_async(): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intent.ExportIntentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
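The field-header tests pin down the implicit routing metadata: whenever a request field is bound into the HTTP URI, the generated client mirrors it into an x-goog-request-params entry. The header value is built by api-core's routing_header helper, which this sketch calls directly:

from google.api_core.gapic_v1 import routing_header

# Mirrors what the generated client does before each call: URL-encode the
# URI-bound fields into a single metadata entry.
metadata = routing_header.to_grpc_metadata([("parent", "parent_value")])
assert metadata == ("x-goog-request-params", "parent=parent_value")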
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -3598,28 +3896,446 @@ def test_delete_intent_rest_error(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.IntentsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = IntentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.IntentsGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + intent.ImportIntentsRequest, + dict, + ], +) +def test_import_intents_rest(request_type): + client = IntentsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = IntentsClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - # It is an error to provide an api_key and a transport instance. + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/agents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_intents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_import_intents_rest_required_fields(request_type=intent.ImportIntentsRequest): + transport_class = transports.IntentsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_intents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_intents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
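The *_rest tests exercise the same methods over HTTP/JSON instead of gRPC; in user code that path is selected with the transport argument. A sketch with placeholder resource names, using the request fields listed in the fixup keyword maps above:

from google.cloud import dialogflowcx_v3

client = dialogflowcx_v3.IntentsClient(transport="rest")
# A dict is coerced into ExportIntentsRequest: parent, the intents to
# export, and a destination (inline content here).
operation = client.export_intents(
    request={
        "parent": "projects/my-project/locations/global/agents/my-agent",
        "intents": [
            "projects/my-project/locations/global/agents/my-agent/intents/my-intent"
        ],
        "intents_content_inline": True,
    }
)
print(operation.result(timeout=300))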
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_intents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_intents_rest_unset_required_fields(): + transport = transports.IntentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.import_intents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_intents_rest_interceptors(null_interceptor): + transport = transports.IntentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IntentsRestInterceptor(), + ) + client = IntentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.IntentsRestInterceptor, "post_import_intents" + ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "pre_import_intents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = intent.ImportIntentsRequest.pb(intent.ImportIntentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = intent.ImportIntentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_intents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_intents_rest_bad_request( + transport: str = "rest", request_type=intent.ImportIntentsRequest +): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/agents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
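The interceptor tests only assert that the pre/post hooks fire once around a call. Subclassing the interceptor class named in the test gives user code the same hook points; a hedged sketch (the logging here is purely illustrative):

import logging

from google.auth.credentials import AnonymousCredentials
from google.cloud.dialogflowcx_v3 import IntentsClient
from google.cloud.dialogflowcx_v3.services.intents import transports


class LoggingIntentsInterceptor(transports.IntentsRestInterceptor):
    def pre_import_intents(self, request, metadata):
        # Inspect or rewrite the request before it is transcoded and sent.
        logging.info("ImportIntents parent=%s", request.parent)
        return request, metadata

    def post_import_intents(self, response):
        # The REST method surfaces a raw operations_pb2.Operation here.
        logging.info("ImportIntents operation=%s", response.name)
        return response


# Wired in the same way the test constructs its transport.
transport = transports.IntentsRestTransport(
    credentials=AnonymousCredentials(),
    interceptor=LoggingIntentsInterceptor(),
)
client = IntentsClient(transport=transport)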
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_intents(request) + + +def test_import_intents_rest_error(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intent.ExportIntentsRequest, + dict, + ], +) +def test_export_intents_rest(request_type): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/agents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_intents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_intents_rest_required_fields(request_type=intent.ExportIntentsRequest): + transport_class = transports.IntentsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["intents"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_intents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["intents"] = "intents_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_intents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "intents" in jsonified_request + assert jsonified_request["intents"] == "intents_value" + + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
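path_template.transcode is stubbed above because it is the piece that maps a request onto an http rule, producing the method, expanded URI, body, and query params that the session call needs. Calling it directly shows the shape of that result; the http rule below is assumed from the usual Dialogflow CX surface, not quoted from this change:

from google.api_core import path_template

# Assumed http rule for ExportIntents; "body": "*" sends all remaining
# fields in the JSON body, leaving query_params empty.
http_options = [
    {
        "method": "post",
        "uri": "/v3/{parent=projects/*/locations/*/agents/*}/intents:export",
        "body": "*",
    }
]
result = path_template.transcode(
    http_options,
    parent="projects/p/locations/l/agents/a",
    intents_uri="gs://bucket/intents.json",
)
# -> post /v3/projects/p/locations/l/agents/a/intents:export
print(result["method"], result["uri"])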
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_intents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_intents_rest_unset_required_fields(): + transport = transports.IntentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_intents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "intents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_intents_rest_interceptors(null_interceptor): + transport = transports.IntentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IntentsRestInterceptor(), + ) + client = IntentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.IntentsRestInterceptor, "post_export_intents" + ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "pre_export_intents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = intent.ExportIntentsRequest.pb(intent.ExportIntentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = intent.ExportIntentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_intents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_intents_rest_bad_request( + transport: str = "rest", request_type=intent.ExportIntentsRequest +): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/agents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
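A note on the _get_unset_required_fields assertions above: the right-hand side intersects the required-field tuple with an empty set, so as written it always reduces to set(). In effect the tests assert that no required field is left over for the query string once the body carries the whole request:

# The generated expression, evaluated on its own:
assert (set(()) & set(("parent", "intents"))) == set()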
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_intents(request) + + +def test_export_intents_rest_error(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.IntentsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.IntentsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IntentsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. transport = transports.IntentsGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3742,6 +4458,8 @@ def test_intents_base_transport(): "create_intent", "update_intent", "delete_intent", + "import_intents", + "export_intents", "get_location", "list_locations", "get_operation", @@ -3755,6 +4473,11 @@ def test_intents_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + # Catch all for all remaining methods and properties remainder = [ "kind", @@ -3951,6 +4674,23 @@ def test_intents_http_transport_client_cert_source_for_mtls(): mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) +def test_intents_rest_lro_client(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ @@ -4029,6 +4769,12 @@ def test_intents_client_transport_session_collision(transport_name): session1 = client1.transport.delete_intent._session session2 = client2.transport.delete_intent._session assert session1 != session2 + session1 = client1.transport.import_intents._session + session2 = client2.transport.import_intents._session + assert session1 != session2 + session1 = client1.transport.export_intents._session + session2 = client2.transport.export_intents._session + assert session1 != session2 def test_intents_grpc_transport_channel(): @@ -4149,6 +4895,40 @@ def test_intents_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel +def test_intents_grpc_lro_client(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
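The LRO-client tests that follow check two things: the operations_client property returns the right operations_v1 class for the transport, and repeated accesses return the identical object. The identity check is what memoization guarantees; a minimal sketch of that pattern, assuming the real transports cache the client on first access:

import functools


class MemoizingTransport:
    @functools.cached_property
    def operations_client(self):
        # Stands in for operations_v1.OperationsClient and friends.
        return object()


t = MemoizingTransport()
assert t.operations_client is t.operations_client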
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_intents_grpc_lro_async_client(): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + def test_entity_type_path(): project = "squid" location = "clam" diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py index 58802ea27a99..9f2ca53f8c85 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py @@ -52,7 +52,12 @@ pagers, transports, ) -from google.cloud.dialogflowcx_v3.types import data_store_connection, fulfillment +from google.cloud.dialogflowcx_v3.types import ( + advanced_settings, + data_store_connection, + fulfillment, + gcs, +) from google.cloud.dialogflowcx_v3.types import page from google.cloud.dialogflowcx_v3.types import page as gcdc_page from google.cloud.dialogflowcx_v3.types import response_message @@ -2733,6 +2738,19 @@ def test_create_page_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -2755,6 +2773,7 @@ def test_create_page_rest(request_type): }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -2774,6 +2793,7 @@ def test_create_page_rest(request_type): } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -3034,6 +3054,19 @@ def test_create_page_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -3056,6 +3089,7 @@ def test_create_page_rest_bad_request( }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -3075,6 +3109,7 @@ def test_create_page_rest_bad_request( } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -3248,6 +3283,19 @@ def test_update_page_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": 
True, }, "form": { "parameters": [ @@ -3270,6 +3318,7 @@ def test_update_page_rest(request_type): }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -3289,6 +3338,7 @@ def test_update_page_rest(request_type): } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -3551,6 +3601,19 @@ def test_update_page_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -3573,6 +3636,7 @@ def test_update_page_rest_bad_request( }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -3592,6 +3656,7 @@ def test_update_page_rest_bad_request( } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py index cb7af621d933..5ee6d3fc3a15 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py @@ -65,9 +65,11 @@ transports, ) from google.cloud.dialogflowcx_v3.types import ( + advanced_settings, audio_config, data_store_connection, fulfillment, + gcs, intent, page, response_message, @@ -4606,6 +4608,19 @@ def test_create_test_case_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -4628,6 +4643,7 @@ def test_create_test_case_rest(request_type): }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -4647,6 +4663,7 @@ def test_create_test_case_rest(request_type): } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -5015,6 +5032,19 @@ def test_create_test_case_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -5037,6 +5067,7 @@ def test_create_test_case_rest_bad_request( }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -5056,6 +5087,7 @@ def test_create_test_case_rest_bad_request( } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -5331,6 +5363,19 @@ def test_update_test_case_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": 
"finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -5353,6 +5398,7 @@ def test_update_test_case_rest(request_type): }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -5372,6 +5418,7 @@ def test_update_test_case_rest(request_type): } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -5741,6 +5788,19 @@ def test_update_test_case_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -5763,6 +5823,7 @@ def test_update_test_case_rest_bad_request( }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -5782,6 +5843,7 @@ def test_update_test_case_rest_bad_request( } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py index d11fe7827d61..59bc0c65525e 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py @@ -52,7 +52,13 @@ pagers, transports, ) -from google.cloud.dialogflowcx_v3.types import fulfillment, page, response_message +from google.cloud.dialogflowcx_v3.types import ( + advanced_settings, + fulfillment, + gcs, + page, + response_message, +) from google.cloud.dialogflowcx_v3.types import ( transition_route_group as gcdc_transition_route_group, ) @@ -2985,6 +2991,19 @@ def test_create_transition_route_group_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3268,6 +3287,19 @@ def test_create_transition_route_group_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3453,6 +3485,19 @@ def test_update_transition_route_group_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + 
"enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3738,6 +3783,19 @@ def test_update_transition_route_group_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py index 268651a987ea..5dfc5b88463a 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py @@ -4021,6 +4021,11 @@ def test_create_agent_rest(request_type): "locked": True, "advanced_settings": { "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, "logging_settings": { "enable_stackdriver_logging": True, "enable_interaction_logging": True, @@ -4257,6 +4262,11 @@ def test_create_agent_rest_bad_request( "locked": True, "advanced_settings": { "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, "logging_settings": { "enable_stackdriver_logging": True, "enable_interaction_logging": True, @@ -4388,6 +4398,11 @@ def test_update_agent_rest(request_type): "locked": True, "advanced_settings": { "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, "logging_settings": { "enable_stackdriver_logging": True, "enable_interaction_logging": True, @@ -4615,6 +4630,11 @@ def test_update_agent_rest_bad_request( "locked": True, "advanced_settings": { "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, "logging_settings": { "enable_stackdriver_logging": True, "enable_interaction_logging": True, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py index 705aecda6ae7..63265be8e73c 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py @@ -63,14 +63,18 @@ pagers, transports, ) +from google.cloud.dialogflowcx_v3beta1.types import ( + advanced_settings, + data_store_connection, +) from google.cloud.dialogflowcx_v3beta1.types import ( fulfillment, + gcs, import_strategy, page, response_message, validation_message, ) -from google.cloud.dialogflowcx_v3beta1.types import data_store_connection from google.cloud.dialogflowcx_v3beta1.types import flow from google.cloud.dialogflowcx_v3beta1.types import flow as gcdc_flow @@ -3066,6 +3070,19 @@ def test_create_flow_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": 
True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3089,6 +3106,7 @@ def test_create_flow_rest(request_type): "classification_threshold": 0.25520000000000004, "model_training_mode": 1, }, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -3359,6 +3377,19 @@ def test_create_flow_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3382,6 +3413,7 @@ def test_create_flow_rest_bad_request( "classification_threshold": 0.25520000000000004, "model_training_mode": 1, }, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -4426,6 +4458,19 @@ def test_update_flow_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -4449,6 +4494,7 @@ def test_update_flow_rest(request_type): "classification_threshold": 0.25520000000000004, "model_training_mode": 1, }, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -4723,6 +4769,19 @@ def test_update_flow_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -4746,6 +4805,7 @@ def test_update_flow_rest_bad_request( "classification_threshold": 0.25520000000000004, "model_training_mode": 1, }, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py index 2d89a2bc1bec..7916c2953a24 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py @@ -26,9 +26,18 @@ import json import math -from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import 
operation_async # type: ignore import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -51,6 +60,7 @@ pagers, transports, ) +from google.cloud.dialogflowcx_v3beta1.types import inline from google.cloud.dialogflowcx_v3beta1.types import intent from google.cloud.dialogflowcx_v3beta1.types import intent as gcdc_intent @@ -2059,6 +2069,294 @@ async def test_delete_intent_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + intent.ImportIntentsRequest, + dict, + ], +) +def test_import_intents(request_type, transport: str = "grpc"): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ImportIntentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_intents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + client.import_intents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ImportIntentsRequest() + + +@pytest.mark.asyncio +async def test_import_intents_async( + transport: str = "grpc_asyncio", request_type=intent.ImportIntentsRequest +): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ImportIntentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_intents_async_from_dict(): + await test_import_intents_async(request_type=dict) + + +def test_import_intents_field_headers(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = intent.ImportIntentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_intents_field_headers_async(): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intent.ImportIntentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.import_intents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + intent.ExportIntentsRequest, + dict, + ], +) +def test_export_intents(request_type, transport: str = "grpc"): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ExportIntentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_intents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + client.export_intents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ExportIntentsRequest() + + +@pytest.mark.asyncio +async def test_export_intents_async( + transport: str = "grpc_asyncio", request_type=intent.ExportIntentsRequest +): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == intent.ExportIntentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_intents_async_from_dict(): + await test_export_intents_async(request_type=dict) + + +def test_export_intents_field_headers(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intent.ExportIntentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_intents_field_headers_async(): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = intent.ExportIntentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.export_intents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_intents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -3598,28 +3896,446 @@ def test_delete_intent_rest_error(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.IntentsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = IntentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.IntentsGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + intent.ImportIntentsRequest, + dict, + ], +) +def test_import_intents_rest(request_type): + client = IntentsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = IntentsClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - # It is an error to provide an api_key and a transport instance. + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/agents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.import_intents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_import_intents_rest_required_fields(request_type=intent.ImportIntentsRequest): + transport_class = transports.IntentsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_intents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_intents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.import_intents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_import_intents_rest_unset_required_fields(): + transport = transports.IntentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.import_intents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_intents_rest_interceptors(null_interceptor): + transport = transports.IntentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IntentsRestInterceptor(), + ) + client = IntentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.IntentsRestInterceptor, "post_import_intents" + ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "pre_import_intents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = intent.ImportIntentsRequest.pb(intent.ImportIntentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = intent.ImportIntentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_intents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_intents_rest_bad_request( + transport: str = "rest", request_type=intent.ImportIntentsRequest +): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/agents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
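+ # A mocked 400 response is translated by the transport into + # core_exceptions.BadRequest, which pytest.raises expects below.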
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_intents(request) + + +def test_import_intents_rest_error(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + intent.ExportIntentsRequest, + dict, + ], +) +def test_export_intents_rest(request_type): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/agents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.export_intents(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_intents_rest_required_fields(request_type=intent.ExportIntentsRequest): + transport_class = transports.IntentsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["intents"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_intents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["intents"] = "intents_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_intents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "intents" in jsonified_request + assert jsonified_request["intents"] == "intents_value" + + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
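+ # transcode() normally maps the proto request onto an HTTP method, URI, + # body and query string using the method's http annotations; returning a + # canned result here sidesteps validation of the placeholder field values.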
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.export_intents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_intents_rest_unset_required_fields(): + transport = transports.IntentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_intents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "intents", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_intents_rest_interceptors(null_interceptor): + transport = transports.IntentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IntentsRestInterceptor(), + ) + client = IntentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.IntentsRestInterceptor, "post_export_intents" + ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "pre_export_intents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = intent.ExportIntentsRequest.pb(intent.ExportIntentsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = intent.ExportIntentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_intents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_intents_rest_bad_request( + transport: str = "rest", request_type=intent.ExportIntentsRequest +): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/agents/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_intents(request) + + +def test_export_intents_rest_error(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.IntentsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.IntentsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IntentsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. transport = transports.IntentsGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3742,6 +4458,8 @@ def test_intents_base_transport(): "create_intent", "update_intent", "delete_intent", + "import_intents", + "export_intents", "get_location", "list_locations", "get_operation", @@ -3755,6 +4473,11 @@ with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + # Catch all for all remaining methods and properties remainder = [ "kind", @@ -3951,6 +4674,23 @@ def test_intents_http_transport_client_cert_source_for_mtls(): mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) +def test_intents_rest_lro_client(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property return the exact same object. + assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ @@ -4029,6 +4769,12 @@ def test_intents_client_transport_session_collision(transport_name): session1 = client1.transport.delete_intent._session session2 = client2.transport.delete_intent._session assert session1 != session2 + session1 = client1.transport.import_intents._session + session2 = client2.transport.import_intents._session + assert session1 != session2 + session1 = client1.transport.export_intents._session + session2 = client2.transport.export_intents._session + assert session1 != session2 def test_intents_grpc_transport_channel(): @@ -4149,6 +4895,40 @@ def test_intents_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel +def test_intents_grpc_lro_client(): + client = IntentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have an api-core operations client.
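+ # The gRPC transport lazily constructs an operations_v1.OperationsClient + # over its channel; long-running methods use it to poll operation status.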
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property return the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_intents_grpc_lro_async_client(): + client = IntentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property return the exact same object. + assert transport.operations_client is transport.operations_client + + def test_entity_type_path(): project = "squid" location = "clam" diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py index 8338a55cd4c3..e79d7d544744 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py @@ -52,7 +52,12 @@ pagers, transports, ) -from google.cloud.dialogflowcx_v3beta1.types import data_store_connection, fulfillment +from google.cloud.dialogflowcx_v3beta1.types import ( + advanced_settings, + data_store_connection, + fulfillment, + gcs, +) from google.cloud.dialogflowcx_v3beta1.types import page from google.cloud.dialogflowcx_v3beta1.types import page as gcdc_page from google.cloud.dialogflowcx_v3beta1.types import response_message @@ -2732,6 +2737,19 @@ def test_create_page_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -2754,6 +2772,7 @@ }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -2773,6 +2792,7 @@ } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -3032,6 +3052,19 @@ def test_create_page_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -3054,6 +3087,7 @@ }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -3073,6 +3107,7 @@ } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -3245,6 +3280,19 @@ def test_update_page_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging":
True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -3267,6 +3315,7 @@ def test_update_page_rest(request_type): }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -3286,6 +3335,7 @@ def test_update_page_rest(request_type): } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -3547,6 +3597,19 @@ def test_update_page_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -3569,6 +3632,7 @@ def test_update_page_rest_bad_request( }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -3588,6 +3652,7 @@ def test_update_page_rest_bad_request( } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py index d8e73c7f3f98..f8e9fb6d2f68 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py @@ -65,9 +65,11 @@ transports, ) from google.cloud.dialogflowcx_v3beta1.types import ( + advanced_settings, audio_config, data_store_connection, fulfillment, + gcs, intent, page, response_message, @@ -4605,6 +4607,19 @@ def test_create_test_case_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -4627,6 +4642,7 @@ def test_create_test_case_rest(request_type): }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -4646,6 +4662,7 @@ def test_create_test_case_rest(request_type): } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -5013,6 +5030,19 @@ def test_create_test_case_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -5035,6 +5065,7 @@ def test_create_test_case_rest_bad_request( }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -5054,6 +5085,7 @@ def test_create_test_case_rest_bad_request( } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -5328,6 +5360,19 @@ def test_update_test_case_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + 
"enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -5350,6 +5395,7 @@ def test_update_test_case_rest(request_type): }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -5369,6 +5415,7 @@ def test_update_test_case_rest(request_type): } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, @@ -5737,6 +5784,19 @@ def test_update_test_case_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "form": { "parameters": [ @@ -5759,6 +5819,7 @@ def test_update_test_case_rest_bad_request( }, "default_value": {}, "redact": True, + "advanced_settings": {}, } ] }, @@ -5778,6 +5839,7 @@ def test_update_test_case_rest_bad_request( } ], "event_handlers": {}, + "advanced_settings": {}, "knowledge_connector_settings": { "enabled": True, "trigger_fulfillment": {}, diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py index b1650b0da6c6..06ae6fbc2755 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py @@ -52,7 +52,13 @@ pagers, transports, ) -from google.cloud.dialogflowcx_v3beta1.types import fulfillment, page, response_message +from google.cloud.dialogflowcx_v3beta1.types import ( + advanced_settings, + fulfillment, + gcs, + page, + response_message, +) from google.cloud.dialogflowcx_v3beta1.types import ( transition_route_group as gcdc_transition_route_group, ) @@ -2984,6 +2990,19 @@ def test_create_transition_route_group_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3266,6 +3285,19 @@ def test_create_transition_route_group_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3450,6 +3482,19 @@ def test_update_transition_route_group_rest(request_type): ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + 
"enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", @@ -3734,6 +3779,19 @@ def test_update_transition_route_group_rest_bad_request( ] } ], + "advanced_settings": { + "audio_export_gcs_destination": {"uri": "uri_value"}, + "dtmf_settings": { + "enabled": True, + "max_digits": 1065, + "finish_digit": "finish_digit_value", + }, + "logging_settings": { + "enable_stackdriver_logging": True, + "enable_interaction_logging": True, + }, + }, + "enable_generative_fallback": True, }, "target_page": "target_page_value", "target_flow": "target_flow_value", diff --git a/packages/google-cloud-dialogflow/CHANGELOG.md b/packages/google-cloud-dialogflow/CHANGELOG.md index a40c53f53ca5..a7593ff0589f 100644 --- a/packages/google-cloud-dialogflow/CHANGELOG.md +++ b/packages/google-cloud-dialogflow/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/dialogflow/#history +## [2.24.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.24.0...google-cloud-dialogflow-v2.24.1) (2023-09-25) + + +### Bug Fixes + +* Delete un-referenced UPGRADING.md to amke docs build pass ([0eebe3e](https://github.com/googleapis/google-cloud-python/commit/0eebe3e9a9fa78319ad294f814c4b48f2dc73e2a)) + ## [2.24.0](https://github.com/googleapis/python-dialogflow/compare/v2.23.3...v2.24.0) (2023-09-13) diff --git a/packages/google-cloud-dialogflow/CONTRIBUTING.rst b/packages/google-cloud-dialogflow/CONTRIBUTING.rst index 3b5850bd8227..ef76893eed7d 100644 --- a/packages/google-cloud-dialogflow/CONTRIBUTING.rst +++ b/packages/google-cloud-dialogflow/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system- -- -k + $ nox -s system-3.11 -- -k .. note:: - System tests are only configured to run under Python. + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index 07de09d568ba..360a0d13ebdd 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index 07de09d568ba..360a0d13ebdd 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/participant.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/participant.py index 619f4359b115..4c2248a77fff 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/participant.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/participant.py @@ -703,6 +703,27 @@ class StreamingAnalyzeContentRequest(proto.Message): Note: this field should only be used if you are connecting to a Dialogflow CX agent. + enable_extended_streaming (bool): + Optional. Enable full bidirectional streaming. You can keep + streaming the audio until timeout, and there's no need to + half close the stream to get the response. + + Restrictions: + + - Timeout: 3 mins. + - Audio Encoding: only supports + [AudioEncoding.AUDIO_ENCODING_LINEAR_16][google.cloud.dialogflow.v2.AudioEncoding.AUDIO_ENCODING_LINEAR_16] + and + [AudioEncoding.AUDIO_ENCODING_MULAW][google.cloud.dialogflow.v2.AudioEncoding.AUDIO_ENCODING_MULAW] + - Lifecycle: conversation should be in ``Assist Stage``, go + to [Conversation.CreateConversation][] for more + information. + + InvalidArgument Error will be returned if the one of + restriction checks failed. + + You can find more details in + https://cloud.google.com/agent-assist/docs/extended-streaming enable_partial_automated_agent_reply (bool): Enable partial virtual agent responses. If this flag is not enabled, response stream still contains only one final @@ -766,6 +787,10 @@ class StreamingAnalyzeContentRequest(proto.Message): number=13, message=struct_pb2.Struct, ) + enable_extended_streaming: bool = proto.Field( + proto.BOOL, + number=11, + ) enable_partial_automated_agent_reply: bool = proto.Field( proto.BOOL, number=12, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py index 07de09d568ba..360a0d13ebdd 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py index 7a5ad1739558..481807096377 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py @@ -1148,6 +1148,27 @@ class StreamingAnalyzeContentRequest(proto.Message): Note: this field should only be used if you are connecting to a Dialogflow CX agent. + enable_extended_streaming (bool): + Optional. Enable full bidirectional streaming. You can keep + streaming the audio until timeout, and there's no need to + half close the stream to get the response. + + Restrictions: + + - Timeout: 3 mins. + - Audio Encoding: only supports + [AudioEncoding.AUDIO_ENCODING_LINEAR_16][google.cloud.dialogflow.v2beta1.AudioEncoding.AUDIO_ENCODING_LINEAR_16] + and + [AudioEncoding.AUDIO_ENCODING_MULAW][google.cloud.dialogflow.v2beta1.AudioEncoding.AUDIO_ENCODING_MULAW] + - Lifecycle: conversation should be in ``Assist Stage``, go + to [Conversation.CreateConversation][] for more + information. + + InvalidArgument Error will be returned if the one of + restriction checks failed. + + You can find more details in + https://cloud.google.com/agent-assist/docs/extended-streaming enable_partial_automated_agent_reply (bool): Enable partial virtual agent responses. If this flag is not enabled, response stream still contains only one final @@ -1215,6 +1236,10 @@ class StreamingAnalyzeContentRequest(proto.Message): proto.STRING, number=15, ) + enable_extended_streaming: bool = proto.Field( + proto.BOOL, + number=11, + ) enable_partial_automated_agent_reply: bool = proto.Field( proto.BOOL, number=12, diff --git a/packages/google-cloud-dialogflow/noxfile.py b/packages/google-cloud-dialogflow/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-dialogflow/noxfile.py +++ b/packages/google-cloud-dialogflow/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. 
- if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_delete_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_delete_agent_async.py new file mode 100644 index 000000000000..8a3e75136ebb --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_delete_agent_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_DeleteAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_agent(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteAgentRequest( + parent="parent_value", + ) + + # Make the request + await client.delete_agent(request=request) + + +# [END dialogflow_v2_generated_Agents_DeleteAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_delete_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_delete_agent_sync.py new file mode 100644 index 000000000000..62c96107ee15 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_delete_agent_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_DeleteAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_agent(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteAgentRequest( + parent="parent_value", + ) + + # Make the request + client.delete_agent(request=request) + + +# [END dialogflow_v2_generated_Agents_DeleteAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_export_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_export_agent_async.py new file mode 100644 index 000000000000..cefa941e77b2 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_export_agent_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_ExportAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_export_agent(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.ExportAgentRequest( + parent="parent_value", + agent_uri="agent_uri_value", + ) + + # Make the request + operation = client.export_agent(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_ExportAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_export_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_export_agent_sync.py new file mode 100644 index 000000000000..df775b23188c --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_export_agent_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_ExportAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_export_agent(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ExportAgentRequest( + parent="parent_value", + agent_uri="agent_uri_value", + ) + + # Make the request + operation = client.export_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_ExportAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_agent_async.py new file mode 100644 index 000000000000..35d5e6708728 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_agent_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_GetAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_agent(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetAgentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.get_agent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_GetAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_agent_sync.py new file mode 100644 index 000000000000..618923ca5234 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_agent_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_GetAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_agent(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetAgentRequest( + parent="parent_value", + ) + + # Make the request + response = client.get_agent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_GetAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_validation_result_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_validation_result_async.py new file mode 100644 index 000000000000..48cdc6481ff4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_validation_result_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetValidationResult +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_GetValidationResult_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_validation_result(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetValidationResultRequest( + parent="parent_value", + ) + + # Make the request + response = await client.get_validation_result(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_GetValidationResult_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_validation_result_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_validation_result_sync.py new file mode 100644 index 000000000000..447cc76c995f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_get_validation_result_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetValidationResult +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_GetValidationResult_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_validation_result(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetValidationResultRequest( + parent="parent_value", + ) + + # Make the request + response = client.get_validation_result(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_GetValidationResult_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_import_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_import_agent_async.py new file mode 100644 index 000000000000..0cfab257c900 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_import_agent_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_ImportAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
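+#   For example, "agent_uri_value" below is a placeholder; ImportAgent expects
+#   a Cloud Storage URI to an exported agent archive, e.g.
+#   "gs://<bucket>/<object>" (assumed format; see the ImportAgentRequest docs).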
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_import_agent(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.ImportAgentRequest( + agent_uri="agent_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_agent(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_ImportAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_import_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_import_agent_sync.py new file mode 100644 index 000000000000..344c4c29ceac --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_import_agent_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_ImportAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
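+#   Note: import_agent returns a long-running operation; result() blocks
+#   until it completes and accepts an optional timeout to bound the wait
+#   (standard google-api-core behavior), e.g.:
+#       response = operation.result(timeout=300)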
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_import_agent(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ImportAgentRequest( + agent_uri="agent_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_ImportAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_restore_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_restore_agent_async.py new file mode 100644 index 000000000000..98460caa97d1 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_restore_agent_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_RestoreAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
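+#   Caution: per the v2 API description, RestoreAgent replaces the current
+#   agent with the archive referenced by agent_uri, so any unsaved draft
+#   content is overwritten.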
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_restore_agent(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.RestoreAgentRequest( + agent_uri="agent_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.restore_agent(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_RestoreAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_restore_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_restore_agent_sync.py new file mode 100644 index 000000000000..04b7ea8ade58 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_restore_agent_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_RestoreAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_restore_agent(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.RestoreAgentRequest( + agent_uri="agent_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.restore_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_RestoreAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_search_agents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_search_agents_async.py new file mode 100644 index 000000000000..e3f57fd71696 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_search_agents_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAgents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_SearchAgents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
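+#   Note: search_agents returns an async pager that fetches pages lazily;
+#   iterate items with "async for", or walk whole pages via its "pages"
+#   attribute (standard google-api-core pager convention), e.g.:
+#       async for page in page_result.pages:
+#           print(len(page.agents))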
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_search_agents(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.SearchAgentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.search_agents(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Agents_SearchAgents_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_search_agents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_search_agents_sync.py new file mode 100644 index 000000000000..fe201f801124 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_search_agents_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAgents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_SearchAgents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
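+#   Note: a project hosts at most one v2 agent, so SearchAgents is mainly
+#   useful for listing agents across all projects the caller can access
+#   (per the v2 API description).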
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_search_agents(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.SearchAgentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.search_agents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Agents_SearchAgents_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_set_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_set_agent_async.py new file mode 100644 index 000000000000..d7211481fbcc --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_set_agent_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_SetAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
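+#   For example, the Agent fields set below are the required ones; real
+#   values must replace the placeholders, and time_zone expects a tz database
+#   name such as "America/New_York" (assumed from the v2 Agent reference).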
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_set_agent(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + agent = dialogflow_v2.Agent() + agent.parent = "parent_value" + agent.display_name = "display_name_value" + agent.default_language_code = "default_language_code_value" + agent.time_zone = "time_zone_value" + + request = dialogflow_v2.SetAgentRequest( + agent=agent, + ) + + # Make the request + response = await client.set_agent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_SetAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_set_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_set_agent_sync.py new file mode 100644 index 000000000000..b1aa71dcf8e8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_set_agent_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_SetAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_set_agent(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + agent = dialogflow_v2.Agent() + agent.parent = "parent_value" + agent.display_name = "display_name_value" + agent.default_language_code = "default_language_code_value" + agent.time_zone = "time_zone_value" + + request = dialogflow_v2.SetAgentRequest( + agent=agent, + ) + + # Make the request + response = client.set_agent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_SetAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_train_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_train_agent_async.py new file mode 100644 index 000000000000..38442ee1292d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_train_agent_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TrainAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_TrainAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
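+#   Note: TrainAgent is a long-running operation whose result resolves to
+#   google.protobuf.Empty, so a successful run prints an empty response
+#   (assumed from the v2 surface).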
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_train_agent(): + # Create a client + client = dialogflow_v2.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.TrainAgentRequest( + parent="parent_value", + ) + + # Make the request + operation = client.train_agent(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_TrainAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_train_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_train_agent_sync.py new file mode 100644 index 000000000000..74124356daeb --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_agents_train_agent_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TrainAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Agents_TrainAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_train_agent(): + # Create a client + client = dialogflow_v2.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.TrainAgentRequest( + parent="parent_value", + ) + + # Make the request + operation = client.train_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Agents_TrainAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_list_answer_records_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_list_answer_records_async.py new file mode 100644 index 000000000000..aa2a6cefb6aa --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_list_answer_records_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnswerRecords +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_AnswerRecords_ListAnswerRecords_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_list_answer_records(): + # Create a client + client = dialogflow_v2.AnswerRecordsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListAnswerRecordsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_answer_records(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2_generated_AnswerRecords_ListAnswerRecords_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_list_answer_records_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_list_answer_records_sync.py new file mode 100644 index 000000000000..0cc4105f3ab0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_list_answer_records_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnswerRecords +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_AnswerRecords_ListAnswerRecords_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_answer_records(): + # Create a client + client = dialogflow_v2.AnswerRecordsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListAnswerRecordsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_answer_records(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_AnswerRecords_ListAnswerRecords_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_update_answer_record_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_update_answer_record_async.py new file mode 100644 index 000000000000..62a4f79cc584 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_update_answer_record_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAnswerRecord +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_AnswerRecords_UpdateAnswerRecord_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
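+#   Note: the generator leaves message-typed required fields unset, so the
+#   empty request below will be rejected as-is; a real call also sets
+#   request.answer_record and request.update_mask (fields assumed from the
+#   UpdateAnswerRecordRequest definition).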
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_answer_record(): + # Create a client + client = dialogflow_v2.AnswerRecordsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.UpdateAnswerRecordRequest( + ) + + # Make the request + response = await client.update_answer_record(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_AnswerRecords_UpdateAnswerRecord_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_update_answer_record_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_update_answer_record_sync.py new file mode 100644 index 000000000000..ec384750dec7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_answer_records_update_answer_record_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAnswerRecord +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_AnswerRecords_UpdateAnswerRecord_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_answer_record(): + # Create a client + client = dialogflow_v2.AnswerRecordsClient() + + # Initialize request argument(s) + request = dialogflow_v2.UpdateAnswerRecordRequest( + ) + + # Make the request + response = client.update_answer_record(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_AnswerRecords_UpdateAnswerRecord_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_create_context_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_create_context_async.py new file mode 100644 index 000000000000..031fe938fbf5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_create_context_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_CreateContext_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
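+#   For example, context.name below is a placeholder; a real context name
+#   typically has the form
+#   "projects/<project-id>/agent/sessions/<session-id>/contexts/<context-id>"
+#   (assumed v2 resource format).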
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_context(): + # Create a client + client = dialogflow_v2.ContextsAsyncClient() + + # Initialize request argument(s) + context = dialogflow_v2.Context() + context.name = "name_value" + + request = dialogflow_v2.CreateContextRequest( + parent="parent_value", + context=context, + ) + + # Make the request + response = await client.create_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Contexts_CreateContext_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_create_context_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_create_context_sync.py new file mode 100644 index 000000000000..a39e8e814c15 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_create_context_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_CreateContext_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_context(): + # Create a client + client = dialogflow_v2.ContextsClient() + + # Initialize request argument(s) + context = dialogflow_v2.Context() + context.name = "name_value" + + request = dialogflow_v2.CreateContextRequest( + parent="parent_value", + context=context, + ) + + # Make the request + response = client.create_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Contexts_CreateContext_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_all_contexts_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_all_contexts_async.py new file mode 100644 index 000000000000..49588c4e6847 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_all_contexts_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAllContexts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_DeleteAllContexts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
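+#   For example, the parent here is a session, typically of the form
+#   "projects/<project-id>/agent/sessions/<session-id>" (assumed v2 resource
+#   format), and the call deletes every active context in that session.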
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_all_contexts(): + # Create a client + client = dialogflow_v2.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteAllContextsRequest( + parent="parent_value", + ) + + # Make the request + await client.delete_all_contexts(request=request) + + +# [END dialogflow_v2_generated_Contexts_DeleteAllContexts_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_all_contexts_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_all_contexts_sync.py new file mode 100644 index 000000000000..c147e9705580 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_all_contexts_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAllContexts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_DeleteAllContexts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_all_contexts(): + # Create a client + client = dialogflow_v2.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteAllContextsRequest( + parent="parent_value", + ) + + # Make the request + client.delete_all_contexts(request=request) + + +# [END dialogflow_v2_generated_Contexts_DeleteAllContexts_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_context_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_context_async.py new file mode 100644 index 000000000000..948f4fecb123 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_context_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_DeleteContext_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_context(): + # Create a client + client = dialogflow_v2.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteContextRequest( + name="name_value", + ) + + # Make the request + await client.delete_context(request=request) + + +# [END dialogflow_v2_generated_Contexts_DeleteContext_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_context_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_context_sync.py new file mode 100644 index 000000000000..3bcf3473cca6 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_delete_context_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_DeleteContext_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_context(): + # Create a client + client = dialogflow_v2.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteContextRequest( + name="name_value", + ) + + # Make the request + client.delete_context(request=request) + + +# [END dialogflow_v2_generated_Contexts_DeleteContext_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_get_context_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_get_context_async.py new file mode 100644 index 000000000000..b2181c8615d4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_get_context_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_GetContext_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_context(): + # Create a client + client = dialogflow_v2.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetContextRequest( + name="name_value", + ) + + # Make the request + response = await client.get_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Contexts_GetContext_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_get_context_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_get_context_sync.py new file mode 100644 index 000000000000..99450c42e0aa --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_get_context_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_GetContext_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_context(): + # Create a client + client = dialogflow_v2.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetContextRequest( + name="name_value", + ) + + # Make the request + response = client.get_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Contexts_GetContext_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_list_contexts_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_list_contexts_async.py new file mode 100644 index 000000000000..83b8921f507e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_list_contexts_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListContexts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_ListContexts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_list_contexts(): + # Create a client + client = dialogflow_v2.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListContextsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_contexts(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Contexts_ListContexts_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_list_contexts_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_list_contexts_sync.py new file mode 100644 index 000000000000..756b5feddcf0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_list_contexts_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListContexts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_ListContexts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
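+#   Note: ListContextsRequest also accepts page_size to bound items per page
+#   (standard List-method semantics), and only currently active contexts are
+#   returned, since v2 contexts expire once their lifespan elapses.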
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_contexts(): + # Create a client + client = dialogflow_v2.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListContextsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_contexts(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Contexts_ListContexts_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_update_context_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_update_context_async.py new file mode 100644 index 000000000000..5e86ae041874 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_update_context_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_UpdateContext_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_context(): + # Create a client + client = dialogflow_v2.ContextsAsyncClient() + + # Initialize request argument(s) + context = dialogflow_v2.Context() + context.name = "name_value" + + request = dialogflow_v2.UpdateContextRequest( + context=context, + ) + + # Make the request + response = await client.update_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Contexts_UpdateContext_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_update_context_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_update_context_sync.py new file mode 100644 index 000000000000..4c66936c89f8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_contexts_update_context_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Contexts_UpdateContext_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
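+#   For instance, UpdateContextRequest also accepts an optional update_mask
+#   to restrict which fields change; a sketch, with the chosen path being an
+#   assumption for illustration only:
+#
+#       from google.protobuf import field_mask_pb2
+#
+#       request = dialogflow_v2.UpdateContextRequest(
+#           context=context,
+#           update_mask=field_mask_pb2.FieldMask(paths=["lifespan_count"]),
+#       )
+#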
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_context(): + # Create a client + client = dialogflow_v2.ContextsClient() + + # Initialize request argument(s) + context = dialogflow_v2.Context() + context.name = "name_value" + + request = dialogflow_v2.UpdateContextRequest( + context=context, + ) + + # Make the request + response = client.update_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Contexts_UpdateContext_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_async.py new file mode 100644 index 000000000000..b461196340ba --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_CreateConversationDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
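+#   A hedged sketch of passing a regional endpoint through client_options
+#   (the endpoint value is an assumption; the next point links the docs):
+#
+#       from google.api_core.client_options import ClientOptions
+#
+#       client = dialogflow_v2.ConversationDatasetsAsyncClient(
+#           client_options=ClientOptions(
+#               api_endpoint="us-central1-dialogflow.googleapis.com"
+#           )
+#       )
+#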
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_conversation_dataset(): + # Create a client + client = dialogflow_v2.ConversationDatasetsAsyncClient() + + # Initialize request argument(s) + conversation_dataset = dialogflow_v2.ConversationDataset() + conversation_dataset.display_name = "display_name_value" + + request = dialogflow_v2.CreateConversationDatasetRequest( + parent="parent_value", + conversation_dataset=conversation_dataset, + ) + + # Make the request + operation = client.create_conversation_dataset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_CreateConversationDataset_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_sync.py new file mode 100644 index 000000000000..265613949cfd --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_CreateConversationDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
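+#   Because this call returns a long-running operation, result() blocks
+#   until completion; a bounded wait (the timeout value is an assumed
+#   illustration) can be requested as:
+#
+#       response = operation.result(timeout=300)
+#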
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_conversation_dataset(): + # Create a client + client = dialogflow_v2.ConversationDatasetsClient() + + # Initialize request argument(s) + conversation_dataset = dialogflow_v2.ConversationDataset() + conversation_dataset.display_name = "display_name_value" + + request = dialogflow_v2.CreateConversationDatasetRequest( + parent="parent_value", + conversation_dataset=conversation_dataset, + ) + + # Make the request + operation = client.create_conversation_dataset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_CreateConversationDataset_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_async.py new file mode 100644 index 000000000000..855360449a0b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversationDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_DeleteConversationDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_conversation_dataset(): + # Create a client + client = dialogflow_v2.ConversationDatasetsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteConversationDatasetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversation_dataset(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_DeleteConversationDataset_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_sync.py new file mode 100644 index 000000000000..2d6c0cff1488 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversationDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_DeleteConversationDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_conversation_dataset(): + # Create a client + client = dialogflow_v2.ConversationDatasetsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteConversationDatasetRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversation_dataset(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_DeleteConversationDataset_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_async.py new file mode 100644 index 000000000000..7267034721e6 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_GetConversationDataset_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_conversation_dataset(): + # Create a client + client = dialogflow_v2.ConversationDatasetsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationDatasetRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversation_dataset(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_GetConversationDataset_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_sync.py new file mode 100644 index 000000000000..7c374505a181 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationDataset +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_GetConversationDataset_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_conversation_dataset(): + # Create a client + client = dialogflow_v2.ConversationDatasetsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationDatasetRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversation_dataset(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_GetConversationDataset_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_import_conversation_data_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_import_conversation_data_async.py new file mode 100644 index 000000000000..e91b6010ab4b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_import_conversation_data_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportConversationData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_ImportConversationData_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_import_conversation_data(): + # Create a client + client = dialogflow_v2.ConversationDatasetsAsyncClient() + + # Initialize request argument(s) + input_config = dialogflow_v2.InputConfig() + input_config.gcs_source.uris = ['uris_value1', 'uris_value2'] + + request = dialogflow_v2.ImportConversationDataRequest( + name="name_value", + input_config=input_config, + ) + + # Make the request + operation = client.import_conversation_data(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_ImportConversationData_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_import_conversation_data_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_import_conversation_data_sync.py new file mode 100644 index 000000000000..60196cf01d23 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_import_conversation_data_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportConversationData +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_ImportConversationData_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
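+#   In practice the placeholder URIs point at Cloud Storage objects; a
+#   hedged example (bucket and object name assumed):
+#
+#       input_config.gcs_source.uris = ["gs://my-bucket/conversation-data.json"]
+#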
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_import_conversation_data(): + # Create a client + client = dialogflow_v2.ConversationDatasetsClient() + + # Initialize request argument(s) + input_config = dialogflow_v2.InputConfig() + input_config.gcs_source.uris = ['uris_value1', 'uris_value2'] + + request = dialogflow_v2.ImportConversationDataRequest( + name="name_value", + input_config=input_config, + ) + + # Make the request + operation = client.import_conversation_data(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_ImportConversationData_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_async.py new file mode 100644 index 000000000000..6fbfd49a3396 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversationDatasets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_ListConversationDatasets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_list_conversation_datasets(): + # Create a client + client = dialogflow_v2.ConversationDatasetsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListConversationDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversation_datasets(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_ListConversationDatasets_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_sync.py new file mode 100644 index 000000000000..a2d15bd26ca8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversationDatasets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationDatasets_ListConversationDatasets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
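+#   The pager returned below can also be consumed page by page instead of
+#   item by item; a brief sketch (field names assumed from the standard
+#   List response shape):
+#
+#       for page in page_result.pages:
+#           for dataset in page.conversation_datasets:
+#               print(dataset.display_name)
+#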
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_conversation_datasets(): + # Create a client + client = dialogflow_v2.ConversationDatasetsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListConversationDatasetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversation_datasets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_ConversationDatasets_ListConversationDatasets_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_async.py new file mode 100644 index 000000000000..ec4066f21e0a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_CreateConversationModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsAsyncClient() + + # Initialize request argument(s) + conversation_model = dialogflow_v2.ConversationModel() + conversation_model.display_name = "display_name_value" + conversation_model.datasets.dataset = "dataset_value" + + request = dialogflow_v2.CreateConversationModelRequest( + conversation_model=conversation_model, + ) + + # Make the request + operation = client.create_conversation_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_CreateConversationModel_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_async.py new file mode 100644 index 000000000000..1004b58a56d0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationModelEvaluation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_CreateConversationModelEvaluation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_conversation_model_evaluation(): + # Create a client + client = dialogflow_v2.ConversationModelsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.CreateConversationModelEvaluationRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_conversation_model_evaluation(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_CreateConversationModelEvaluation_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_sync.py new file mode 100644 index 000000000000..3b693b871f2e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationModelEvaluation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_CreateConversationModelEvaluation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_conversation_model_evaluation(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + request = dialogflow_v2.CreateConversationModelEvaluationRequest( + parent="parent_value", + ) + + # Make the request + operation = client.create_conversation_model_evaluation(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_CreateConversationModelEvaluation_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_sync.py new file mode 100644 index 000000000000..00d6cad43688 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_create_conversation_model_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_CreateConversationModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + conversation_model = dialogflow_v2.ConversationModel() + conversation_model.display_name = "display_name_value" + conversation_model.datasets.dataset = "dataset_value" + + request = dialogflow_v2.CreateConversationModelRequest( + conversation_model=conversation_model, + ) + + # Make the request + operation = client.create_conversation_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_CreateConversationModel_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_delete_conversation_model_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_delete_conversation_model_async.py new file mode 100644 index 000000000000..cb1bf56b3852 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_delete_conversation_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_DeleteConversationModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteConversationModelRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversation_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_DeleteConversationModel_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_delete_conversation_model_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_delete_conversation_model_sync.py new file mode 100644 index 000000000000..0a04f155a2c6 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_delete_conversation_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_DeleteConversationModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
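+#   If the operation fails, result() raises the mapped API error; a hedged
+#   error-handling sketch:
+#
+#       from google.api_core.exceptions import GoogleAPICallError
+#
+#       try:
+#           response = operation.result()
+#       except GoogleAPICallError as exc:
+#           print(f"Delete failed: {exc}")
+#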
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteConversationModelRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversation_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_DeleteConversationModel_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_deploy_conversation_model_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_deploy_conversation_model_async.py new file mode 100644 index 000000000000..fa776493b39c --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_deploy_conversation_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeployConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_DeployConversationModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_deploy_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeployConversationModelRequest( + name="name_value", + ) + + # Make the request + operation = client.deploy_conversation_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_DeployConversationModel_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_deploy_conversation_model_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_deploy_conversation_model_sync.py new file mode 100644 index 000000000000..ca1fd81a42ac --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_deploy_conversation_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeployConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_DeployConversationModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_deploy_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeployConversationModelRequest( + name="name_value", + ) + + # Make the request + operation = client.deploy_conversation_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_DeployConversationModel_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_async.py new file mode 100644 index 000000000000..d9990b2e2fdb --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_GetConversationModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversation_model(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_GetConversationModel_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_async.py new file mode 100644 index 000000000000..d327a11ef0f9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationModelEvaluation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_GetConversationModelEvaluation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_conversation_model_evaluation(): + # Create a client + client = dialogflow_v2.ConversationModelsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationModelEvaluationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversation_model_evaluation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_GetConversationModelEvaluation_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_sync.py new file mode 100644 index 000000000000..f12f8d9e5575 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationModelEvaluation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_GetConversationModelEvaluation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_conversation_model_evaluation(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationModelEvaluationRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversation_model_evaluation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_GetConversationModelEvaluation_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_sync.py new file mode 100644 index 000000000000..187339c778ec --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_get_conversation_model_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_GetConversationModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversation_model(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_GetConversationModel_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_async.py new file mode 100644 index 000000000000..b935d77374e9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversationModelEvaluations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_ListConversationModelEvaluations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
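+# - On the async client this paged call is a coroutine: await it before
+#   iterating. A minimal illustrative sketch, assuming the client and
+#   request created below:
+#
+#       page_result = await client.list_conversation_model_evaluations(request=request)
+#       async for response in page_result:
+#           print(response)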
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_conversation_model_evaluations():
+    # Create a client
+    client = dialogflow_v2.ConversationModelsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListConversationModelEvaluationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async paged call must be awaited before iterating)
+    page_result = await client.list_conversation_model_evaluations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_ConversationModels_ListConversationModelEvaluations_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_sync.py
new file mode 100644
index 000000000000..86d2ca5fccab
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListConversationModelEvaluations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_ConversationModels_ListConversationModelEvaluations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_conversation_model_evaluations(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListConversationModelEvaluationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversation_model_evaluations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_ConversationModels_ListConversationModelEvaluations_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_models_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_models_async.py new file mode 100644 index 000000000000..0ffd99375b21 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_models_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversationModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_ListConversationModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_conversation_models():
+    # Create a client
+    client = dialogflow_v2.ConversationModelsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListConversationModelsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async paged call must be awaited before iterating)
+    page_result = await client.list_conversation_models(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_ConversationModels_ListConversationModels_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_models_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_models_sync.py
new file mode 100644
index 000000000000..de7847ebdf00
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_list_conversation_models_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListConversationModels
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_ConversationModels_ListConversationModels_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_conversation_models(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListConversationModelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversation_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_ConversationModels_ListConversationModels_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_undeploy_conversation_model_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_undeploy_conversation_model_async.py new file mode 100644 index 000000000000..78c402e81fe5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_undeploy_conversation_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeployConversationModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationModels_UndeployConversationModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
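+# - This RPC returns a long-running operation; on the async client both the
+#   call and its final result are awaited. A minimal illustrative sketch,
+#   assuming the client and request created below:
+#
+#       operation = await client.undeploy_conversation_model(request=request)
+#       response = await operation.result()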
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_undeploy_conversation_model():
+    # Create a client
+    client = dialogflow_v2.ConversationModelsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.UndeployConversationModelRequest(
+        name="name_value",
+    )
+
+    # Make the request (awaiting the call returns the long-running operation)
+    operation = await client.undeploy_conversation_model(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_ConversationModels_UndeployConversationModel_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_undeploy_conversation_model_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_undeploy_conversation_model_sync.py
new file mode 100644
index 000000000000..0dd885e42ffc
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_models_undeploy_conversation_model_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UndeployConversationModel
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_ConversationModels_UndeployConversationModel_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_undeploy_conversation_model(): + # Create a client + client = dialogflow_v2.ConversationModelsClient() + + # Initialize request argument(s) + request = dialogflow_v2.UndeployConversationModelRequest( + name="name_value", + ) + + # Make the request + operation = client.undeploy_conversation_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationModels_UndeployConversationModel_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_async.py new file mode 100644 index 000000000000..8a08b66a81d1 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClearSuggestionFeatureConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_ClearSuggestionFeatureConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_clear_suggestion_feature_config():
+    # Create a client
+    client = dialogflow_v2.ConversationProfilesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ClearSuggestionFeatureConfigRequest(
+        conversation_profile="conversation_profile_value",
+        participant_role="END_USER",
+        suggestion_feature_type="KNOWLEDGE_SEARCH",
+    )
+
+    # Make the request (awaiting the call returns the long-running operation)
+    operation = await client.clear_suggestion_feature_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_ConversationProfiles_ClearSuggestionFeatureConfig_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_sync.py
new file mode 100644
index 000000000000..6a80a30fc85d
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_sync.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ClearSuggestionFeatureConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_ConversationProfiles_ClearSuggestionFeatureConfig_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_clear_suggestion_feature_config(): + # Create a client + client = dialogflow_v2.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2.ClearSuggestionFeatureConfigRequest( + conversation_profile="conversation_profile_value", + participant_role="END_USER", + suggestion_feature_type="KNOWLEDGE_SEARCH", + ) + + # Make the request + operation = client.clear_suggestion_feature_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_ClearSuggestionFeatureConfig_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_create_conversation_profile_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_create_conversation_profile_async.py new file mode 100644 index 000000000000..ae3f212ffdb0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_create_conversation_profile_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_CreateConversationProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
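+# - The coroutine defined below needs an event loop to run. One minimal,
+#   illustrative way, assuming Application Default Credentials are
+#   configured:
+#
+#       import asyncio
+#       asyncio.run(sample_create_conversation_profile())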
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_conversation_profile(): + # Create a client + client = dialogflow_v2.ConversationProfilesAsyncClient() + + # Initialize request argument(s) + conversation_profile = dialogflow_v2.ConversationProfile() + conversation_profile.display_name = "display_name_value" + + request = dialogflow_v2.CreateConversationProfileRequest( + parent="parent_value", + conversation_profile=conversation_profile, + ) + + # Make the request + response = await client.create_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_CreateConversationProfile_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_create_conversation_profile_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_create_conversation_profile_sync.py new file mode 100644 index 000000000000..761fcdb67609 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_create_conversation_profile_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_CreateConversationProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_conversation_profile(): + # Create a client + client = dialogflow_v2.ConversationProfilesClient() + + # Initialize request argument(s) + conversation_profile = dialogflow_v2.ConversationProfile() + conversation_profile.display_name = "display_name_value" + + request = dialogflow_v2.CreateConversationProfileRequest( + parent="parent_value", + conversation_profile=conversation_profile, + ) + + # Make the request + response = client.create_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_CreateConversationProfile_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_async.py new file mode 100644 index 000000000000..75442982d0de --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_DeleteConversationProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
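+# - This RPC returns google.protobuf.Empty, so the sample below awaits the
+#   call without printing a response.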
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_conversation_profile(): + # Create a client + client = dialogflow_v2.ConversationProfilesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteConversationProfileRequest( + name="name_value", + ) + + # Make the request + await client.delete_conversation_profile(request=request) + + +# [END dialogflow_v2_generated_ConversationProfiles_DeleteConversationProfile_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_sync.py new file mode 100644 index 000000000000..3a2af11a3579 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_DeleteConversationProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_conversation_profile(): + # Create a client + client = dialogflow_v2.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteConversationProfileRequest( + name="name_value", + ) + + # Make the request + client.delete_conversation_profile(request=request) + + +# [END dialogflow_v2_generated_ConversationProfiles_DeleteConversationProfile_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_get_conversation_profile_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_get_conversation_profile_async.py new file mode 100644 index 000000000000..b326bc371846 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_get_conversation_profile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_GetConversationProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_conversation_profile(): + # Create a client + client = dialogflow_v2.ConversationProfilesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_GetConversationProfile_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_get_conversation_profile_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_get_conversation_profile_sync.py new file mode 100644 index 000000000000..701b4e04c02a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_get_conversation_profile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_GetConversationProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_conversation_profile(): + # Create a client + client = dialogflow_v2.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_GetConversationProfile_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_async.py new file mode 100644 index 000000000000..bff29d629468 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversationProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_ListConversationProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_conversation_profiles():
+    # Create a client
+    client = dialogflow_v2.ConversationProfilesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListConversationProfilesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async paged call must be awaited before iterating)
+    page_result = await client.list_conversation_profiles(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_ConversationProfiles_ListConversationProfiles_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_sync.py
new file mode 100644
index 000000000000..b9e2e8309ead
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListConversationProfiles
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_ConversationProfiles_ListConversationProfiles_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_conversation_profiles(): + # Create a client + client = dialogflow_v2.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListConversationProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversation_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_ListConversationProfiles_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_async.py new file mode 100644 index 000000000000..4a995696521b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSuggestionFeatureConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_SetSuggestionFeatureConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_set_suggestion_feature_config():
+    # Create a client
+    client = dialogflow_v2.ConversationProfilesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.SetSuggestionFeatureConfigRequest(
+        conversation_profile="conversation_profile_value",
+        participant_role="END_USER",
+    )
+
+    # Make the request (awaiting the call returns the long-running operation)
+    operation = await client.set_suggestion_feature_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_ConversationProfiles_SetSuggestionFeatureConfig_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_sync.py
new file mode 100644
index 000000000000..424896136594
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SetSuggestionFeatureConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_ConversationProfiles_SetSuggestionFeatureConfig_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_set_suggestion_feature_config(): + # Create a client + client = dialogflow_v2.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2.SetSuggestionFeatureConfigRequest( + conversation_profile="conversation_profile_value", + participant_role="END_USER", + ) + + # Make the request + operation = client.set_suggestion_feature_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_SetSuggestionFeatureConfig_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_update_conversation_profile_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_update_conversation_profile_async.py new file mode 100644 index 000000000000..34530ad7dd92 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_update_conversation_profile_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_UpdateConversationProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_conversation_profile(): + # Create a client + client = dialogflow_v2.ConversationProfilesAsyncClient() + + # Initialize request argument(s) + conversation_profile = dialogflow_v2.ConversationProfile() + conversation_profile.display_name = "display_name_value" + + request = dialogflow_v2.UpdateConversationProfileRequest( + conversation_profile=conversation_profile, + ) + + # Make the request + response = await client.update_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_UpdateConversationProfile_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_update_conversation_profile_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_update_conversation_profile_sync.py new file mode 100644 index 000000000000..b69a17a8211c --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversation_profiles_update_conversation_profile_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_ConversationProfiles_UpdateConversationProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_conversation_profile(): + # Create a client + client = dialogflow_v2.ConversationProfilesClient() + + # Initialize request argument(s) + conversation_profile = dialogflow_v2.ConversationProfile() + conversation_profile.display_name = "display_name_value" + + request = dialogflow_v2.UpdateConversationProfileRequest( + conversation_profile=conversation_profile, + ) + + # Make the request + response = client.update_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_ConversationProfiles_UpdateConversationProfile_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_complete_conversation_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_complete_conversation_async.py new file mode 100644 index 000000000000..b35488e6fec9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_complete_conversation_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CompleteConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_CompleteConversation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_complete_conversation(): + # Create a client + client = dialogflow_v2.ConversationsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.CompleteConversationRequest( + name="name_value", + ) + + # Make the request + response = await client.complete_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_CompleteConversation_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_complete_conversation_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_complete_conversation_sync.py new file mode 100644 index 000000000000..575892c534f8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_complete_conversation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CompleteConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_CompleteConversation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_complete_conversation(): + # Create a client + client = dialogflow_v2.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2.CompleteConversationRequest( + name="name_value", + ) + + # Make the request + response = client.complete_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_CompleteConversation_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_create_conversation_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_create_conversation_async.py new file mode 100644 index 000000000000..ac24dde196a5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_create_conversation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_CreateConversation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
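+# - The request may also be passed as a plain dict; proto-plus converts it
+#   to the message type. A minimal illustrative sketch, mirroring the
+#   request built below:
+#
+#       response = await client.create_conversation(request={
+#           "parent": "parent_value",
+#           "conversation": {"conversation_profile": "conversation_profile_value"},
+#       })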
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_conversation(): + # Create a client + client = dialogflow_v2.ConversationsAsyncClient() + + # Initialize request argument(s) + conversation = dialogflow_v2.Conversation() + conversation.conversation_profile = "conversation_profile_value" + + request = dialogflow_v2.CreateConversationRequest( + parent="parent_value", + conversation=conversation, + ) + + # Make the request + response = await client.create_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_CreateConversation_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_create_conversation_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_create_conversation_sync.py new file mode 100644 index 000000000000..07fad53294c7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_create_conversation_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_CreateConversation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_conversation(): + # Create a client + client = dialogflow_v2.ConversationsClient() + + # Initialize request argument(s) + conversation = dialogflow_v2.Conversation() + conversation.conversation_profile = "conversation_profile_value" + + request = dialogflow_v2.CreateConversationRequest( + parent="parent_value", + conversation=conversation, + ) + + # Make the request + response = client.create_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_CreateConversation_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_generate_stateless_summary_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_generate_stateless_summary_async.py new file mode 100644 index 000000000000..263c6c2504df --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_generate_stateless_summary_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateStatelessSummary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_GenerateStatelessSummary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_generate_stateless_summary():
+    # Create a client
+    client = dialogflow_v2.ConversationsAsyncClient()
+
+    # Initialize request argument(s)
+    # messages is a repeated field, so build the Message first and assign a list.
+    message = dialogflow_v2.Message()
+    message.content = "content_value"
+
+    stateless_conversation = dialogflow_v2.MinimalConversation()
+    stateless_conversation.messages = [message]
+    stateless_conversation.parent = "parent_value"
+
+    conversation_profile = dialogflow_v2.ConversationProfile()
+    conversation_profile.display_name = "display_name_value"
+
+    request = dialogflow_v2.GenerateStatelessSummaryRequest(
+        stateless_conversation=stateless_conversation,
+        conversation_profile=conversation_profile,
+    )
+
+    # Make the request
+    response = await client.generate_stateless_summary(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_Conversations_GenerateStatelessSummary_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_generate_stateless_summary_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_generate_stateless_summary_sync.py
new file mode 100644
index 000000000000..0a8332bbd440
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_generate_stateless_summary_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GenerateStatelessSummary
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Conversations_GenerateStatelessSummary_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+def sample_generate_stateless_summary():
+    # Create a client
+    client = dialogflow_v2.ConversationsClient()
+
+    # Initialize request argument(s)
+    # messages is a repeated field, so build the Message first and assign a list.
+    message = dialogflow_v2.Message()
+    message.content = "content_value"
+
+    stateless_conversation = dialogflow_v2.MinimalConversation()
+    stateless_conversation.messages = [message]
+    stateless_conversation.parent = "parent_value"
+
+    conversation_profile = dialogflow_v2.ConversationProfile()
+    conversation_profile.display_name = "display_name_value"
+
+    request = dialogflow_v2.GenerateStatelessSummaryRequest(
+        stateless_conversation=stateless_conversation,
+        conversation_profile=conversation_profile,
+    )
+
+    # Make the request
+    response = client.generate_stateless_summary(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_Conversations_GenerateStatelessSummary_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_get_conversation_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_get_conversation_async.py
new file mode 100644
index 000000000000..772a897c4563
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_get_conversation_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetConversation
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Conversations_GetConversation_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_conversation(): + # Create a client + client = dialogflow_v2.ConversationsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_GetConversation_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_get_conversation_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_get_conversation_sync.py new file mode 100644 index 000000000000..a61be1a5b864 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_get_conversation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_GetConversation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_conversation(): + # Create a client + client = dialogflow_v2.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetConversationRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_GetConversation_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_conversations_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_conversations_async.py new file mode 100644 index 000000000000..d440781017b1 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_conversations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_ListConversations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
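Placeholders such as "name_value" and "parent_value" in these samples stand in for full resource names. A hedged sketch of building one, assuming the generated conversation_path helper exists on the client and using hypothetical project and conversation IDs:

from google.cloud import dialogflow_v2

# Yields "projects/my-project/conversations/my-conversation" (both IDs hypothetical).
name = dialogflow_v2.ConversationsClient.conversation_path("my-project", "my-conversation")

request = dialogflow_v2.GetConversationRequest(name=name)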
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_conversations():
+    # Create a client
+    client = dialogflow_v2.ConversationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListConversationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    # The async client method is a coroutine; await it to get the pager.
+    page_result = await client.list_conversations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_Conversations_ListConversations_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_conversations_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_conversations_sync.py
new file mode 100644
index 000000000000..f9d9e774e351
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_conversations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListConversations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Conversations_ListConversations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_conversations(): + # Create a client + client = dialogflow_v2.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListConversationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Conversations_ListConversations_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_messages_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_messages_async.py new file mode 100644 index 000000000000..74465e40a52a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_messages_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMessages +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_ListMessages_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
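The list samples iterate the returned pager item by item; the pager fetches subsequent pages lazily as iteration crosses a page boundary. Where page-level access is useful, sync pagers also expose a pages iterator. A short sketch, assuming the standard GAPIC pager interface:

from google.cloud import dialogflow_v2

client = dialogflow_v2.ConversationsClient()
request = dialogflow_v2.ListConversationsRequest(parent="parent_value")

# Walk page by page instead of item by item; each page is a
# ListConversationsResponse whose .conversations holds that page's items.
for page in client.list_conversations(request=request).pages:
    for conversation in page.conversations:
        print(conversation.name)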
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_messages():
+    # Create a client
+    client = dialogflow_v2.ConversationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListMessagesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    # The async client method is a coroutine; await it to get the pager.
+    page_result = await client.list_messages(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_Conversations_ListMessages_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_messages_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_messages_sync.py
new file mode 100644
index 000000000000..873aedd3bab6
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_list_messages_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMessages
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Conversations_ListMessages_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_messages(): + # Create a client + client = dialogflow_v2.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListMessagesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_messages(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Conversations_ListMessages_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_search_knowledge_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_search_knowledge_async.py new file mode 100644 index 000000000000..3b65b455832a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_search_knowledge_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchKnowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_SearchKnowledge_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_search_knowledge(): + # Create a client + client = dialogflow_v2.ConversationsAsyncClient() + + # Initialize request argument(s) + query = dialogflow_v2.TextInput() + query.text = "text_value" + query.language_code = "language_code_value" + + request = dialogflow_v2.SearchKnowledgeRequest( + query=query, + conversation_profile="conversation_profile_value", + ) + + # Make the request + response = await client.search_knowledge(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_SearchKnowledge_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_search_knowledge_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_search_knowledge_sync.py new file mode 100644 index 000000000000..1a43367f03fd --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_search_knowledge_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchKnowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_SearchKnowledge_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_search_knowledge(): + # Create a client + client = dialogflow_v2.ConversationsClient() + + # Initialize request argument(s) + query = dialogflow_v2.TextInput() + query.text = "text_value" + query.language_code = "language_code_value" + + request = dialogflow_v2.SearchKnowledgeRequest( + query=query, + conversation_profile="conversation_profile_value", + ) + + # Make the request + response = client.search_knowledge(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_SearchKnowledge_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_suggest_conversation_summary_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_suggest_conversation_summary_async.py new file mode 100644 index 000000000000..bdf95c2d69ab --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_suggest_conversation_summary_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestConversationSummary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_SuggestConversationSummary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_suggest_conversation_summary(): + # Create a client + client = dialogflow_v2.ConversationsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.SuggestConversationSummaryRequest( + conversation="conversation_value", + ) + + # Make the request + response = await client.suggest_conversation_summary(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_SuggestConversationSummary_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_suggest_conversation_summary_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_suggest_conversation_summary_sync.py new file mode 100644 index 000000000000..7ab1bf77d459 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_conversations_suggest_conversation_summary_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestConversationSummary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Conversations_SuggestConversationSummary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_suggest_conversation_summary(): + # Create a client + client = dialogflow_v2.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2.SuggestConversationSummaryRequest( + conversation="conversation_value", + ) + + # Make the request + response = client.suggest_conversation_summary(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Conversations_SuggestConversationSummary_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_create_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_create_document_async.py new file mode 100644 index 000000000000..0f32052689cb --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_create_document_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_CreateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
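The _async samples define coroutine functions, so they only run inside an event loop. A minimal driver, assuming no loop is already running in the process:

import asyncio

# Execute one of the coroutine samples defined above.
asyncio.run(sample_suggest_conversation_summary())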
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_document(): + # Create a client + client = dialogflow_v2.DocumentsAsyncClient() + + # Initialize request argument(s) + document = dialogflow_v2.Document() + document.content_uri = "content_uri_value" + document.display_name = "display_name_value" + document.mime_type = "mime_type_value" + document.knowledge_types = ['AGENT_FACING_SMART_REPLY'] + + request = dialogflow_v2.CreateDocumentRequest( + parent="parent_value", + document=document, + ) + + # Make the request + operation = client.create_document(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_CreateDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_create_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_create_document_sync.py new file mode 100644 index 000000000000..70a544b4698c --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_create_document_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_CreateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_document(): + # Create a client + client = dialogflow_v2.DocumentsClient() + + # Initialize request argument(s) + document = dialogflow_v2.Document() + document.content_uri = "content_uri_value" + document.display_name = "display_name_value" + document.mime_type = "mime_type_value" + document.knowledge_types = ['AGENT_FACING_SMART_REPLY'] + + request = dialogflow_v2.CreateDocumentRequest( + parent="parent_value", + document=document, + ) + + # Make the request + operation = client.create_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_CreateDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_delete_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_delete_document_async.py new file mode 100644 index 000000000000..9ef0c1407d26 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_delete_document_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_DeleteDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_document(): + # Create a client + client = dialogflow_v2.DocumentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_document(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_DeleteDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_delete_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_delete_document_sync.py new file mode 100644 index 000000000000..4cc92fa3baec --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_delete_document_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_DeleteDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_document(): + # Create a client + client = dialogflow_v2.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_DeleteDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_export_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_export_document_async.py new file mode 100644 index 000000000000..9fa0d4aa5d54 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_export_document_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_ExportDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
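The Documents samples return long-running operations, and operation.result() blocks until the operation finishes. A hedged variation on the sync delete sample with a bounded wait; the 300-second timeout is an arbitrary choice:

from google.cloud import dialogflow_v2

client = dialogflow_v2.DocumentsClient()

operation = client.delete_document(
    request=dialogflow_v2.DeleteDocumentRequest(name="name_value"),
)

# Blocks for up to 300s: raises concurrent.futures.TimeoutError on timeout,
# or the mapped API error if the operation itself fails.
response = operation.result(timeout=300)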
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_export_document(): + # Create a client + client = dialogflow_v2.DocumentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.ExportDocumentRequest( + name="name_value", + ) + + # Make the request + operation = client.export_document(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_ExportDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_export_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_export_document_sync.py new file mode 100644 index 000000000000..dddeadad5573 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_export_document_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_ExportDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_export_document(): + # Create a client + client = dialogflow_v2.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ExportDocumentRequest( + name="name_value", + ) + + # Make the request + operation = client.export_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_ExportDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_get_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_get_document_async.py new file mode 100644 index 000000000000..ef9422bc3300 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_get_document_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_GetDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_document(): + # Create a client + client = dialogflow_v2.DocumentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_GetDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_get_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_get_document_sync.py new file mode 100644 index 000000000000..d0cca29e0c05 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_get_document_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_GetDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_document(): + # Create a client + client = dialogflow_v2.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_GetDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_import_documents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_import_documents_async.py new file mode 100644 index 000000000000..40d8e049c01b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_import_documents_async.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_ImportDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_import_documents(): + # Create a client + client = dialogflow_v2.DocumentsAsyncClient() + + # Initialize request argument(s) + gcs_source = dialogflow_v2.GcsSources() + gcs_source.uris = ['uris_value1', 'uris_value2'] + + document_template = dialogflow_v2.ImportDocumentTemplate() + document_template.mime_type = "mime_type_value" + document_template.knowledge_types = ['AGENT_FACING_SMART_REPLY'] + + request = dialogflow_v2.ImportDocumentsRequest( + gcs_source=gcs_source, + parent="parent_value", + document_template=document_template, + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_ImportDocuments_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_import_documents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_import_documents_sync.py new file mode 100644 index 000000000000..7118f0837e8e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_import_documents_sync.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_ImportDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_import_documents(): + # Create a client + client = dialogflow_v2.DocumentsClient() + + # Initialize request argument(s) + gcs_source = dialogflow_v2.GcsSources() + gcs_source.uris = ['uris_value1', 'uris_value2'] + + document_template = dialogflow_v2.ImportDocumentTemplate() + document_template.mime_type = "mime_type_value" + document_template.knowledge_types = ['AGENT_FACING_SMART_REPLY'] + + request = dialogflow_v2.ImportDocumentsRequest( + gcs_source=gcs_source, + parent="parent_value", + document_template=document_template, + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_ImportDocuments_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_list_documents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_list_documents_async.py new file mode 100644 index 000000000000..7cb9e463a4cc --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_list_documents_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_ListDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
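In the ImportDocuments samples, the uris values are placeholders for Cloud Storage object URIs. A hedged example with a hypothetical bucket and object names:

from google.cloud import dialogflow_v2

gcs_source = dialogflow_v2.GcsSources()
# Bucket and object names are hypothetical; each entry must be a gs:// URI.
gcs_source.uris = [
    "gs://my-bucket/faq/article-1.html",
    "gs://my-bucket/faq/article-2.html",
]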
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_documents():
+    # Create a client
+    client = dialogflow_v2.DocumentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListDocumentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; the async client method is a coroutine
+    page_result = await client.list_documents(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_Documents_ListDocuments_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_list_documents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_list_documents_sync.py
new file mode 100644
index 000000000000..86f6bc7c8563
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_list_documents_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDocuments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Documents_ListDocuments_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_documents(): + # Create a client + client = dialogflow_v2.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Documents_ListDocuments_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_reload_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_reload_document_async.py new file mode 100644 index 000000000000..71a381c88a44 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_reload_document_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReloadDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_ReloadDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_reload_document():
+    # Create a client
+    client = dialogflow_v2.DocumentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ReloadDocumentRequest(
+        content_uri="content_uri_value",
+        name="name_value",
+    )
+
+    # Make the request; both the RPC and operation.result() are coroutines
+    operation = await client.reload_document(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_Documents_ReloadDocument_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_reload_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_reload_document_sync.py
new file mode 100644
index 000000000000..45bb8fe08598
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_reload_document_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ReloadDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Documents_ReloadDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_reload_document(): + # Create a client + client = dialogflow_v2.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ReloadDocumentRequest( + content_uri="content_uri_value", + name="name_value", + ) + + # Make the request + operation = client.reload_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_ReloadDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_update_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_update_document_async.py new file mode 100644 index 000000000000..95334302afb8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_update_document_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Documents_UpdateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_update_document():
+    # Create a client
+    client = dialogflow_v2.DocumentsAsyncClient()
+
+    # Initialize request argument(s)
+    document = dialogflow_v2.Document()
+    document.content_uri = "content_uri_value"
+    document.display_name = "display_name_value"
+    document.mime_type = "mime_type_value"
+    document.knowledge_types = ['AGENT_FACING_SMART_REPLY']
+
+    request = dialogflow_v2.UpdateDocumentRequest(
+        document=document,
+    )
+
+    # Make the request; both the RPC and operation.result() are coroutines
+    operation = await client.update_document(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_Documents_UpdateDocument_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_update_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_update_document_sync.py
new file mode 100644
index 000000000000..b00d11f23ccf
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_documents_update_document_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Documents_UpdateDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_document(): + # Create a client + client = dialogflow_v2.DocumentsClient() + + # Initialize request argument(s) + document = dialogflow_v2.Document() + document.content_uri = "content_uri_value" + document.display_name = "display_name_value" + document.mime_type = "mime_type_value" + document.knowledge_types = ['AGENT_FACING_SMART_REPLY'] + + request = dialogflow_v2.UpdateDocumentRequest( + document=document, + ) + + # Make the request + operation = client.update_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Documents_UpdateDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_create_entities_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_create_entities_async.py new file mode 100644 index 000000000000..1c1725bca557 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_create_entities_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_BatchCreateEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_batch_create_entities():
+    # Create a client
+    client = dialogflow_v2.EntityTypesAsyncClient()
+
+    # Initialize request argument(s)
+    entities = dialogflow_v2.Entity()
+    entities.value = "value_value"
+    entities.synonyms = ['synonyms_value1', 'synonyms_value2']
+
+    request = dialogflow_v2.BatchCreateEntitiesRequest(
+        parent="parent_value",
+        entities=entities,
+    )
+
+    # Make the request; both the RPC and operation.result() are coroutines
+    operation = await client.batch_create_entities(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_EntityTypes_BatchCreateEntities_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_create_entities_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_create_entities_sync.py
new file mode 100644
index 000000000000..f1c6fd876ade
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_create_entities_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchCreateEntities
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_EntityTypes_BatchCreateEntities_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_batch_create_entities(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + entities = dialogflow_v2.Entity() + entities.value = "value_value" + entities.synonyms = ['synonyms_value1', 'synonyms_value2'] + + request = dialogflow_v2.BatchCreateEntitiesRequest( + parent="parent_value", + entities=entities, + ) + + # Make the request + operation = client.batch_create_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_BatchCreateEntities_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entities_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entities_async.py new file mode 100644 index 000000000000..67c2b3399d47 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entities_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_BatchDeleteEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_batch_delete_entities():
+    # Create a client
+    client = dialogflow_v2.EntityTypesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.BatchDeleteEntitiesRequest(
+        parent="parent_value",
+        entity_values=['entity_values_value1', 'entity_values_value2'],
+    )
+
+    # Make the request; both the RPC and operation.result() are coroutines
+    operation = await client.batch_delete_entities(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_EntityTypes_BatchDeleteEntities_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entities_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entities_sync.py
new file mode 100644
index 000000000000..43c047cc97d5
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entities_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchDeleteEntities
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_EntityTypes_BatchDeleteEntities_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_batch_delete_entities(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.BatchDeleteEntitiesRequest( + parent="parent_value", + entity_values=['entity_values_value1', 'entity_values_value2'], + ) + + # Make the request + operation = client.batch_delete_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_BatchDeleteEntities_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entity_types_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entity_types_async.py new file mode 100644 index 000000000000..7f5d4e448664 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entity_types_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_BatchDeleteEntityTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_batch_delete_entity_types():
+    # Create a client
+    client = dialogflow_v2.EntityTypesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.BatchDeleteEntityTypesRequest(
+        parent="parent_value",
+        entity_type_names=['entity_type_names_value1', 'entity_type_names_value2'],
+    )
+
+    # Make the request; both the RPC and operation.result() are coroutines
+    operation = await client.batch_delete_entity_types(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_EntityTypes_BatchDeleteEntityTypes_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entity_types_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entity_types_sync.py
new file mode 100644
index 000000000000..b5991eae48af
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_delete_entity_types_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchDeleteEntityTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_EntityTypes_BatchDeleteEntityTypes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_batch_delete_entity_types(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.BatchDeleteEntityTypesRequest( + parent="parent_value", + entity_type_names=['entity_type_names_value1', 'entity_type_names_value2'], + ) + + # Make the request + operation = client.batch_delete_entity_types(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_BatchDeleteEntityTypes_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entities_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entities_async.py new file mode 100644 index 000000000000..7ff24087b21d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entities_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_BatchUpdateEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_batch_update_entities():
+    # Create a client
+    client = dialogflow_v2.EntityTypesAsyncClient()
+
+    # Initialize request argument(s)
+    entities = dialogflow_v2.Entity()
+    entities.value = "value_value"
+    entities.synonyms = ['synonyms_value1', 'synonyms_value2']
+
+    request = dialogflow_v2.BatchUpdateEntitiesRequest(
+        parent="parent_value",
+        entities=entities,
+    )
+
+    # Make the request; both the RPC and operation.result() are coroutines
+    operation = await client.batch_update_entities(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_EntityTypes_BatchUpdateEntities_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entities_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entities_sync.py
new file mode 100644
index 000000000000..7c4f3089ef20
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entities_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchUpdateEntities
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_EntityTypes_BatchUpdateEntities_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_batch_update_entities(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + entities = dialogflow_v2.Entity() + entities.value = "value_value" + entities.synonyms = ['synonyms_value1', 'synonyms_value2'] + + request = dialogflow_v2.BatchUpdateEntitiesRequest( + parent="parent_value", + entities=entities, + ) + + # Make the request + operation = client.batch_update_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_BatchUpdateEntities_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entity_types_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entity_types_async.py new file mode 100644 index 000000000000..e37cf5c64e78 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entity_types_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_BatchUpdateEntityTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_batch_update_entity_types():
+    # Create a client
+    client = dialogflow_v2.EntityTypesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.BatchUpdateEntityTypesRequest(
+        entity_type_batch_uri="entity_type_batch_uri_value",
+        parent="parent_value",
+    )
+
+    # Make the request; both the RPC and operation.result() are coroutines
+    operation = await client.batch_update_entity_types(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_EntityTypes_BatchUpdateEntityTypes_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entity_types_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entity_types_sync.py
new file mode 100644
index 000000000000..b1a78b44f8bf
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_batch_update_entity_types_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchUpdateEntityTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_EntityTypes_BatchUpdateEntityTypes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_batch_update_entity_types(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.BatchUpdateEntityTypesRequest( + entity_type_batch_uri="entity_type_batch_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.batch_update_entity_types(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_BatchUpdateEntityTypes_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_create_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_create_entity_type_async.py new file mode 100644 index 000000000000..893c18fef997 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_create_entity_type_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_CreateEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_entity_type(): + # Create a client + client = dialogflow_v2.EntityTypesAsyncClient() + + # Initialize request argument(s) + entity_type = dialogflow_v2.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflow_v2.CreateEntityTypeRequest( + parent="parent_value", + entity_type=entity_type, + ) + + # Make the request + response = await client.create_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_CreateEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_create_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_create_entity_type_sync.py new file mode 100644 index 000000000000..2bfe839bdcc9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_create_entity_type_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_CreateEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_entity_type(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + entity_type = dialogflow_v2.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflow_v2.CreateEntityTypeRequest( + parent="parent_value", + entity_type=entity_type, + ) + + # Make the request + response = client.create_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_CreateEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_delete_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_delete_entity_type_async.py new file mode 100644 index 000000000000..fbbb233b0738 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_delete_entity_type_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_DeleteEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_entity_type(): + # Create a client + client = dialogflow_v2.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteEntityTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_entity_type(request=request) + + +# [END dialogflow_v2_generated_EntityTypes_DeleteEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_delete_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_delete_entity_type_sync.py new file mode 100644 index 000000000000..40b1e21ca08c --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_delete_entity_type_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_DeleteEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_entity_type(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteEntityTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_entity_type(request=request) + + +# [END dialogflow_v2_generated_EntityTypes_DeleteEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_get_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_get_entity_type_async.py new file mode 100644 index 000000000000..4e5283c80d77 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_get_entity_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_GetEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_entity_type(): + # Create a client + client = dialogflow_v2.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_GetEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_get_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_get_entity_type_sync.py new file mode 100644 index 000000000000..22f928270591 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_get_entity_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_GetEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_entity_type(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_GetEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_list_entity_types_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_list_entity_types_async.py new file mode 100644 index 000000000000..3b4f66e16657 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_list_entity_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_ListEntityTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_entity_types():
+    # Create a client
+    client = dialogflow_v2.EntityTypesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListEntityTypesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; the async client method is a coroutine
+    page_result = await client.list_entity_types(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_EntityTypes_ListEntityTypes_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_list_entity_types_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_list_entity_types_sync.py
new file mode 100644
index 000000000000..1fe308235ec2
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_list_entity_types_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntityTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_EntityTypes_ListEntityTypes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_entity_types(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_EntityTypes_ListEntityTypes_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_update_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_update_entity_type_async.py new file mode 100644 index 000000000000..d77c151ab5d4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_update_entity_type_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_UpdateEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
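+#   (For illustration: UpdateEntityTypeRequest also accepts an optional
+#   update_mask, e.g. update_mask=field_mask_pb2.FieldMask(paths=["display_name"])
+#   with field_mask_pb2 imported from google.protobuf, to limit which entity
+#   type fields are overwritten.)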
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_entity_type(): + # Create a client + client = dialogflow_v2.EntityTypesAsyncClient() + + # Initialize request argument(s) + entity_type = dialogflow_v2.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflow_v2.UpdateEntityTypeRequest( + entity_type=entity_type, + ) + + # Make the request + response = await client.update_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_UpdateEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_update_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_update_entity_type_sync.py new file mode 100644 index 000000000000..bceac5aa1e68 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_entity_types_update_entity_type_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_EntityTypes_UpdateEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_entity_type(): + # Create a client + client = dialogflow_v2.EntityTypesClient() + + # Initialize request argument(s) + entity_type = dialogflow_v2.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflow_v2.UpdateEntityTypeRequest( + entity_type=entity_type, + ) + + # Make the request + response = client.update_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_EntityTypes_UpdateEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_create_environment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_create_environment_async.py new file mode 100644 index 000000000000..f68770bd9d24 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_create_environment_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_CreateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_environment(): + # Create a client + client = dialogflow_v2.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + ) + + # Make the request + response = await client.create_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Environments_CreateEnvironment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_create_environment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_create_environment_sync.py new file mode 100644 index 000000000000..ddbf30474bd6 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_create_environment_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_CreateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_environment(): + # Create a client + client = dialogflow_v2.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + ) + + # Make the request + response = client.create_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Environments_CreateEnvironment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_delete_environment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_delete_environment_async.py new file mode 100644 index 000000000000..5d17baf4d8bd --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_delete_environment_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_DeleteEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
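+#   (For illustration: "name_value" below stands in for an environment resource
+#   name, assumed to look like
+#   "projects/<PROJECT_ID>/agent/environments/<ENVIRONMENT_ID>"; the delete call
+#   returns an empty response, so the sample prints nothing on success.)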
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_environment(): + # Create a client + client = dialogflow_v2.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + await client.delete_environment(request=request) + + +# [END dialogflow_v2_generated_Environments_DeleteEnvironment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_delete_environment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_delete_environment_sync.py new file mode 100644 index 000000000000..bb1a8e9f1736 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_delete_environment_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_DeleteEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_environment(): + # Create a client + client = dialogflow_v2.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + client.delete_environment(request=request) + + +# [END dialogflow_v2_generated_Environments_DeleteEnvironment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_async.py new file mode 100644 index 000000000000..b77262a4ebc4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_GetEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_environment(): + # Create a client + client = dialogflow_v2.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Environments_GetEnvironment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_history_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_history_async.py new file mode 100644 index 000000000000..384eb16a7345 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_history_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironmentHistory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_GetEnvironmentHistory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
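+#   (For illustration: GetEnvironmentHistory is a paginated RPC; the pager
+#   created below fetches further pages lazily while the `async for` loop
+#   consumes the history entries.)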
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2


+async def sample_get_environment_history():
+    # Create a client
+    client = dialogflow_v2.EnvironmentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.GetEnvironmentHistoryRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (await the coroutine to obtain the async pager)
+    page_result = await client.get_environment_history(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_Environments_GetEnvironmentHistory_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_history_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_history_sync.py new file mode 100644 index 000000000000..407e59207889 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_history_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironmentHistory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_GetEnvironmentHistory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_environment_history(): + # Create a client + client = dialogflow_v2.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetEnvironmentHistoryRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.get_environment_history(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Environments_GetEnvironmentHistory_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_sync.py new file mode 100644 index 000000000000..43ae2f81e840 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_get_environment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_GetEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_environment(): + # Create a client + client = dialogflow_v2.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Environments_GetEnvironment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_list_environments_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_list_environments_async.py new file mode 100644 index 000000000000..129563191edf --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_list_environments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_ListEnvironments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2


+async def sample_list_environments():
+    # Create a client
+    client = dialogflow_v2.EnvironmentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListEnvironmentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (await the coroutine to obtain the async pager)
+    page_result = await client.list_environments(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_Environments_ListEnvironments_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_list_environments_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_list_environments_sync.py new file mode 100644 index 000000000000..e38015957e30 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_list_environments_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_ListEnvironments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_environments(): + # Create a client + client = dialogflow_v2.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Environments_ListEnvironments_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_update_environment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_update_environment_async.py new file mode 100644 index 000000000000..65af86bc5cae --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_update_environment_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_UpdateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
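+#   (For illustration: this sample sends an empty UpdateEnvironmentRequest; a
+#   real call would populate request.environment, and typically an update_mask,
+#   with the fields to change.)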
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_environment(): + # Create a client + client = dialogflow_v2.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.UpdateEnvironmentRequest( + ) + + # Make the request + response = await client.update_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Environments_UpdateEnvironment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_update_environment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_update_environment_sync.py new file mode 100644 index 000000000000..d5f1112ba9b6 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_environments_update_environment_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Environments_UpdateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_environment(): + # Create a client + client = dialogflow_v2.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.UpdateEnvironmentRequest( + ) + + # Make the request + response = client.update_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Environments_UpdateEnvironment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_get_fulfillment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_get_fulfillment_async.py new file mode 100644 index 000000000000..12f99ebd1f9f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_get_fulfillment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFulfillment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Fulfillments_GetFulfillment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_fulfillment(): + # Create a client + client = dialogflow_v2.FulfillmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetFulfillmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_fulfillment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Fulfillments_GetFulfillment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_get_fulfillment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_get_fulfillment_sync.py new file mode 100644 index 000000000000..2675cba85d8e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_get_fulfillment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFulfillment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Fulfillments_GetFulfillment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
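+#   (For illustration: "name_value" below stands in for the agent's fulfillment
+#   resource name, assumed to take the form
+#   "projects/<PROJECT_ID>/agent/fulfillment".)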
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_fulfillment(): + # Create a client + client = dialogflow_v2.FulfillmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetFulfillmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_fulfillment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Fulfillments_GetFulfillment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_update_fulfillment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_update_fulfillment_async.py new file mode 100644 index 000000000000..822b383d2b0f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_update_fulfillment_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFulfillment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Fulfillments_UpdateFulfillment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_fulfillment(): + # Create a client + client = dialogflow_v2.FulfillmentsAsyncClient() + + # Initialize request argument(s) + fulfillment = dialogflow_v2.Fulfillment() + fulfillment.generic_web_service.uri = "uri_value" + fulfillment.name = "name_value" + + request = dialogflow_v2.UpdateFulfillmentRequest( + fulfillment=fulfillment, + ) + + # Make the request + response = await client.update_fulfillment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Fulfillments_UpdateFulfillment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_update_fulfillment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_update_fulfillment_sync.py new file mode 100644 index 000000000000..ef61621880b1 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_fulfillments_update_fulfillment_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFulfillment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Fulfillments_UpdateFulfillment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_fulfillment(): + # Create a client + client = dialogflow_v2.FulfillmentsClient() + + # Initialize request argument(s) + fulfillment = dialogflow_v2.Fulfillment() + fulfillment.generic_web_service.uri = "uri_value" + fulfillment.name = "name_value" + + request = dialogflow_v2.UpdateFulfillmentRequest( + fulfillment=fulfillment, + ) + + # Make the request + response = client.update_fulfillment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Fulfillments_UpdateFulfillment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_delete_intents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_delete_intents_async.py new file mode 100644 index 000000000000..1c78a0d1c991 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_delete_intents_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_BatchDeleteIntents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
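+#   (For illustration: BatchDeleteIntents is a long-running operation; the call
+#   returns an operation handle rather than a direct response, and result()
+#   yields the final, empty response once the backend finishes deleting.)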
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2


+async def sample_batch_delete_intents():
+    # Create a client
+    client = dialogflow_v2.IntentsAsyncClient()
+
+    # Initialize request argument(s)
+    intents = dialogflow_v2.Intent()
+    intents.display_name = "display_name_value"
+
+    request = dialogflow_v2.BatchDeleteIntentsRequest(
+        parent="parent_value",
+        intents=intents,
+    )
+
+    # Make the request
+    operation = client.batch_delete_intents(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # Await the operation handle, then await its final result
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_Intents_BatchDeleteIntents_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_delete_intents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_delete_intents_sync.py new file mode 100644 index 000000000000..72d658b12f6d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_delete_intents_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteIntents
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_BatchDeleteIntents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_batch_delete_intents(): + # Create a client + client = dialogflow_v2.IntentsClient() + + # Initialize request argument(s) + intents = dialogflow_v2.Intent() + intents.display_name = "display_name_value" + + request = dialogflow_v2.BatchDeleteIntentsRequest( + parent="parent_value", + intents=intents, + ) + + # Make the request + operation = client.batch_delete_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Intents_BatchDeleteIntents_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_update_intents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_update_intents_async.py new file mode 100644 index 000000000000..acbb8eb41fed --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_update_intents_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_BatchUpdateIntents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
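+#   (For illustration: "intent_batch_uri_value" is a placeholder for a Google
+#   Cloud Storage URI such as "gs://<BUCKET>/intents.json" that points at the
+#   batch of intents to update or create.)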
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2


+async def sample_batch_update_intents():
+    # Create a client
+    client = dialogflow_v2.IntentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.BatchUpdateIntentsRequest(
+        intent_batch_uri="intent_batch_uri_value",
+        parent="parent_value",
+    )
+
+    # Make the request
+    operation = client.batch_update_intents(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # Await the operation handle, then await its final result
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_Intents_BatchUpdateIntents_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_update_intents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_update_intents_sync.py new file mode 100644 index 000000000000..ffb1771e83ba --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_batch_update_intents_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateIntents
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_BatchUpdateIntents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_batch_update_intents(): + # Create a client + client = dialogflow_v2.IntentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.BatchUpdateIntentsRequest( + intent_batch_uri="intent_batch_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.batch_update_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Intents_BatchUpdateIntents_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_create_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_create_intent_async.py new file mode 100644 index 000000000000..fca003ede64c --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_create_intent_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_CreateIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
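+#   (For illustration: display_name is the only required Intent field set here;
+#   a realistic intent would usually also populate training_phrases and
+#   messages before creation.)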
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_intent(): + # Create a client + client = dialogflow_v2.IntentsAsyncClient() + + # Initialize request argument(s) + intent = dialogflow_v2.Intent() + intent.display_name = "display_name_value" + + request = dialogflow_v2.CreateIntentRequest( + parent="parent_value", + intent=intent, + ) + + # Make the request + response = await client.create_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Intents_CreateIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_create_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_create_intent_sync.py new file mode 100644 index 000000000000..d58eabda2ce0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_create_intent_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_CreateIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_intent(): + # Create a client + client = dialogflow_v2.IntentsClient() + + # Initialize request argument(s) + intent = dialogflow_v2.Intent() + intent.display_name = "display_name_value" + + request = dialogflow_v2.CreateIntentRequest( + parent="parent_value", + intent=intent, + ) + + # Make the request + response = client.create_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Intents_CreateIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_delete_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_delete_intent_async.py new file mode 100644 index 000000000000..84e656874dac --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_delete_intent_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_DeleteIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
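+#   For instance, the name field expects a full intent resource path rather
+#   than a display name; a sketch with hypothetical IDs:
+#
+#       request = dialogflow_v2.DeleteIntentRequest(
+#           name="projects/my-project/agent/intents/INTENT_ID",
+#       )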
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_intent(): + # Create a client + client = dialogflow_v2.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteIntentRequest( + name="name_value", + ) + + # Make the request + await client.delete_intent(request=request) + + +# [END dialogflow_v2_generated_Intents_DeleteIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_delete_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_delete_intent_sync.py new file mode 100644 index 000000000000..6a7e192f9e9f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_delete_intent_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_DeleteIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_intent(): + # Create a client + client = dialogflow_v2.IntentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteIntentRequest( + name="name_value", + ) + + # Make the request + client.delete_intent(request=request) + + +# [END dialogflow_v2_generated_Intents_DeleteIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_get_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_get_intent_async.py new file mode 100644 index 000000000000..0e8071119417 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_get_intent_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_GetIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_intent(): + # Create a client + client = dialogflow_v2.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetIntentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Intents_GetIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_get_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_get_intent_sync.py new file mode 100644 index 000000000000..619478fff658 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_get_intent_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_GetIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
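+#   For instance, the optional intent_view field can request the full intent,
+#   including training phrases; a sketch with hypothetical IDs:
+#
+#       request = dialogflow_v2.GetIntentRequest(
+#           name="projects/my-project/agent/intents/INTENT_ID",
+#           intent_view=dialogflow_v2.IntentView.INTENT_VIEW_FULL,
+#       )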
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+def sample_get_intent():
+    # Create a client
+    client = dialogflow_v2.IntentsClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.GetIntentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_intent(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_Intents_GetIntent_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_list_intents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_list_intents_async.py
new file mode 100644
index 000000000000..86426676ab2c
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_list_intents_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListIntents
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow


+# [START dialogflow_v2_generated_Intents_ListIntents_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_intents():
+    # Create a client
+    client = dialogflow_v2.IntentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListIntentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_intents(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_Intents_ListIntents_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_list_intents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_list_intents_sync.py
new file mode 100644
index 000000000000..67429bfcedfd
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_list_intents_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_ListIntents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_intents(): + # Create a client + client = dialogflow_v2.IntentsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListIntentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_intents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Intents_ListIntents_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_update_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_update_intent_async.py new file mode 100644 index 000000000000..629004ed6f54 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_update_intent_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_UpdateIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
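+#   For instance, an update_mask can restrict which intent fields are
+#   overwritten; a sketch (assuming the protobuf FieldMask helper):
+#
+#       from google.protobuf import field_mask_pb2
+#
+#       request = dialogflow_v2.UpdateIntentRequest(
+#           intent=intent,
+#           update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+#       )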
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_intent(): + # Create a client + client = dialogflow_v2.IntentsAsyncClient() + + # Initialize request argument(s) + intent = dialogflow_v2.Intent() + intent.display_name = "display_name_value" + + request = dialogflow_v2.UpdateIntentRequest( + intent=intent, + ) + + # Make the request + response = await client.update_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Intents_UpdateIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_update_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_update_intent_sync.py new file mode 100644 index 000000000000..98eeb98c524e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_intents_update_intent_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Intents_UpdateIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_intent(): + # Create a client + client = dialogflow_v2.IntentsClient() + + # Initialize request argument(s) + intent = dialogflow_v2.Intent() + intent.display_name = "display_name_value" + + request = dialogflow_v2.UpdateIntentRequest( + intent=intent, + ) + + # Make the request + response = client.update_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Intents_UpdateIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_create_knowledge_base_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_create_knowledge_base_async.py new file mode 100644 index 000000000000..5390a10fd297 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_create_knowledge_base_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateKnowledgeBase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_CreateKnowledgeBase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
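+#   For instance, the parent is the project that will own the knowledge base;
+#   a sketch with a hypothetical project ID:
+#
+#       request = dialogflow_v2.CreateKnowledgeBaseRequest(
+#           parent="projects/my-project",
+#           knowledge_base=knowledge_base,
+#       )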
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_knowledge_base(): + # Create a client + client = dialogflow_v2.KnowledgeBasesAsyncClient() + + # Initialize request argument(s) + knowledge_base = dialogflow_v2.KnowledgeBase() + knowledge_base.display_name = "display_name_value" + + request = dialogflow_v2.CreateKnowledgeBaseRequest( + parent="parent_value", + knowledge_base=knowledge_base, + ) + + # Make the request + response = await client.create_knowledge_base(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_KnowledgeBases_CreateKnowledgeBase_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_create_knowledge_base_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_create_knowledge_base_sync.py new file mode 100644 index 000000000000..e5e73380f92b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_create_knowledge_base_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateKnowledgeBase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_CreateKnowledgeBase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_knowledge_base(): + # Create a client + client = dialogflow_v2.KnowledgeBasesClient() + + # Initialize request argument(s) + knowledge_base = dialogflow_v2.KnowledgeBase() + knowledge_base.display_name = "display_name_value" + + request = dialogflow_v2.CreateKnowledgeBaseRequest( + parent="parent_value", + knowledge_base=knowledge_base, + ) + + # Make the request + response = client.create_knowledge_base(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_KnowledgeBases_CreateKnowledgeBase_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_async.py new file mode 100644 index 000000000000..403cf2f2bb5c --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteKnowledgeBase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_DeleteKnowledgeBase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
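+#   For instance, deleting a knowledge base that still contains documents
+#   requires the optional force flag; a sketch with hypothetical IDs:
+#
+#       request = dialogflow_v2.DeleteKnowledgeBaseRequest(
+#           name="projects/my-project/knowledgeBases/KB_ID",
+#           force=True,
+#       )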
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_knowledge_base(): + # Create a client + client = dialogflow_v2.KnowledgeBasesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteKnowledgeBaseRequest( + name="name_value", + ) + + # Make the request + await client.delete_knowledge_base(request=request) + + +# [END dialogflow_v2_generated_KnowledgeBases_DeleteKnowledgeBase_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_sync.py new file mode 100644 index 000000000000..a4df10fa43b9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteKnowledgeBase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_DeleteKnowledgeBase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_knowledge_base(): + # Create a client + client = dialogflow_v2.KnowledgeBasesClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteKnowledgeBaseRequest( + name="name_value", + ) + + # Make the request + client.delete_knowledge_base(request=request) + + +# [END dialogflow_v2_generated_KnowledgeBases_DeleteKnowledgeBase_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_get_knowledge_base_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_get_knowledge_base_async.py new file mode 100644 index 000000000000..b40c4b5fc7c9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_get_knowledge_base_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetKnowledgeBase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_GetKnowledgeBase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
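+#   For instance, the name field expects a full knowledge base resource path;
+#   a sketch with hypothetical IDs:
+#
+#       request = dialogflow_v2.GetKnowledgeBaseRequest(
+#           name="projects/my-project/knowledgeBases/KB_ID",
+#       )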
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_knowledge_base(): + # Create a client + client = dialogflow_v2.KnowledgeBasesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetKnowledgeBaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_knowledge_base(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_KnowledgeBases_GetKnowledgeBase_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_get_knowledge_base_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_get_knowledge_base_sync.py new file mode 100644 index 000000000000..3a65e066adb2 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_get_knowledge_base_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetKnowledgeBase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_GetKnowledgeBase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_knowledge_base(): + # Create a client + client = dialogflow_v2.KnowledgeBasesClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetKnowledgeBaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_knowledge_base(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_KnowledgeBases_GetKnowledgeBase_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_async.py new file mode 100644 index 000000000000..4bd53f5fc51f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListKnowledgeBases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_ListKnowledgeBases_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
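+#   For instance, the page_size field can bound how many knowledge bases are
+#   returned per page; a sketch with a hypothetical project ID:
+#
+#       request = dialogflow_v2.ListKnowledgeBasesRequest(
+#           parent="projects/my-project",
+#           page_size=10,
+#       )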
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_knowledge_bases():
+    # Create a client
+    client = dialogflow_v2.KnowledgeBasesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListKnowledgeBasesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_knowledge_bases(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_KnowledgeBases_ListKnowledgeBases_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_sync.py
new file mode 100644
index 000000000000..6d18a0a4ab39
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListKnowledgeBases
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow


+# [START dialogflow_v2_generated_KnowledgeBases_ListKnowledgeBases_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_knowledge_bases(): + # Create a client + client = dialogflow_v2.KnowledgeBasesClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListKnowledgeBasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_knowledge_bases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_KnowledgeBases_ListKnowledgeBases_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_update_knowledge_base_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_update_knowledge_base_async.py new file mode 100644 index 000000000000..6b90ae728885 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_update_knowledge_base_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKnowledgeBase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_UpdateKnowledgeBase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_knowledge_base(): + # Create a client + client = dialogflow_v2.KnowledgeBasesAsyncClient() + + # Initialize request argument(s) + knowledge_base = dialogflow_v2.KnowledgeBase() + knowledge_base.display_name = "display_name_value" + + request = dialogflow_v2.UpdateKnowledgeBaseRequest( + knowledge_base=knowledge_base, + ) + + # Make the request + response = await client.update_knowledge_base(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_KnowledgeBases_UpdateKnowledgeBase_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_update_knowledge_base_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_update_knowledge_base_sync.py new file mode 100644 index 000000000000..9aa1d1e110eb --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_knowledge_bases_update_knowledge_base_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKnowledgeBase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_KnowledgeBases_UpdateKnowledgeBase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_knowledge_base(): + # Create a client + client = dialogflow_v2.KnowledgeBasesClient() + + # Initialize request argument(s) + knowledge_base = dialogflow_v2.KnowledgeBase() + knowledge_base.display_name = "display_name_value" + + request = dialogflow_v2.UpdateKnowledgeBaseRequest( + knowledge_base=knowledge_base, + ) + + # Make the request + response = client.update_knowledge_base(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_KnowledgeBases_UpdateKnowledgeBase_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_analyze_content_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_analyze_content_async.py new file mode 100644 index 000000000000..178322f43321 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_analyze_content_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_AnalyzeContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
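+#   For instance, the participant field expects a full resource path and the
+#   text input carries the end-user utterance; a sketch with hypothetical IDs:
+#
+#       request = dialogflow_v2.AnalyzeContentRequest(
+#           participant="projects/my-project/conversations/CONV_ID/participants/PART_ID",
+#           text_input=dialogflow_v2.TextInput(
+#               text="I would like a pizza",
+#               language_code="en-US",
+#           ),
+#       )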
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_analyze_content(): + # Create a client + client = dialogflow_v2.ParticipantsAsyncClient() + + # Initialize request argument(s) + text_input = dialogflow_v2.TextInput() + text_input.text = "text_value" + text_input.language_code = "language_code_value" + + request = dialogflow_v2.AnalyzeContentRequest( + text_input=text_input, + participant="participant_value", + ) + + # Make the request + response = await client.analyze_content(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_AnalyzeContent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_analyze_content_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_analyze_content_sync.py new file mode 100644 index 000000000000..8fac243deac6 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_analyze_content_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_AnalyzeContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_analyze_content(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + text_input = dialogflow_v2.TextInput() + text_input.text = "text_value" + text_input.language_code = "language_code_value" + + request = dialogflow_v2.AnalyzeContentRequest( + text_input=text_input, + participant="participant_value", + ) + + # Make the request + response = client.analyze_content(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_AnalyzeContent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_create_participant_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_create_participant_async.py new file mode 100644 index 000000000000..5d9dfe89761a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_create_participant_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_CreateParticipant_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
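+#   For instance, a participant is created under a conversation and typically
+#   carries a role; a sketch with hypothetical IDs:
+#
+#       request = dialogflow_v2.CreateParticipantRequest(
+#           parent="projects/my-project/conversations/CONV_ID",
+#           participant=dialogflow_v2.Participant(
+#               role=dialogflow_v2.Participant.Role.END_USER,
+#           ),
+#       )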
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_participant(): + # Create a client + client = dialogflow_v2.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.CreateParticipantRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_participant(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_CreateParticipant_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_create_participant_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_create_participant_sync.py new file mode 100644 index 000000000000..520a91f1a8ac --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_create_participant_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_CreateParticipant_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_participant(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2.CreateParticipantRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_participant(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_CreateParticipant_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_get_participant_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_get_participant_async.py new file mode 100644 index 000000000000..4a238a41c600 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_get_participant_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_GetParticipant_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
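+#   For instance, the name field expects a full participant resource path;
+#   a sketch with hypothetical IDs:
+#
+#       request = dialogflow_v2.GetParticipantRequest(
+#           name="projects/my-project/conversations/CONV_ID/participants/PART_ID",
+#       )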
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_participant(): + # Create a client + client = dialogflow_v2.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetParticipantRequest( + name="name_value", + ) + + # Make the request + response = await client.get_participant(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_GetParticipant_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_get_participant_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_get_participant_sync.py new file mode 100644 index 000000000000..eafde9d7b68b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_get_participant_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_GetParticipant_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_participant(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetParticipantRequest( + name="name_value", + ) + + # Make the request + response = client.get_participant(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_GetParticipant_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_list_participants_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_list_participants_async.py new file mode 100644 index 000000000000..0e808beb29cf --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_list_participants_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListParticipants +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_ListParticipants_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_participants():
+    # Create a client
+    client = dialogflow_v2.ParticipantsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListParticipantsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (awaiting the async call returns the pager)
+    page_result = await client.list_participants(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_Participants_ListParticipants_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_list_participants_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_list_participants_sync.py
new file mode 100644
index 000000000000..52ec81b0d774
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_list_participants_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListParticipants
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Participants_ListParticipants_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_participants(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListParticipantsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participants(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Participants_ListParticipants_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py new file mode 100644 index 000000000000..99c7bf49757b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_async.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingAnalyzeContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_StreamingAnalyzeContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_streaming_analyze_content(): + # Create a client + client = dialogflow_v2.ParticipantsAsyncClient() + + # Initialize request argument(s) + audio_config = dialogflow_v2.InputAudioConfig() + audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.sample_rate_hertz = 1817 + audio_config.language_code = "language_code_value" + + request = dialogflow_v2.StreamingAnalyzeContentRequest( + audio_config=audio_config, + input_audio=b'input_audio_blob', + participant="participant_value", + ) + + # This method expects an iterator which contains + # 'dialogflow_v2.StreamingAnalyzeContentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_analyze_content(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END dialogflow_v2_generated_Participants_StreamingAnalyzeContent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py new file mode 100644 index 000000000000..8b9d52d97b15 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_streaming_analyze_content_sync.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingAnalyzeContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_StreamingAnalyzeContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_streaming_analyze_content(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + audio_config = dialogflow_v2.InputAudioConfig() + audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.sample_rate_hertz = 1817 + audio_config.language_code = "language_code_value" + + request = dialogflow_v2.StreamingAnalyzeContentRequest( + audio_config=audio_config, + input_audio=b'input_audio_blob', + participant="participant_value", + ) + + # This method expects an iterator which contains + # 'dialogflow_v2.StreamingAnalyzeContentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_analyze_content(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END dialogflow_v2_generated_Participants_StreamingAnalyzeContent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_articles_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_articles_async.py new file mode 100644 index 000000000000..97ed840fc971 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_articles_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestArticles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_SuggestArticles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_suggest_articles(): + # Create a client + client = dialogflow_v2.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.SuggestArticlesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.suggest_articles(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_SuggestArticles_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_articles_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_articles_sync.py new file mode 100644 index 000000000000..cdfb9f045311 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_articles_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestArticles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_SuggestArticles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_suggest_articles(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2.SuggestArticlesRequest( + parent="parent_value", + ) + + # Make the request + response = client.suggest_articles(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_SuggestArticles_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_faq_answers_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_faq_answers_async.py new file mode 100644 index 000000000000..27873e5efea7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_faq_answers_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestFaqAnswers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_SuggestFaqAnswers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_suggest_faq_answers(): + # Create a client + client = dialogflow_v2.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.SuggestFaqAnswersRequest( + parent="parent_value", + ) + + # Make the request + response = await client.suggest_faq_answers(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_SuggestFaqAnswers_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_faq_answers_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_faq_answers_sync.py new file mode 100644 index 000000000000..2293ef09dba4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_faq_answers_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestFaqAnswers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_SuggestFaqAnswers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_suggest_faq_answers(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2.SuggestFaqAnswersRequest( + parent="parent_value", + ) + + # Make the request + response = client.suggest_faq_answers(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_SuggestFaqAnswers_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_smart_replies_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_smart_replies_async.py new file mode 100644 index 000000000000..6758e2b8d5a7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_smart_replies_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestSmartReplies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_SuggestSmartReplies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_suggest_smart_replies(): + # Create a client + client = dialogflow_v2.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.SuggestSmartRepliesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.suggest_smart_replies(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_SuggestSmartReplies_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_smart_replies_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_smart_replies_sync.py new file mode 100644 index 000000000000..923eb1d69a8f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_suggest_smart_replies_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestSmartReplies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_SuggestSmartReplies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_suggest_smart_replies(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2.SuggestSmartRepliesRequest( + parent="parent_value", + ) + + # Make the request + response = client.suggest_smart_replies(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_SuggestSmartReplies_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_update_participant_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_update_participant_async.py new file mode 100644 index 000000000000..771356a44fc7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_update_participant_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_UpdateParticipant_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_participant(): + # Create a client + client = dialogflow_v2.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.UpdateParticipantRequest( + ) + + # Make the request + response = await client.update_participant(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_UpdateParticipant_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_update_participant_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_update_participant_sync.py new file mode 100644 index 000000000000..934c713847c7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_participants_update_participant_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Participants_UpdateParticipant_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_participant(): + # Create a client + client = dialogflow_v2.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2.UpdateParticipantRequest( + ) + + # Make the request + response = client.update_participant(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Participants_UpdateParticipant_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_create_session_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_create_session_entity_type_async.py new file mode 100644 index 000000000000..82c9f3993025 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_create_session_entity_type_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_SessionEntityTypes_CreateSessionEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_create_session_entity_type():
+    # Create a client
+    client = dialogflow_v2.SessionEntityTypesAsyncClient()
+
+    # Initialize request argument(s); `entities` is a repeated field
+    session_entity_type = dialogflow_v2.SessionEntityType()
+    session_entity_type.name = "name_value"
+    session_entity_type.entity_override_mode = "ENTITY_OVERRIDE_MODE_SUPPLEMENT"
+    entity = dialogflow_v2.EntityType.Entity(value="value_value", synonyms=['synonyms_value1', 'synonyms_value2'])
+    session_entity_type.entities.append(entity)
+
+    request = dialogflow_v2.CreateSessionEntityTypeRequest(
+        parent="parent_value",
+        session_entity_type=session_entity_type,
+    )
+
+    # Make the request
+    response = await client.create_session_entity_type(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_SessionEntityTypes_CreateSessionEntityType_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_create_session_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_create_session_entity_type_sync.py
new file mode 100644
index 000000000000..e0fed344e5f8
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_create_session_entity_type_sync.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateSessionEntityType
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_SessionEntityTypes_CreateSessionEntityType_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+def sample_create_session_entity_type():
+    # Create a client
+    client = dialogflow_v2.SessionEntityTypesClient()
+
+    # Initialize request argument(s); `entities` is a repeated field
+    session_entity_type = dialogflow_v2.SessionEntityType()
+    session_entity_type.name = "name_value"
+    session_entity_type.entity_override_mode = "ENTITY_OVERRIDE_MODE_SUPPLEMENT"
+    entity = dialogflow_v2.EntityType.Entity(value="value_value", synonyms=['synonyms_value1', 'synonyms_value2'])
+    session_entity_type.entities.append(entity)
+
+    request = dialogflow_v2.CreateSessionEntityTypeRequest(
+        parent="parent_value",
+        session_entity_type=session_entity_type,
+    )
+
+    # Make the request
+    response = client.create_session_entity_type(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_SessionEntityTypes_CreateSessionEntityType_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_delete_session_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_delete_session_entity_type_async.py
new file mode 100644
index 000000000000..10c84e3b2468
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_delete_session_entity_type_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteSessionEntityType
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_SessionEntityTypes_DeleteSessionEntityType_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_session_entity_type(): + # Create a client + client = dialogflow_v2.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_session_entity_type(request=request) + + +# [END dialogflow_v2_generated_SessionEntityTypes_DeleteSessionEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_delete_session_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_delete_session_entity_type_sync.py new file mode 100644 index 000000000000..9055e5b7a6b9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_delete_session_entity_type_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_SessionEntityTypes_DeleteSessionEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_session_entity_type(): + # Create a client + client = dialogflow_v2.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_session_entity_type(request=request) + + +# [END dialogflow_v2_generated_SessionEntityTypes_DeleteSessionEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_get_session_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_get_session_entity_type_async.py new file mode 100644 index 000000000000..7fcad0dd43f5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_get_session_entity_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_SessionEntityTypes_GetSessionEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_session_entity_type(): + # Create a client + client = dialogflow_v2.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_SessionEntityTypes_GetSessionEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_get_session_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_get_session_entity_type_sync.py new file mode 100644 index 000000000000..7a5d9f998106 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_get_session_entity_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_SessionEntityTypes_GetSessionEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_session_entity_type(): + # Create a client + client = dialogflow_v2.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_session_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_SessionEntityTypes_GetSessionEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_list_session_entity_types_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_list_session_entity_types_async.py new file mode 100644 index 000000000000..d4dbfa18e484 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_list_session_entity_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessionEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_SessionEntityTypes_ListSessionEntityTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_session_entity_types():
+    # Create a client
+    client = dialogflow_v2.SessionEntityTypesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListSessionEntityTypesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (awaiting the async call returns the pager)
+    page_result = await client.list_session_entity_types(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_SessionEntityTypes_ListSessionEntityTypes_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_list_session_entity_types_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_list_session_entity_types_sync.py
new file mode 100644
index 000000000000..678c95d7f40c
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_list_session_entity_types_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSessionEntityTypes
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_SessionEntityTypes_ListSessionEntityTypes_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_session_entity_types(): + # Create a client + client = dialogflow_v2.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListSessionEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_session_entity_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_SessionEntityTypes_ListSessionEntityTypes_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_update_session_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_update_session_entity_type_async.py new file mode 100644 index 000000000000..0c10bd3c18ad --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_update_session_entity_type_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_SessionEntityTypes_UpdateSessionEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_update_session_entity_type():
+    # Create a client
+    client = dialogflow_v2.SessionEntityTypesAsyncClient()
+
+    # Initialize request argument(s); `entities` is a repeated field
+    session_entity_type = dialogflow_v2.SessionEntityType()
+    session_entity_type.name = "name_value"
+    session_entity_type.entity_override_mode = "ENTITY_OVERRIDE_MODE_SUPPLEMENT"
+    entity = dialogflow_v2.EntityType.Entity(value="value_value", synonyms=['synonyms_value1', 'synonyms_value2'])
+    session_entity_type.entities.append(entity)
+
+    request = dialogflow_v2.UpdateSessionEntityTypeRequest(
+        session_entity_type=session_entity_type,
+    )
+
+    # Make the request
+    response = await client.update_session_entity_type(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_SessionEntityTypes_UpdateSessionEntityType_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_update_session_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_update_session_entity_type_sync.py
new file mode 100644
index 000000000000..925f6edc28d7
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_session_entity_types_update_session_entity_type_sync.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateSessionEntityType
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_SessionEntityTypes_UpdateSessionEntityType_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+def sample_update_session_entity_type():
+    # Create a client
+    client = dialogflow_v2.SessionEntityTypesClient()
+
+    # Initialize request argument(s); `entities` is a repeated field
+    session_entity_type = dialogflow_v2.SessionEntityType()
+    session_entity_type.name = "name_value"
+    session_entity_type.entity_override_mode = "ENTITY_OVERRIDE_MODE_SUPPLEMENT"
+    entity = dialogflow_v2.EntityType.Entity(value="value_value", synonyms=['synonyms_value1', 'synonyms_value2'])
+    session_entity_type.entities.append(entity)
+
+    request = dialogflow_v2.UpdateSessionEntityTypeRequest(
+        session_entity_type=session_entity_type,
+    )
+
+    # Make the request
+    response = client.update_session_entity_type(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2_generated_SessionEntityTypes_UpdateSessionEntityType_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py
new file mode 100644
index 000000000000..cd01d8f25c06
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_async.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DetectIntent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Sessions_DetectIntent_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_detect_intent(): + # Create a client + client = dialogflow_v2.SessionsAsyncClient() + + # Initialize request argument(s) + query_input = dialogflow_v2.QueryInput() + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.sample_rate_hertz = 1817 + query_input.audio_config.language_code = "language_code_value" + + request = dialogflow_v2.DetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # Make the request + response = await client.detect_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Sessions_DetectIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py new file mode 100644 index 000000000000..8039f3d90636 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_detect_intent_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetectIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Sessions_DetectIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
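The DetectIntent samples configure an audio query; text queries work the same way through the `text` branch of QueryInput. A hedged sketch, with hypothetical project and session IDs passed to the generated `session_path` helper:

    # Builds "projects/my-project/agent/sessions/my-session"
    session = dialogflow_v2.SessionsClient.session_path("my-project", "my-session")

    query_input = dialogflow_v2.QueryInput()
    query_input.text.text = "book a meeting room"
    query_input.text.language_code = "en-US"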
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_detect_intent(): + # Create a client + client = dialogflow_v2.SessionsClient() + + # Initialize request argument(s) + query_input = dialogflow_v2.QueryInput() + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.sample_rate_hertz = 1817 + query_input.audio_config.language_code = "language_code_value" + + request = dialogflow_v2.DetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # Make the request + response = client.detect_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Sessions_DetectIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py new file mode 100644 index 000000000000..68a65fd0ed19 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_async.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingDetectIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Sessions_StreamingDetectIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_streaming_detect_intent(): + # Create a client + client = dialogflow_v2.SessionsAsyncClient() + + # Initialize request argument(s) + query_input = dialogflow_v2.QueryInput() + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.sample_rate_hertz = 1817 + query_input.audio_config.language_code = "language_code_value" + + request = dialogflow_v2.StreamingDetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # This method expects an iterator which contains + # 'dialogflow_v2.StreamingDetectIntentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
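+    # In a real integration the first request typically carries only `session`
+    # and `query_input` (the audio configuration), and each later request
+    # carries a chunk of audio in `input_audio`. A hedged sketch of such a
+    # generator (hypothetical file handle and chunk size); the same applies to
+    # the sync variant below:
+    #
+    #   def audio_request_generator(audio_file):
+    #       yield dialogflow_v2.StreamingDetectIntentRequest(
+    #           session="session_value", query_input=query_input)
+    #       while True:
+    #           chunk = audio_file.read(4096)
+    #           if not chunk:
+    #               break
+    #           yield dialogflow_v2.StreamingDetectIntentRequest(input_audio=chunk)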
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_detect_intent(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END dialogflow_v2_generated_Sessions_StreamingDetectIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py new file mode 100644 index 000000000000..b4c0ce875b81 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingDetectIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Sessions_StreamingDetectIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_streaming_detect_intent(): + # Create a client + client = dialogflow_v2.SessionsClient() + + # Initialize request argument(s) + query_input = dialogflow_v2.QueryInput() + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.sample_rate_hertz = 1817 + query_input.audio_config.language_code = "language_code_value" + + request = dialogflow_v2.StreamingDetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # This method expects an iterator which contains + # 'dialogflow_v2.StreamingDetectIntentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_detect_intent(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END dialogflow_v2_generated_Sessions_StreamingDetectIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_create_version_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_create_version_async.py new file mode 100644 index 000000000000..c0a7d6584ee4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_create_version_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_CreateVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_create_version(): + # Create a client + client = dialogflow_v2.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.CreateVersionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Versions_CreateVersion_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_create_version_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_create_version_sync.py new file mode 100644 index 000000000000..09a64f00cd9f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_create_version_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_CreateVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_create_version(): + # Create a client + client = dialogflow_v2.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2.CreateVersionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Versions_CreateVersion_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_delete_version_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_delete_version_async.py new file mode 100644 index 000000000000..0991838789d5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_delete_version_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_DeleteVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
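The CreateVersion samples set only `parent`; in practice the service also expects the `version` resource being created. A minimal sketch, assuming a hypothetical project:

    request = dialogflow_v2.CreateVersionRequest(
        parent="projects/my-project/agent",  # hypothetical project ID
        version=dialogflow_v2.Version(description="Release candidate"),
    )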
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_delete_version(): + # Create a client + client = dialogflow_v2.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + await client.delete_version(request=request) + + +# [END dialogflow_v2_generated_Versions_DeleteVersion_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_delete_version_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_delete_version_sync.py new file mode 100644 index 000000000000..e06b5bec9afd --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_delete_version_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_DeleteVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_delete_version(): + # Create a client + client = dialogflow_v2.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + client.delete_version(request=request) + + +# [END dialogflow_v2_generated_Versions_DeleteVersion_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_get_version_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_get_version_async.py new file mode 100644 index 000000000000..b370eef617cd --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_get_version_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_GetVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_get_version(): + # Create a client + client = dialogflow_v2.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Versions_GetVersion_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_get_version_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_get_version_sync.py new file mode 100644 index 000000000000..3c6a67026147 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_get_version_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_GetVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
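In the Get/Delete version samples, "name_value" stands in for a full version resource name. A sketch using the generated path helper, with hypothetical IDs:

    # Builds "projects/my-project/agent/versions/123"
    name = dialogflow_v2.VersionsClient.version_path("my-project", "123")
    request = dialogflow_v2.GetVersionRequest(name=name)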
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_get_version(): + # Create a client + client = dialogflow_v2.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = client.get_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Versions_GetVersion_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_list_versions_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_list_versions_async.py new file mode 100644 index 000000000000..03ed79207e89 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_list_versions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_ListVersions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2
+
+
+async def sample_list_versions():
+    # Create a client
+    client = dialogflow_v2.VersionsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2.ListVersionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    # The async client returns the pager from a coroutine, so await the call.
+    page_result = await client.list_versions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2_generated_Versions_ListVersions_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_list_versions_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_list_versions_sync.py new file mode 100644 index 000000000000..adad643d7325 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_list_versions_sync.py @@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListVersions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2_generated_Versions_ListVersions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
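`list_versions` returns a pager that fetches further pages transparently as you iterate; on the async client the call itself is a coroutine and must be awaited first, as above. To process page by page instead of item by item, a short sketch:

    async for page in page_result.pages:  # one ListVersionsResponse per page
        for version in page.versions:
            print(version.version_number)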
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_list_versions(): + # Create a client + client = dialogflow_v2.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2.ListVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2_generated_Versions_ListVersions_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_update_version_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_update_version_async.py new file mode 100644 index 000000000000..612b45577ae4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_update_version_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_UpdateVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +async def sample_update_version(): + # Create a client + client = dialogflow_v2.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2.UpdateVersionRequest( + ) + + # Make the request + response = await client.update_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Versions_UpdateVersion_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_update_version_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_update_version_sync.py new file mode 100644 index 000000000000..49855537669a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2_generated_versions_update_version_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2_generated_Versions_UpdateVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2 + + +def sample_update_version(): + # Create a client + client = dialogflow_v2.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2.UpdateVersionRequest( + ) + + # Make the request + response = client.update_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2_generated_Versions_UpdateVersion_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_delete_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_delete_agent_async.py new file mode 100644 index 000000000000..ba5347500d97 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_delete_agent_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
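The UpdateVersion samples construct an empty request; the service generally needs the `version` to update (identified by its `name`) together with an `update_mask` naming the fields to change. A hedged sketch with a hypothetical resource name:

    from google.protobuf import field_mask_pb2

    request = dialogflow_v2.UpdateVersionRequest(
        version=dialogflow_v2.Version(
            name="projects/my-project/agent/versions/123",
            description="Updated description",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )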
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_DeleteAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteAgentRequest( + parent="parent_value", + ) + + # Make the request + await client.delete_agent(request=request) + + +# [END dialogflow_v2beta1_generated_Agents_DeleteAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_delete_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_delete_agent_sync.py new file mode 100644 index 000000000000..5acc80a0afa7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_delete_agent_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_DeleteAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteAgentRequest( + parent="parent_value", + ) + + # Make the request + client.delete_agent(request=request) + + +# [END dialogflow_v2beta1_generated_Agents_DeleteAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_export_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_export_agent_async.py new file mode 100644 index 000000000000..36834176b240 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_export_agent_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_ExportAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_export_agent():
+    # Create a client
+    client = dialogflow_v2beta1.AgentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ExportAgentRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    operation = await client.export_agent(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # `AsyncOperation.result()` is itself a coroutine, so it must be awaited.
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Agents_ExportAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_export_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_export_agent_sync.py new file mode 100644 index 000000000000..e8f732c645fe --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_export_agent_sync.py @@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ExportAgent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Agents_ExportAgent_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_export_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ExportAgentRequest( + parent="parent_value", + ) + + # Make the request + operation = client.export_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_ExportAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_agent_async.py new file mode 100644 index 000000000000..34935a3e6394 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_agent_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_GetAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
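In the sync ExportAgent sample, `operation.result()` blocks and polls until the long-running operation completes, re-raising the operation's error if it failed. Passing a timeout bounds the wait:

    # Assumption: five minutes is ample for the export to finish.
    response = operation.result(timeout=300)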
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetAgentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.get_agent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_GetAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_agent_sync.py new file mode 100644 index 000000000000..64dffe383116 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_agent_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_GetAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetAgentRequest( + parent="parent_value", + ) + + # Make the request + response = client.get_agent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_GetAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_validation_result_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_validation_result_async.py new file mode 100644 index 000000000000..2da8a445aa6d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_validation_result_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetValidationResult +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_GetValidationResult_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_validation_result(): + # Create a client + client = dialogflow_v2beta1.AgentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetValidationResultRequest( + parent="parent_value", + ) + + # Make the request + response = await client.get_validation_result(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_GetValidationResult_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_validation_result_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_validation_result_sync.py new file mode 100644 index 000000000000..ecf74f5c55b4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_get_validation_result_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetValidationResult +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_GetValidationResult_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_validation_result(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetValidationResultRequest( + parent="parent_value", + ) + + # Make the request + response = client.get_validation_result(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_GetValidationResult_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_import_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_import_agent_async.py new file mode 100644 index 000000000000..7c360289dcef --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_import_agent_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_ImportAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_import_agent():
+    # Create a client
+    client = dialogflow_v2beta1.AgentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ImportAgentRequest(
+        agent_uri="agent_uri_value",
+        parent="parent_value",
+    )
+
+    # Make the request
+    operation = await client.import_agent(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # `AsyncOperation.result()` is itself a coroutine, so it must be awaited.
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Agents_ImportAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_import_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_import_agent_sync.py new file mode 100644 index 000000000000..d3bb6ddd0f43 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_import_agent_sync.py @@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ImportAgent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Agents_ImportAgent_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_import_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ImportAgentRequest( + agent_uri="agent_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_ImportAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_restore_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_restore_agent_async.py new file mode 100644 index 000000000000..8ca3f7734f3e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_restore_agent_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_RestoreAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
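ImportAgentRequest supplies the agent through a oneof: either `agent_uri` (a Google Cloud Storage URI, as in the samples) or inline `agent_content` bytes, not both. A sketch reading a local export, with a hypothetical path and project:

    with open("agent.zip", "rb") as f:
        request = dialogflow_v2beta1.ImportAgentRequest(
            parent="projects/my-project",
            agent_content=f.read(),
        )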
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_restore_agent():
+    # Create a client
+    client = dialogflow_v2beta1.AgentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.RestoreAgentRequest(
+        agent_uri="agent_uri_value",
+        parent="parent_value",
+    )
+
+    # Make the request
+    operation = await client.restore_agent(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Agents_RestoreAgent_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_restore_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_restore_agent_sync.py
new file mode 100644
index 000000000000..dca343596aac
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_restore_agent_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RestoreAgent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Agents_RestoreAgent_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_restore_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.RestoreAgentRequest( + agent_uri="agent_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.restore_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_RestoreAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_search_agents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_search_agents_async.py new file mode 100644 index 000000000000..b2f5b897b270 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_search_agents_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAgents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_SearchAgents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_search_agents():
+    # Create a client
+    client = dialogflow_v2beta1.AgentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.SearchAgentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.search_agents(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Agents_SearchAgents_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_search_agents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_search_agents_sync.py
new file mode 100644
index 000000000000..22f16f3d7023
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_search_agents_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchAgents
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Agents_SearchAgents_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_search_agents(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SearchAgentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.search_agents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Agents_SearchAgents_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_set_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_set_agent_async.py new file mode 100644 index 000000000000..acd7ddd002c7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_set_agent_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_SetAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_set_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsAsyncClient() + + # Initialize request argument(s) + agent = dialogflow_v2beta1.Agent() + agent.parent = "parent_value" + + request = dialogflow_v2beta1.SetAgentRequest( + agent=agent, + ) + + # Make the request + response = await client.set_agent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_SetAgent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_set_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_set_agent_sync.py new file mode 100644 index 000000000000..c07b29abe86f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_set_agent_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_SetAgent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_set_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + agent = dialogflow_v2beta1.Agent() + agent.parent = "parent_value" + + request = dialogflow_v2beta1.SetAgentRequest( + agent=agent, + ) + + # Make the request + response = client.set_agent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_SetAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_train_agent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_train_agent_async.py new file mode 100644 index 000000000000..542e82186838 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_train_agent_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TrainAgent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Agents_TrainAgent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_train_agent():
+    # Create a client
+    client = dialogflow_v2beta1.AgentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.TrainAgentRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    operation = await client.train_agent(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Agents_TrainAgent_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_train_agent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_train_agent_sync.py
new file mode 100644
index 000000000000..97d0090107a9
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_agents_train_agent_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for TrainAgent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Agents_TrainAgent_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_train_agent(): + # Create a client + client = dialogflow_v2beta1.AgentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.TrainAgentRequest( + parent="parent_value", + ) + + # Make the request + operation = client.train_agent(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Agents_TrainAgent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_get_answer_record_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_get_answer_record_async.py new file mode 100644 index 000000000000..9e8b069edc9a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_get_answer_record_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAnswerRecord +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_AnswerRecords_GetAnswerRecord_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_answer_record(): + # Create a client + client = dialogflow_v2beta1.AnswerRecordsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetAnswerRecordRequest( + ) + + # Make the request + response = await client.get_answer_record(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_AnswerRecords_GetAnswerRecord_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_get_answer_record_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_get_answer_record_sync.py new file mode 100644 index 000000000000..8571ced33034 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_get_answer_record_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAnswerRecord +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_AnswerRecords_GetAnswerRecord_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_answer_record(): + # Create a client + client = dialogflow_v2beta1.AnswerRecordsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetAnswerRecordRequest( + ) + + # Make the request + response = client.get_answer_record(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_AnswerRecords_GetAnswerRecord_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_list_answer_records_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_list_answer_records_async.py new file mode 100644 index 000000000000..7f623e3c8fac --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_list_answer_records_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAnswerRecords +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_AnswerRecords_ListAnswerRecords_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_answer_records():
+    # Create a client
+    client = dialogflow_v2beta1.AnswerRecordsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListAnswerRecordsRequest(
+    )
+
+    # Make the request
+    page_result = await client.list_answer_records(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_AnswerRecords_ListAnswerRecords_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_list_answer_records_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_list_answer_records_sync.py
new file mode 100644
index 000000000000..040bc64eb764
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_list_answer_records_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListAnswerRecords
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_AnswerRecords_ListAnswerRecords_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_answer_records(): + # Create a client + client = dialogflow_v2beta1.AnswerRecordsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListAnswerRecordsRequest( + ) + + # Make the request + page_result = client.list_answer_records(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_AnswerRecords_ListAnswerRecords_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_update_answer_record_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_update_answer_record_async.py new file mode 100644 index 000000000000..1196108cfc58 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_update_answer_record_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAnswerRecord +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_AnswerRecords_UpdateAnswerRecord_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_answer_record(): + # Create a client + client = dialogflow_v2beta1.AnswerRecordsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateAnswerRecordRequest( + ) + + # Make the request + response = await client.update_answer_record(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_AnswerRecords_UpdateAnswerRecord_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_update_answer_record_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_update_answer_record_sync.py new file mode 100644 index 000000000000..4acecf54c918 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_answer_records_update_answer_record_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAnswerRecord +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_AnswerRecords_UpdateAnswerRecord_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_answer_record(): + # Create a client + client = dialogflow_v2beta1.AnswerRecordsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateAnswerRecordRequest( + ) + + # Make the request + response = client.update_answer_record(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_AnswerRecords_UpdateAnswerRecord_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_create_context_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_create_context_async.py new file mode 100644 index 000000000000..9b676e268fdf --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_create_context_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_CreateContext_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_create_context(): + # Create a client + client = dialogflow_v2beta1.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CreateContextRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Contexts_CreateContext_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_create_context_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_create_context_sync.py new file mode 100644 index 000000000000..5f44955fa358 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_create_context_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_CreateContext_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_context(): + # Create a client + client = dialogflow_v2beta1.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CreateContextRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Contexts_CreateContext_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_all_contexts_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_all_contexts_async.py new file mode 100644 index 000000000000..efa23e3dd554 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_all_contexts_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAllContexts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_DeleteAllContexts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_all_contexts(): + # Create a client + client = dialogflow_v2beta1.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteAllContextsRequest( + parent="parent_value", + ) + + # Make the request + await client.delete_all_contexts(request=request) + + +# [END dialogflow_v2beta1_generated_Contexts_DeleteAllContexts_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_all_contexts_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_all_contexts_sync.py new file mode 100644 index 000000000000..def5fef5d39d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_all_contexts_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAllContexts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_DeleteAllContexts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_all_contexts(): + # Create a client + client = dialogflow_v2beta1.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteAllContextsRequest( + parent="parent_value", + ) + + # Make the request + client.delete_all_contexts(request=request) + + +# [END dialogflow_v2beta1_generated_Contexts_DeleteAllContexts_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_context_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_context_async.py new file mode 100644 index 000000000000..066e66f6b795 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_context_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_DeleteContext_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_context(): + # Create a client + client = dialogflow_v2beta1.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteContextRequest( + name="name_value", + ) + + # Make the request + await client.delete_context(request=request) + + +# [END dialogflow_v2beta1_generated_Contexts_DeleteContext_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_context_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_context_sync.py new file mode 100644 index 000000000000..fe9a2ab36dd8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_delete_context_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_DeleteContext_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_context(): + # Create a client + client = dialogflow_v2beta1.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteContextRequest( + name="name_value", + ) + + # Make the request + client.delete_context(request=request) + + +# [END dialogflow_v2beta1_generated_Contexts_DeleteContext_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_get_context_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_get_context_async.py new file mode 100644 index 000000000000..a82a0a17ab37 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_get_context_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_GetContext_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_context(): + # Create a client + client = dialogflow_v2beta1.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetContextRequest( + name="name_value", + ) + + # Make the request + response = await client.get_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Contexts_GetContext_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_get_context_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_get_context_sync.py new file mode 100644 index 000000000000..4209504bffc7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_get_context_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_GetContext_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_context(): + # Create a client + client = dialogflow_v2beta1.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetContextRequest( + name="name_value", + ) + + # Make the request + response = client.get_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Contexts_GetContext_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_list_contexts_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_list_contexts_async.py new file mode 100644 index 000000000000..eadb74108400 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_list_contexts_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListContexts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_ListContexts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_contexts():
+    # Create a client
+    client = dialogflow_v2beta1.ContextsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListContextsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_contexts(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Contexts_ListContexts_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_list_contexts_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_list_contexts_sync.py
new file mode 100644
index 000000000000..19c08fb21f57
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_list_contexts_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListContexts
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Contexts_ListContexts_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_contexts(): + # Create a client + client = dialogflow_v2beta1.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListContextsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_contexts(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Contexts_ListContexts_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_update_context_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_update_context_async.py new file mode 100644 index 000000000000..dc8c4e897975 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_update_context_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_UpdateContext_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_context(): + # Create a client + client = dialogflow_v2beta1.ContextsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateContextRequest( + ) + + # Make the request + response = await client.update_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Contexts_UpdateContext_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_update_context_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_update_context_sync.py new file mode 100644 index 000000000000..7d0cac6840c3 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_contexts_update_context_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContext +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Contexts_UpdateContext_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_context(): + # Create a client + client = dialogflow_v2beta1.ContextsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateContextRequest( + ) + + # Make the request + response = client.update_context(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Contexts_UpdateContext_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_async.py new file mode 100644 index 000000000000..fad4a0402817 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClearSuggestionFeatureConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_ClearSuggestionFeatureConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
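+#   Note on the async variant below: the client call returns a coroutine that
+#   resolves to a long-running operation, and the operation's result() is
+#   itself a coroutine, so both must be awaited. An equivalent split form:
+#
+#     operation = await client.clear_suggestion_feature_config(request=request)
+#     response = await operation.result()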
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1


+async def sample_clear_suggestion_feature_config():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationProfilesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ClearSuggestionFeatureConfigRequest(
+        conversation_profile="conversation_profile_value",
+        participant_role="END_USER",
+        suggestion_feature_type="KNOWLEDGE_SEARCH",
+    )
+
+    # Make the request
+    operation = client.clear_suggestion_feature_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # Both the call and the operation's result() are coroutines on the
+    # async client, so each is awaited.
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_ConversationProfiles_ClearSuggestionFeatureConfig_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_sync.py new file mode 100644 index 000000000000..a59c725d57cf --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ClearSuggestionFeatureConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_ClearSuggestionFeatureConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_clear_suggestion_feature_config(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ClearSuggestionFeatureConfigRequest( + conversation_profile="conversation_profile_value", + participant_role="END_USER", + suggestion_feature_type="KNOWLEDGE_SEARCH", + ) + + # Make the request + operation = client.clear_suggestion_feature_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_ClearSuggestionFeatureConfig_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_async.py new file mode 100644 index 000000000000..58c4e33517ec --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_CreateConversationProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_create_conversation_profile(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesAsyncClient() + + # Initialize request argument(s) + conversation_profile = dialogflow_v2beta1.ConversationProfile() + conversation_profile.display_name = "display_name_value" + + request = dialogflow_v2beta1.CreateConversationProfileRequest( + parent="parent_value", + conversation_profile=conversation_profile, + ) + + # Make the request + response = await client.create_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_CreateConversationProfile_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_sync.py new file mode 100644 index 000000000000..6752f6e7e066 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_CreateConversationProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_conversation_profile(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesClient() + + # Initialize request argument(s) + conversation_profile = dialogflow_v2beta1.ConversationProfile() + conversation_profile.display_name = "display_name_value" + + request = dialogflow_v2beta1.CreateConversationProfileRequest( + parent="parent_value", + conversation_profile=conversation_profile, + ) + + # Make the request + response = client.create_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_CreateConversationProfile_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_async.py new file mode 100644 index 000000000000..60d87316009a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_DeleteConversationProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_conversation_profile(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteConversationProfileRequest( + name="name_value", + ) + + # Make the request + await client.delete_conversation_profile(request=request) + + +# [END dialogflow_v2beta1_generated_ConversationProfiles_DeleteConversationProfile_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_sync.py new file mode 100644 index 000000000000..6e7f18d8bf6b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_DeleteConversationProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_conversation_profile(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteConversationProfileRequest( + name="name_value", + ) + + # Make the request + client.delete_conversation_profile(request=request) + + +# [END dialogflow_v2beta1_generated_ConversationProfiles_DeleteConversationProfile_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_async.py new file mode 100644 index 000000000000..2a28e5123c62 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_GetConversationProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
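+#   For reference, a regional client could be created roughly as follows
+#   (hand-written sketch with a hypothetical endpoint; see the client_options
+#   link below):
+#
+#     from google.api_core.client_options import ClientOptions
+#
+#     client = dialogflow_v2beta1.ConversationProfilesAsyncClient(
+#         client_options=ClientOptions(
+#             api_endpoint="europe-west1-dialogflow.googleapis.com"
+#         )
+#     )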
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_conversation_profile(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetConversationProfileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_GetConversationProfile_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_sync.py new file mode 100644 index 000000000000..0352f905fa11 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_GetConversationProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_conversation_profile(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetConversationProfileRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_GetConversationProfile_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_async.py new file mode 100644 index 000000000000..7ba8b5a21465 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversationProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_ListConversationProfiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
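+#   Note on the async variant below: awaiting the call yields an async pager,
+#   which is consumed with `async for`; individual pages are also reachable,
+#   e.g. (sketch):
+#
+#     page_result = await client.list_conversation_profiles(request=request)
+#     async for page in page_result.pages:
+#         print(page.next_page_token)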
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1


+async def sample_list_conversation_profiles():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationProfilesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListConversationProfilesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; the async client call must be awaited to obtain
+    # the async pager before iterating.
+    page_result = await client.list_conversation_profiles(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_ConversationProfiles_ListConversationProfiles_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_sync.py new file mode 100644 index 000000000000..9f492b14dc35 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversationProfiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_ListConversationProfiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_conversation_profiles(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListConversationProfilesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversation_profiles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_ListConversationProfiles_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_async.py new file mode 100644 index 000000000000..91d03366cfc4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSuggestionFeatureConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_SetSuggestionFeatureConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1


+async def sample_set_suggestion_feature_config():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationProfilesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.SetSuggestionFeatureConfigRequest(
+        conversation_profile="conversation_profile_value",
+        participant_role="END_USER",
+    )
+
+    # Make the request
+    operation = client.set_suggestion_feature_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # Both the call and the operation's result() are coroutines on the
+    # async client, so each is awaited.
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_ConversationProfiles_SetSuggestionFeatureConfig_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_sync.py new file mode 100644 index 000000000000..db9619977a76 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetSuggestionFeatureConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_ConversationProfiles_SetSuggestionFeatureConfig_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_set_suggestion_feature_config(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SetSuggestionFeatureConfigRequest( + conversation_profile="conversation_profile_value", + participant_role="END_USER", + ) + + # Make the request + operation = client.set_suggestion_feature_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_SetSuggestionFeatureConfig_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_async.py new file mode 100644 index 000000000000..50ff58ed9060 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_UpdateConversationProfile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_conversation_profile(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesAsyncClient() + + # Initialize request argument(s) + conversation_profile = dialogflow_v2beta1.ConversationProfile() + conversation_profile.display_name = "display_name_value" + + request = dialogflow_v2beta1.UpdateConversationProfileRequest( + conversation_profile=conversation_profile, + ) + + # Make the request + response = await client.update_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_UpdateConversationProfile_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_sync.py new file mode 100644 index 000000000000..457ee0e1e6a0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversationProfile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_ConversationProfiles_UpdateConversationProfile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_conversation_profile(): + # Create a client + client = dialogflow_v2beta1.ConversationProfilesClient() + + # Initialize request argument(s) + conversation_profile = dialogflow_v2beta1.ConversationProfile() + conversation_profile.display_name = "display_name_value" + + request = dialogflow_v2beta1.UpdateConversationProfileRequest( + conversation_profile=conversation_profile, + ) + + # Make the request + response = client.update_conversation_profile(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_ConversationProfiles_UpdateConversationProfile_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_batch_create_messages_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_batch_create_messages_async.py new file mode 100644 index 000000000000..9fcf0933ff2d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_batch_create_messages_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateMessages +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_BatchCreateMessages_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
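+#   Note: BatchCreateMessagesRequest.requests is a repeated field, so the
+#   snippet below passes a list. Several messages could be batched at once,
+#   e.g. (hand-written sketch):
+#
+#     requests = [
+#         dialogflow_v2beta1.CreateMessageRequest(
+#             parent="parent_value",
+#             message=dialogflow_v2beta1.Message(content=text),
+#         )
+#         for text in ("hello", "world")
+#     ]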
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1


+async def sample_batch_create_messages():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationsAsyncClient()
+
+    # Initialize request argument(s)
+    requests = dialogflow_v2beta1.CreateMessageRequest()
+    requests.parent = "parent_value"
+    requests.message.content = "content_value"
+
+    request = dialogflow_v2beta1.BatchCreateMessagesRequest(
+        parent="parent_value",
+        # `requests` is a repeated field, so a list is passed.
+        requests=[requests],
+    )
+
+    # Make the request
+    response = await client.batch_create_messages(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Conversations_BatchCreateMessages_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_batch_create_messages_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_batch_create_messages_sync.py new file mode 100644 index 000000000000..0c85a0b0e695 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_batch_create_messages_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateMessages
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Conversations_BatchCreateMessages_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1


+def sample_batch_create_messages():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationsClient()
+
+    # Initialize request argument(s)
+    requests = dialogflow_v2beta1.CreateMessageRequest()
+    requests.parent = "parent_value"
+    requests.message.content = "content_value"
+
+    request = dialogflow_v2beta1.BatchCreateMessagesRequest(
+        parent="parent_value",
+        # `requests` is a repeated field, so a list is passed.
+        requests=[requests],
+    )
+
+    # Make the request
+    response = client.batch_create_messages(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Conversations_BatchCreateMessages_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_complete_conversation_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_complete_conversation_async.py new file mode 100644 index 000000000000..3e8e95ff1907 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_complete_conversation_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CompleteConversation
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Conversations_CompleteConversation_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_complete_conversation(): + # Create a client + client = dialogflow_v2beta1.ConversationsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CompleteConversationRequest( + name="name_value", + ) + + # Make the request + response = await client.complete_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_CompleteConversation_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_complete_conversation_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_complete_conversation_sync.py new file mode 100644 index 000000000000..2d74302ca706 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_complete_conversation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CompleteConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_CompleteConversation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_complete_conversation(): + # Create a client + client = dialogflow_v2beta1.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CompleteConversationRequest( + name="name_value", + ) + + # Make the request + response = client.complete_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_CompleteConversation_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_create_conversation_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_create_conversation_async.py new file mode 100644 index 000000000000..c606c0385dca --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_create_conversation_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_CreateConversation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_create_conversation(): + # Create a client + client = dialogflow_v2beta1.ConversationsAsyncClient() + + # Initialize request argument(s) + conversation = dialogflow_v2beta1.Conversation() + conversation.conversation_profile = "conversation_profile_value" + + request = dialogflow_v2beta1.CreateConversationRequest( + parent="parent_value", + conversation=conversation, + ) + + # Make the request + response = await client.create_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_CreateConversation_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_create_conversation_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_create_conversation_sync.py new file mode 100644 index 000000000000..7fcee73f1c20 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_create_conversation_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_CreateConversation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_conversation(): + # Create a client + client = dialogflow_v2beta1.ConversationsClient() + + # Initialize request argument(s) + conversation = dialogflow_v2beta1.Conversation() + conversation.conversation_profile = "conversation_profile_value" + + request = dialogflow_v2beta1.CreateConversationRequest( + parent="parent_value", + conversation=conversation, + ) + + # Make the request + response = client.create_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_CreateConversation_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_generate_stateless_summary_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_generate_stateless_summary_async.py new file mode 100644 index 000000000000..dcd7aa71438b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_generate_stateless_summary_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateStatelessSummary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_GenerateStatelessSummary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
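+#   Note: MinimalConversation.messages is a repeated field, so the snippet
+#   below assigns a list of Message objects; multiple conversation turns
+#   could be supplied, e.g. (hand-written sketch):
+#
+#     stateless_conversation.messages = [
+#         dialogflow_v2beta1.Message(content="hi"),
+#         dialogflow_v2beta1.Message(content="how can I help?"),
+#     ]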
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1


+async def sample_generate_stateless_summary():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationsAsyncClient()
+
+    # Initialize request argument(s)
+    stateless_conversation = dialogflow_v2beta1.MinimalConversation()
+    # `messages` is a repeated field, so a list of Message objects is assigned.
+    stateless_conversation.messages = [dialogflow_v2beta1.Message(content="content_value")]
+    stateless_conversation.parent = "parent_value"
+
+    conversation_profile = dialogflow_v2beta1.ConversationProfile()
+    conversation_profile.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.GenerateStatelessSummaryRequest(
+        stateless_conversation=stateless_conversation,
+        conversation_profile=conversation_profile,
+    )
+
+    # Make the request
+    response = await client.generate_stateless_summary(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Conversations_GenerateStatelessSummary_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_generate_stateless_summary_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_generate_stateless_summary_sync.py new file mode 100644 index 000000000000..26949d8a579a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_generate_stateless_summary_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateStatelessSummary
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Conversations_GenerateStatelessSummary_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_generate_stateless_summary():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationsClient()
+
+    # Initialize request argument(s)
+    # MinimalConversation is nested under GenerateStatelessSummaryRequest, and
+    # messages is a repeated field, so it takes a list of Message objects.
+    stateless_conversation = dialogflow_v2beta1.GenerateStatelessSummaryRequest.MinimalConversation()
+    stateless_conversation.messages = [dialogflow_v2beta1.Message(content="content_value")]
+    stateless_conversation.parent = "parent_value"
+
+    conversation_profile = dialogflow_v2beta1.ConversationProfile()
+    conversation_profile.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.GenerateStatelessSummaryRequest(
+        stateless_conversation=stateless_conversation,
+        conversation_profile=conversation_profile,
+    )
+
+    # Make the request
+    response = client.generate_stateless_summary(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Conversations_GenerateStatelessSummary_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_get_conversation_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_get_conversation_async.py new file mode 100644 index 000000000000..45fc6d9e44f1 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_get_conversation_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetConversation
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Conversations_GetConversation_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_conversation(): + # Create a client + client = dialogflow_v2beta1.ConversationsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetConversationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_GetConversation_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_get_conversation_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_get_conversation_sync.py new file mode 100644 index 000000000000..8524b76635d8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_get_conversation_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_GetConversation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
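+# - A regional endpoint can be passed through client_options, for example
+#   (hypothetical region; confirm the endpoint for your location):
+#       client = dialogflow_v2beta1.ConversationsClient(
+#           client_options={"api_endpoint": "europe-west1-dialogflow.googleapis.com"})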
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_conversation(): + # Create a client + client = dialogflow_v2beta1.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetConversationRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversation(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_GetConversation_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_conversations_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_conversations_async.py new file mode 100644 index 000000000000..d2a95555568c --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_conversations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_ListConversations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
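+# - The returned pager fetches pages lazily as you iterate. Assuming the
+#   standard google-api-core pager API, you can also walk whole pages:
+#       async for page in page_result.pages:
+#           print(len(page.conversations))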
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_conversations():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListConversationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine, so await it
+    # to obtain the pager)
+    page_result = await client.list_conversations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Conversations_ListConversations_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_conversations_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_conversations_sync.py new file mode 100644 index 000000000000..88839f280523 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_conversations_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListConversations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Conversations_ListConversations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
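+# - The client authenticates with Application Default Credentials; point
+#   GOOGLE_APPLICATION_CREDENTIALS at a service account key file or run
+#   `gcloud auth application-default login` before executing the sample.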
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_conversations(): + # Create a client + client = dialogflow_v2beta1.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListConversationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_ListConversations_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_messages_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_messages_async.py new file mode 100644 index 000000000000..bbd25deec510 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_messages_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMessages +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_ListMessages_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_messages():
+    # Create a client
+    client = dialogflow_v2beta1.ConversationsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListMessagesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine, so await it
+    # to obtain the pager)
+    page_result = await client.list_messages(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Conversations_ListMessages_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_messages_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_messages_sync.py new file mode 100644 index 000000000000..2d8a6a4ae7c0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_list_messages_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMessages
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Conversations_ListMessages_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_messages(): + # Create a client + client = dialogflow_v2beta1.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListMessagesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_messages(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_ListMessages_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_search_knowledge_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_search_knowledge_async.py new file mode 100644 index 000000000000..33534cc11662 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_search_knowledge_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchKnowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_SearchKnowledge_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
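+# - "conversation_profile_value" stands in for a full resource name, which
+#   the v2beta1 docs give as "projects/<Project ID>/locations/<Location
+#   ID>/conversationProfiles/<Conversation Profile ID>" (verify before use).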
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_search_knowledge(): + # Create a client + client = dialogflow_v2beta1.ConversationsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SearchKnowledgeRequest( + conversation_profile="conversation_profile_value", + ) + + # Make the request + response = await client.search_knowledge(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_SearchKnowledge_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_search_knowledge_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_search_knowledge_sync.py new file mode 100644 index 000000000000..9608390a20a5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_search_knowledge_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchKnowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_SearchKnowledge_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_search_knowledge(): + # Create a client + client = dialogflow_v2beta1.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SearchKnowledgeRequest( + conversation_profile="conversation_profile_value", + ) + + # Make the request + response = client.search_knowledge(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_SearchKnowledge_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_async.py new file mode 100644 index 000000000000..c127b2807461 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestConversationSummary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_SuggestConversationSummary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_suggest_conversation_summary(): + # Create a client + client = dialogflow_v2beta1.ConversationsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SuggestConversationSummaryRequest( + conversation="conversation_value", + ) + + # Make the request + response = await client.suggest_conversation_summary(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_SuggestConversationSummary_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_sync.py new file mode 100644 index 000000000000..2e36259278df --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestConversationSummary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Conversations_SuggestConversationSummary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_suggest_conversation_summary(): + # Create a client + client = dialogflow_v2beta1.ConversationsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SuggestConversationSummaryRequest( + conversation="conversation_value", + ) + + # Make the request + response = client.suggest_conversation_summary(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Conversations_SuggestConversationSummary_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_create_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_create_document_async.py new file mode 100644 index 000000000000..ad74167c6652 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_create_document_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_CreateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
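+# - Document mutations return a long-running operation. On the async client
+#   both the RPC call and Operation.result() are coroutines, so each must be
+#   awaited, as in the body below.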
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_create_document():
+    # Create a client
+    client = dialogflow_v2beta1.DocumentsAsyncClient()
+
+    # Initialize request argument(s)
+    document = dialogflow_v2beta1.Document()
+    document.content_uri = "content_uri_value"
+    document.display_name = "display_name_value"
+    document.mime_type = "mime_type_value"
+    document.knowledge_types = ['SMART_REPLY']
+
+    request = dialogflow_v2beta1.CreateDocumentRequest(
+        parent="parent_value",
+        document=document,
+    )
+
+    # Make the request (await the coroutine to obtain the operation handle)
+    operation = await client.create_document(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited.
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Documents_CreateDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_create_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_create_document_sync.py new file mode 100644 index 000000000000..c0ec64efdc31 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_create_document_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Documents_CreateDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
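+# - operation.result() blocks until the operation finishes and accepts an
+#   optional timeout in seconds, e.g. (illustrative value):
+#       response = operation.result(timeout=300)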
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_document(): + # Create a client + client = dialogflow_v2beta1.DocumentsClient() + + # Initialize request argument(s) + document = dialogflow_v2beta1.Document() + document.content_uri = "content_uri_value" + document.display_name = "display_name_value" + document.mime_type = "mime_type_value" + document.knowledge_types = ['SMART_REPLY'] + + request = dialogflow_v2beta1.CreateDocumentRequest( + parent="parent_value", + document=document, + ) + + # Make the request + operation = client.create_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Documents_CreateDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_delete_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_delete_document_async.py new file mode 100644 index 000000000000..808878fbe350 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_delete_document_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_DeleteDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_delete_document():
+    # Create a client
+    client = dialogflow_v2beta1.DocumentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.DeleteDocumentRequest(
+        name="name_value",
+    )
+
+    # Make the request (await the coroutine to obtain the operation handle)
+    operation = await client.delete_document(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited.
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Documents_DeleteDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_delete_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_delete_document_sync.py new file mode 100644 index 000000000000..70f9c7102aff --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_delete_document_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Documents_DeleteDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_document(): + # Create a client + client = dialogflow_v2beta1.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Documents_DeleteDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_get_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_get_document_async.py new file mode 100644 index 000000000000..e7838be5f70d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_get_document_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_GetDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_document(): + # Create a client + client = dialogflow_v2beta1.DocumentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Documents_GetDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_get_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_get_document_sync.py new file mode 100644 index 000000000000..602f8946b80e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_get_document_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_GetDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_document(): + # Create a client + client = dialogflow_v2beta1.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Documents_GetDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_import_documents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_import_documents_async.py new file mode 100644 index 000000000000..f8c6146a7000 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_import_documents_async.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_ImportDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
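+# - The 'uris_value1'/'uris_value2' placeholders must be replaced with Cloud
+#   Storage URIs, e.g. "gs://my-bucket/folder/doc.csv" (hypothetical bucket).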
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_import_documents():
+    # Create a client
+    client = dialogflow_v2beta1.DocumentsAsyncClient()
+
+    # Initialize request argument(s)
+    gcs_source = dialogflow_v2beta1.GcsSources()
+    gcs_source.uris = ['uris_value1', 'uris_value2']
+
+    document_template = dialogflow_v2beta1.ImportDocumentTemplate()
+    document_template.mime_type = "mime_type_value"
+    document_template.knowledge_types = ['SMART_REPLY']
+
+    request = dialogflow_v2beta1.ImportDocumentsRequest(
+        gcs_source=gcs_source,
+        parent="parent_value",
+        document_template=document_template,
+    )
+
+    # Make the request (await the coroutine to obtain the operation handle)
+    operation = await client.import_documents(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited.
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Documents_ImportDocuments_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_import_documents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_import_documents_sync.py new file mode 100644 index 000000000000..617d4298288f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_import_documents_sync.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ImportDocuments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Documents_ImportDocuments_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_import_documents(): + # Create a client + client = dialogflow_v2beta1.DocumentsClient() + + # Initialize request argument(s) + gcs_source = dialogflow_v2beta1.GcsSources() + gcs_source.uris = ['uris_value1', 'uris_value2'] + + document_template = dialogflow_v2beta1.ImportDocumentTemplate() + document_template.mime_type = "mime_type_value" + document_template.knowledge_types = ['SMART_REPLY'] + + request = dialogflow_v2beta1.ImportDocumentsRequest( + gcs_source=gcs_source, + parent="parent_value", + document_template=document_template, + ) + + # Make the request + operation = client.import_documents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Documents_ImportDocuments_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_list_documents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_list_documents_async.py new file mode 100644 index 000000000000..274d5309e8d9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_list_documents_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_ListDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_documents():
+    # Create a client
+    client = dialogflow_v2beta1.DocumentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListDocumentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine, so await it
+    # to obtain the pager)
+    page_result = await client.list_documents(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Documents_ListDocuments_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_list_documents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_list_documents_sync.py new file mode 100644 index 000000000000..23c145c40b64 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_list_documents_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDocuments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Documents_ListDocuments_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_documents(): + # Create a client + client = dialogflow_v2beta1.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Documents_ListDocuments_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_reload_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_reload_document_async.py new file mode 100644 index 000000000000..ffa8f5aa9597 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_reload_document_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReloadDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_ReloadDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
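+# - "name_value" stands in for the document's full resource name; the
+#   v2beta1 docs give the form "projects/<Project ID>/knowledgeBases/
+#   <Knowledge Base ID>/documents/<Document ID>" (verify for your location).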
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_reload_document():
+    # Create a client
+    client = dialogflow_v2beta1.DocumentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ReloadDocumentRequest(
+        name="name_value",
+    )
+
+    # Make the request (await the coroutine to obtain the operation handle)
+    operation = await client.reload_document(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is itself a coroutine and must be awaited.
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Documents_ReloadDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_reload_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_reload_document_sync.py new file mode 100644 index 000000000000..66c0757e1058 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_reload_document_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ReloadDocument
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Documents_ReloadDocument_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_reload_document(): + # Create a client + client = dialogflow_v2beta1.DocumentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ReloadDocumentRequest( + name="name_value", + ) + + # Make the request + operation = client.reload_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Documents_ReloadDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_update_document_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_update_document_async.py new file mode 100644 index 000000000000..b2840cd70230 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_update_document_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_UpdateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
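+# - Without an update_mask the entire document is overwritten; to update only
+#   selected fields you can attach a standard protobuf FieldMask:
+#       from google.protobuf import field_mask_pb2
+#       request.update_mask = field_mask_pb2.FieldMask(paths=["display_name"])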
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_document(): + # Create a client + client = dialogflow_v2beta1.DocumentsAsyncClient() + + # Initialize request argument(s) + document = dialogflow_v2beta1.Document() + document.content_uri = "content_uri_value" + document.display_name = "display_name_value" + document.mime_type = "mime_type_value" + document.knowledge_types = ['SMART_REPLY'] + + request = dialogflow_v2beta1.UpdateDocumentRequest( + document=document, + ) + + # Make the request + operation = client.update_document(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Documents_UpdateDocument_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_update_document_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_update_document_sync.py new file mode 100644 index 000000000000..af0698d1db36 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_documents_update_document_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Documents_UpdateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_document(): + # Create a client + client = dialogflow_v2beta1.DocumentsClient() + + # Initialize request argument(s) + document = dialogflow_v2beta1.Document() + document.content_uri = "content_uri_value" + document.display_name = "display_name_value" + document.mime_type = "mime_type_value" + document.knowledge_types = ['SMART_REPLY'] + + request = dialogflow_v2beta1.UpdateDocumentRequest( + document=document, + ) + + # Make the request + operation = client.update_document(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Documents_UpdateDocument_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_create_entities_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_create_entities_async.py new file mode 100644 index 000000000000..b8add06d56ba --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_create_entities_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchCreateEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_batch_create_entities(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + entities = dialogflow_v2beta1.Entity() + entities.value = "value_value" + + request = dialogflow_v2beta1.BatchCreateEntitiesRequest( + parent="parent_value", + entities=entities, + ) + + # Make the request + operation = client.batch_create_entities(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchCreateEntities_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_create_entities_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_create_entities_sync.py new file mode 100644 index 000000000000..19225dce7bdb --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_create_entities_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchCreateEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_batch_create_entities(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + entities = dialogflow_v2beta1.Entity() + entities.value = "value_value" + + request = dialogflow_v2beta1.BatchCreateEntitiesRequest( + parent="parent_value", + entities=entities, + ) + + # Make the request + operation = client.batch_create_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchCreateEntities_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entities_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entities_async.py new file mode 100644 index 000000000000..0184b6669f1e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entities_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_batch_delete_entities(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.BatchDeleteEntitiesRequest( + parent="parent_value", + entity_values=['entity_values_value1', 'entity_values_value2'], + ) + + # Make the request + operation = client.batch_delete_entities(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntities_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entities_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entities_sync.py new file mode 100644 index 000000000000..b7d10ac4e675 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entities_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_batch_delete_entities(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.BatchDeleteEntitiesRequest( + parent="parent_value", + entity_values=['entity_values_value1', 'entity_values_value2'], + ) + + # Make the request + operation = client.batch_delete_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntities_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_async.py new file mode 100644 index 000000000000..9e9f948da63a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntityTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_batch_delete_entity_types(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.BatchDeleteEntityTypesRequest( + parent="parent_value", + entity_type_names=['entity_type_names_value1', 'entity_type_names_value2'], + ) + + # Make the request + operation = client.batch_delete_entity_types(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntityTypes_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_sync.py new file mode 100644 index 000000000000..f60682b4755b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntityTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_batch_delete_entity_types(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.BatchDeleteEntityTypesRequest( + parent="parent_value", + entity_type_names=['entity_type_names_value1', 'entity_type_names_value2'], + ) + + # Make the request + operation = client.batch_delete_entity_types(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntityTypes_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entities_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entities_async.py new file mode 100644 index 000000000000..4cee71e98281 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entities_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_batch_update_entities(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + entities = dialogflow_v2beta1.Entity() + entities.value = "value_value" + + request = dialogflow_v2beta1.BatchUpdateEntitiesRequest( + parent="parent_value", + entities=entities, + ) + + # Make the request + operation = client.batch_update_entities(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntities_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entities_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entities_sync.py new file mode 100644 index 000000000000..9735b6d3356f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entities_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntities_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_batch_update_entities(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + entities = dialogflow_v2beta1.Entity() + entities.value = "value_value" + + request = dialogflow_v2beta1.BatchUpdateEntitiesRequest( + parent="parent_value", + entities=entities, + ) + + # Make the request + operation = client.batch_update_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntities_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_async.py new file mode 100644 index 000000000000..e1a85419f2b6 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntityTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_batch_update_entity_types(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.BatchUpdateEntityTypesRequest( + entity_type_batch_uri="entity_type_batch_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.batch_update_entity_types(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntityTypes_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_sync.py new file mode 100644 index 000000000000..c6f66cf6e51f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntityTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_batch_update_entity_types(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.BatchUpdateEntityTypesRequest( + entity_type_batch_uri="entity_type_batch_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.batch_update_entity_types(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntityTypes_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_create_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_create_entity_type_async.py new file mode 100644 index 000000000000..fc757a79d6ab --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_create_entity_type_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_CreateEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_create_entity_type(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + entity_type = dialogflow_v2beta1.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflow_v2beta1.CreateEntityTypeRequest( + parent="parent_value", + entity_type=entity_type, + ) + + # Make the request + response = await client.create_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_CreateEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_create_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_create_entity_type_sync.py new file mode 100644 index 000000000000..b0a6bb0bc4d1 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_create_entity_type_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_CreateEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_entity_type(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + entity_type = dialogflow_v2beta1.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflow_v2beta1.CreateEntityTypeRequest( + parent="parent_value", + entity_type=entity_type, + ) + + # Make the request + response = client.create_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_CreateEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_delete_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_delete_entity_type_async.py new file mode 100644 index 000000000000..fb56e3e5f793 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_delete_entity_type_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_entity_type(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteEntityTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_entity_type(request=request) + + +# [END dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_delete_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_delete_entity_type_sync.py new file mode 100644 index 000000000000..89c32041e2be --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_delete_entity_type_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_entity_type(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteEntityTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_entity_type(request=request) + + +# [END dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_get_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_get_entity_type_async.py new file mode 100644 index 000000000000..ccf2e6d4afa0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_get_entity_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_GetEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_entity_type(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_GetEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_get_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_get_entity_type_sync.py new file mode 100644 index 000000000000..1f90126183ef --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_get_entity_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_GetEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_entity_type(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_GetEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_list_entity_types_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_list_entity_types_async.py new file mode 100644 index 000000000000..2b059deca201 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_list_entity_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_ListEntityTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_list_entity_types(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_ListEntityTypes_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_list_entity_types_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_list_entity_types_sync.py new file mode 100644 index 000000000000..c336b0291bb6 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_list_entity_types_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_ListEntityTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_entity_types(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entity_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_ListEntityTypes_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_update_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_update_entity_type_async.py new file mode 100644 index 000000000000..b23a192d78eb --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_update_entity_type_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_UpdateEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_entity_type(): + # Create a client + client = dialogflow_v2beta1.EntityTypesAsyncClient() + + # Initialize request argument(s) + entity_type = dialogflow_v2beta1.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflow_v2beta1.UpdateEntityTypeRequest( + entity_type=entity_type, + ) + + # Make the request + response = await client.update_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_UpdateEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_update_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_update_entity_type_sync.py new file mode 100644 index 000000000000..e773fed4b7e8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_entity_types_update_entity_type_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_EntityTypes_UpdateEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_entity_type(): + # Create a client + client = dialogflow_v2beta1.EntityTypesClient() + + # Initialize request argument(s) + entity_type = dialogflow_v2beta1.EntityType() + entity_type.display_name = "display_name_value" + entity_type.kind = "KIND_REGEXP" + + request = dialogflow_v2beta1.UpdateEntityTypeRequest( + entity_type=entity_type, + ) + + # Make the request + response = client.update_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_EntityTypes_UpdateEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_create_environment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_create_environment_async.py new file mode 100644 index 000000000000..a28b9eba69fe --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_create_environment_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_CreateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_create_environment(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + ) + + # Make the request + response = await client.create_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Environments_CreateEnvironment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_create_environment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_create_environment_sync.py new file mode 100644 index 000000000000..41520a84f950 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_create_environment_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_CreateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_environment(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + ) + + # Make the request + response = client.create_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Environments_CreateEnvironment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_delete_environment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_delete_environment_async.py new file mode 100644 index 000000000000..06463a36d67d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_delete_environment_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_DeleteEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
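The header comments point at client_options for regional endpoints; a sketch of what that looks like follows. The hostname here is illustrative only; check the Dialogflow documentation for the endpoints actually available to your agent.

    from google.api_core.client_options import ClientOptions
    from google.cloud import dialogflow_v2beta1

    # Route all RPCs from this client to a regional endpoint instead of
    # the global one (the endpoint name is an assumption for illustration).
    options = ClientOptions(api_endpoint="europe-west1-dialogflow.googleapis.com")
    client = dialogflow_v2beta1.EnvironmentsClient(client_options=options)
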
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_environment(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + await client.delete_environment(request=request) + + +# [END dialogflow_v2beta1_generated_Environments_DeleteEnvironment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_delete_environment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_delete_environment_sync.py new file mode 100644 index 000000000000..5e0196c3f84f --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_delete_environment_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_DeleteEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_environment(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + client.delete_environment(request=request) + + +# [END dialogflow_v2beta1_generated_Environments_DeleteEnvironment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_async.py new file mode 100644 index 000000000000..15c54f78e8d9 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_GetEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
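Get and Delete calls raise google.api_core exceptions on failure rather than returning an error value. A small wrapper (a sketch, not part of the generated samples) that treats a missing environment as None:

    from google.api_core import exceptions
    from google.cloud import dialogflow_v2beta1

    def get_environment_or_none(name: str):
        client = dialogflow_v2beta1.EnvironmentsClient()
        request = dialogflow_v2beta1.GetEnvironmentRequest(name=name)
        try:
            return client.get_environment(request=request)
        except exceptions.NotFound:
            # The environment does not exist or the caller cannot see it.
            return None
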
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_environment(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Environments_GetEnvironment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_history_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_history_async.py new file mode 100644 index 000000000000..1d436efdbfd5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_history_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironmentHistory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_GetEnvironmentHistory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_get_environment_history():
+    # Create a client
+    client = dialogflow_v2beta1.EnvironmentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.GetEnvironmentHistoryRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.get_environment_history(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Environments_GetEnvironmentHistory_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_history_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_history_sync.py
new file mode 100644
index 000000000000..128263d3f033
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_history_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetEnvironmentHistory
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Environments_GetEnvironmentHistory_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
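GetEnvironmentHistory is paginated; the async pager transparently issues follow-up RPCs as the iteration crosses page boundaries. Collecting every entry into a list (a sketch built on the awaited pager from the snippet above, reusing its client) might look like:

    from google.cloud import dialogflow_v2beta1

    async def collect_history(client, parent: str):
        # The pager lazily fetches additional pages during async iteration.
        request = dialogflow_v2beta1.GetEnvironmentHistoryRequest(parent=parent)
        pager = await client.get_environment_history(request=request)
        return [entry async for entry in pager]
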
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_environment_history(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetEnvironmentHistoryRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.get_environment_history(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Environments_GetEnvironmentHistory_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_sync.py new file mode 100644 index 000000000000..d7d2a92bbfdc --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_get_environment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_GetEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_environment(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Environments_GetEnvironment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_list_environments_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_list_environments_async.py new file mode 100644 index 000000000000..566994aa1a26 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_list_environments_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_ListEnvironments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_environments():
+    # Create a client
+    client = dialogflow_v2beta1.EnvironmentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListEnvironmentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_environments(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Environments_ListEnvironments_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_list_environments_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_list_environments_sync.py
new file mode 100644
index 000000000000..3515e55c2dd4
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_list_environments_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEnvironments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Environments_ListEnvironments_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_environments(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Environments_ListEnvironments_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_update_environment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_update_environment_async.py new file mode 100644 index 000000000000..513c8feeb619 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_update_environment_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_UpdateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
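The sync pager also exposes the underlying pages, which is useful when the raw responses matter (page sizes, next-page tokens). A sketch:

    from google.cloud import dialogflow_v2beta1

    # Iterate page by page instead of item by item; each page corresponds
    # to one ListEnvironments RPC and exposes the raw response fields.
    client = dialogflow_v2beta1.EnvironmentsClient()
    request = dialogflow_v2beta1.ListEnvironmentsRequest(parent="parent_value")
    for page in client.list_environments(request=request).pages:
        print(len(page.environments))
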
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_environment(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateEnvironmentRequest( + ) + + # Make the request + response = await client.update_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Environments_UpdateEnvironment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_update_environment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_update_environment_sync.py new file mode 100644 index 000000000000..0a7abe4437ed --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_environments_update_environment_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Environments_UpdateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_environment(): + # Create a client + client = dialogflow_v2beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateEnvironmentRequest( + ) + + # Make the request + response = client.update_environment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Environments_UpdateEnvironment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_get_fulfillment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_get_fulfillment_async.py new file mode 100644 index 000000000000..510631f2342d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_get_fulfillment_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFulfillment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Fulfillments_GetFulfillment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
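The UpdateEnvironmentRequest above is constructed empty and would fail server-side validation. A populated variant (a sketch; field names follow the v2beta1 protos and the values are placeholders) restricts the write with a FieldMask:

    from google.cloud import dialogflow_v2beta1
    from google.protobuf import field_mask_pb2

    environment = dialogflow_v2beta1.Environment()
    environment.name = "name_value"  # full resource name of the environment
    environment.description = "new description"

    # update_mask limits the update to the listed fields; unlisted fields
    # keep their current server-side values.
    request = dialogflow_v2beta1.UpdateEnvironmentRequest(
        environment=environment,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
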
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_fulfillment(): + # Create a client + client = dialogflow_v2beta1.FulfillmentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetFulfillmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_fulfillment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Fulfillments_GetFulfillment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_get_fulfillment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_get_fulfillment_sync.py new file mode 100644 index 000000000000..9335052acbb7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_get_fulfillment_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFulfillment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Fulfillments_GetFulfillment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_fulfillment(): + # Create a client + client = dialogflow_v2beta1.FulfillmentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetFulfillmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_fulfillment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Fulfillments_GetFulfillment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_update_fulfillment_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_update_fulfillment_async.py new file mode 100644 index 000000000000..822fe6d3f6f0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_update_fulfillment_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFulfillment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Fulfillments_UpdateFulfillment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_fulfillment(): + # Create a client + client = dialogflow_v2beta1.FulfillmentsAsyncClient() + + # Initialize request argument(s) + fulfillment = dialogflow_v2beta1.Fulfillment() + fulfillment.generic_web_service.uri = "uri_value" + fulfillment.name = "name_value" + + request = dialogflow_v2beta1.UpdateFulfillmentRequest( + fulfillment=fulfillment, + ) + + # Make the request + response = await client.update_fulfillment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Fulfillments_UpdateFulfillment_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_update_fulfillment_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_update_fulfillment_sync.py new file mode 100644 index 000000000000..fc43a755e481 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_fulfillments_update_fulfillment_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFulfillment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Fulfillments_UpdateFulfillment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_fulfillment(): + # Create a client + client = dialogflow_v2beta1.FulfillmentsClient() + + # Initialize request argument(s) + fulfillment = dialogflow_v2beta1.Fulfillment() + fulfillment.generic_web_service.uri = "uri_value" + fulfillment.name = "name_value" + + request = dialogflow_v2beta1.UpdateFulfillmentRequest( + fulfillment=fulfillment, + ) + + # Make the request + response = client.update_fulfillment(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Fulfillments_UpdateFulfillment_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_delete_intents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_delete_intents_async.py new file mode 100644 index 000000000000..7bf265d02f37 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_delete_intents_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteIntents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Intents_BatchDeleteIntents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_batch_delete_intents():
+    # Create a client
+    client = dialogflow_v2beta1.IntentsAsyncClient()
+
+    # Initialize request argument(s)
+    intents = dialogflow_v2beta1.Intent()
+    intents.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.BatchDeleteIntentsRequest(
+        parent="parent_value",
+        intents=[intents],
+    )
+
+    # Make the request
+    operation = client.batch_delete_intents(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Intents_BatchDeleteIntents_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_delete_intents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_delete_intents_sync.py
new file mode 100644
index 000000000000..a39120b35e2e
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_delete_intents_sync.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchDeleteIntents
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Intents_BatchDeleteIntents_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_batch_delete_intents():
+    # Create a client
+    client = dialogflow_v2beta1.IntentsClient()
+
+    # Initialize request argument(s)
+    intents = dialogflow_v2beta1.Intent()
+    intents.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.BatchDeleteIntentsRequest(
+        parent="parent_value",
+        intents=[intents],
+    )
+
+    # Make the request
+    operation = client.batch_delete_intents(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Intents_BatchDeleteIntents_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_update_intents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_update_intents_async.py
new file mode 100644
index 000000000000..5509f3db0043
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_update_intents_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchUpdateIntents
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Intents_BatchUpdateIntents_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
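operation.result() polls the long-running operation until it finishes. Passing a timeout bounds the wait instead of blocking indefinitely (a sketch continuing from the sync batch_delete_intents snippet above; 300 seconds is an illustrative value):

    import concurrent.futures

    try:
        # Blocks while polling the LRO; raises if the operation reports an error.
        response = operation.result(timeout=300)
    except concurrent.futures.TimeoutError:
        # Still running after the deadline; the server keeps processing.
        print("BatchDeleteIntents has not finished yet")
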
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_batch_update_intents():
+    # Create a client
+    client = dialogflow_v2beta1.IntentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.BatchUpdateIntentsRequest(
+        intent_batch_uri="intent_batch_uri_value",
+        parent="parent_value",
+    )
+
+    # Make the request
+    operation = client.batch_update_intents(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Intents_BatchUpdateIntents_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_update_intents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_update_intents_sync.py
new file mode 100644
index 000000000000..ac62cbe6797b
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_batch_update_intents_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchUpdateIntents
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Intents_BatchUpdateIntents_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_batch_update_intents(): + # Create a client + client = dialogflow_v2beta1.IntentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.BatchUpdateIntentsRequest( + intent_batch_uri="intent_batch_uri_value", + parent="parent_value", + ) + + # Make the request + operation = client.batch_update_intents(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Intents_BatchUpdateIntents_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_create_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_create_intent_async.py new file mode 100644 index 000000000000..3cb4a8876e00 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_create_intent_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Intents_CreateIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
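BatchUpdateIntents carries a oneof for the batch source: instead of the GCS URI used above, the intents can be supplied inline (a sketch; the field and message names follow the v2beta1 protos):

    from google.cloud import dialogflow_v2beta1

    # Supply the batch inline instead of via intent_batch_uri; the two
    # fields are members of the same oneof, so set only one of them.
    request = dialogflow_v2beta1.BatchUpdateIntentsRequest(
        parent="parent_value",
        intent_batch_inline=dialogflow_v2beta1.IntentBatch(
            intents=[dialogflow_v2beta1.Intent(display_name="display_name_value")],
        ),
    )
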
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_create_intent(): + # Create a client + client = dialogflow_v2beta1.IntentsAsyncClient() + + # Initialize request argument(s) + intent = dialogflow_v2beta1.Intent() + intent.display_name = "display_name_value" + + request = dialogflow_v2beta1.CreateIntentRequest( + parent="parent_value", + intent=intent, + ) + + # Make the request + response = await client.create_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Intents_CreateIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_create_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_create_intent_sync.py new file mode 100644 index 000000000000..1cad2f15182e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_create_intent_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Intents_CreateIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_intent(): + # Create a client + client = dialogflow_v2beta1.IntentsClient() + + # Initialize request argument(s) + intent = dialogflow_v2beta1.Intent() + intent.display_name = "display_name_value" + + request = dialogflow_v2beta1.CreateIntentRequest( + parent="parent_value", + intent=intent, + ) + + # Make the request + response = client.create_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Intents_CreateIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_delete_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_delete_intent_async.py new file mode 100644 index 000000000000..a8e0512f98f7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_delete_intent_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Intents_DeleteIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
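The placeholder "name_value" strings stand in for full resource names. Generated client classes expose path helpers for assembling them, as in this sketch (the project and intent IDs are assumptions):

    from google.cloud import dialogflow_v2beta1

    # Builds "projects/<project>/agent/intents/<intent>" without manual
    # string formatting; a matching parse_intent_path() inverts it.
    name = dialogflow_v2beta1.IntentsClient.intent_path("my-project", "my-intent")
    request = dialogflow_v2beta1.DeleteIntentRequest(name=name)
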
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_intent(): + # Create a client + client = dialogflow_v2beta1.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteIntentRequest( + name="name_value", + ) + + # Make the request + await client.delete_intent(request=request) + + +# [END dialogflow_v2beta1_generated_Intents_DeleteIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_delete_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_delete_intent_sync.py new file mode 100644 index 000000000000..e800f238799d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_delete_intent_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Intents_DeleteIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_intent(): + # Create a client + client = dialogflow_v2beta1.IntentsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteIntentRequest( + name="name_value", + ) + + # Make the request + client.delete_intent(request=request) + + +# [END dialogflow_v2beta1_generated_Intents_DeleteIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_get_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_get_intent_async.py new file mode 100644 index 000000000000..1826b3cbce8d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_get_intent_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Intents_GetIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_intent(): + # Create a client + client = dialogflow_v2beta1.IntentsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetIntentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Intents_GetIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_get_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_get_intent_sync.py new file mode 100644 index 000000000000..69c1e22745e5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_get_intent_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Intents_GetIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_get_intent():
+    # Create a client
+    client = dialogflow_v2beta1.IntentsClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.GetIntentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_intent(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Intents_GetIntent_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_list_intents_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_list_intents_async.py
new file mode 100644
index 000000000000..65c5b277ac43
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_list_intents_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListIntents
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Intents_ListIntents_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
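+#   For instance, "parent_value" stands for the agent whose intents are
+#   listed; an illustrative sketch (the ID below is a placeholder, not
+#   generated code):
+#
+#       request = dialogflow_v2beta1.ListIntentsRequest(
+#           parent="projects/<PROJECT_ID>/agent",
+#       )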
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_intents():
+    # Create a client
+    client = dialogflow_v2beta1.IntentsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListIntentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_intents(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Intents_ListIntents_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_list_intents_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_list_intents_sync.py
new file mode 100644
index 000000000000..d805319a78ca
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_list_intents_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListIntents
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Intents_ListIntents_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_list_intents():
+    # Create a client
+    client = dialogflow_v2beta1.IntentsClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListIntentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_intents(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Intents_ListIntents_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_update_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_update_intent_async.py
new file mode 100644
index 000000000000..a7855740a031
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_update_intent_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateIntent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Intents_UpdateIntent_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
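+#   For instance, an update_mask can limit which intent fields are written;
+#   an illustrative sketch (not part of the generated sample):
+#
+#       from google.protobuf import field_mask_pb2
+#
+#       request = dialogflow_v2beta1.UpdateIntentRequest(
+#           intent=intent,
+#           update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+#       )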
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_update_intent():
+    # Create a client
+    client = dialogflow_v2beta1.IntentsAsyncClient()
+
+    # Initialize request argument(s)
+    intent = dialogflow_v2beta1.Intent()
+    intent.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.UpdateIntentRequest(
+        intent=intent,
+    )
+
+    # Make the request
+    response = await client.update_intent(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Intents_UpdateIntent_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_update_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_update_intent_sync.py
new file mode 100644
index 000000000000..49e0ca89c8e5
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_intents_update_intent_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateIntent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Intents_UpdateIntent_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_update_intent():
+    # Create a client
+    client = dialogflow_v2beta1.IntentsClient()
+
+    # Initialize request argument(s)
+    intent = dialogflow_v2beta1.Intent()
+    intent.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.UpdateIntentRequest(
+        intent=intent,
+    )
+
+    # Make the request
+    response = client.update_intent(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Intents_UpdateIntent_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_async.py
new file mode 100644
index 000000000000..943f629efe8b
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateKnowledgeBase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_CreateKnowledgeBase_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
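+#   For instance, "parent_value" stands for the project (and, optionally,
+#   location) that owns the knowledge base; an illustrative sketch (the IDs
+#   below are placeholders, not generated code):
+#
+#       request = dialogflow_v2beta1.CreateKnowledgeBaseRequest(
+#           parent="projects/<PROJECT_ID>/locations/<LOCATION_ID>",
+#           knowledge_base=knowledge_base,
+#       )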
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_create_knowledge_base():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesAsyncClient()
+
+    # Initialize request argument(s)
+    knowledge_base = dialogflow_v2beta1.KnowledgeBase()
+    knowledge_base.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.CreateKnowledgeBaseRequest(
+        parent="parent_value",
+        knowledge_base=knowledge_base,
+    )
+
+    # Make the request
+    response = await client.create_knowledge_base(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_CreateKnowledgeBase_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_sync.py
new file mode 100644
index 000000000000..eecdac30b579
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateKnowledgeBase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_CreateKnowledgeBase_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_create_knowledge_base():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesClient()
+
+    # Initialize request argument(s)
+    knowledge_base = dialogflow_v2beta1.KnowledgeBase()
+    knowledge_base.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.CreateKnowledgeBaseRequest(
+        parent="parent_value",
+        knowledge_base=knowledge_base,
+    )
+
+    # Make the request
+    response = client.create_knowledge_base(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_CreateKnowledgeBase_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_async.py
new file mode 100644
index 000000000000..f8bd2523a8e2
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteKnowledgeBase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_DeleteKnowledgeBase_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
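+#   For instance, "name_value" stands for a fully-qualified knowledge base
+#   name; an illustrative sketch (the IDs below are placeholders, and force
+#   is an optional field of this request, not part of the generated sample):
+#
+#       request = dialogflow_v2beta1.DeleteKnowledgeBaseRequest(
+#           name="projects/<PROJECT_ID>/knowledgeBases/<KNOWLEDGE_BASE_ID>",
+#           force=True,  # also delete documents still in the knowledge base
+#       )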
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_delete_knowledge_base():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.DeleteKnowledgeBaseRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    await client.delete_knowledge_base(request=request)
+
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_DeleteKnowledgeBase_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_sync.py
new file mode 100644
index 000000000000..78f968f569e6
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_sync.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteKnowledgeBase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_DeleteKnowledgeBase_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_delete_knowledge_base():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.DeleteKnowledgeBaseRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    client.delete_knowledge_base(request=request)
+
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_DeleteKnowledgeBase_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_async.py
new file mode 100644
index 000000000000..3d783f05de85
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetKnowledgeBase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_GetKnowledgeBase_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_get_knowledge_base():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.GetKnowledgeBaseRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.get_knowledge_base(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_GetKnowledgeBase_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_sync.py
new file mode 100644
index 000000000000..9cd9c4ff4dd5
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetKnowledgeBase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_GetKnowledgeBase_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_get_knowledge_base():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.GetKnowledgeBaseRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_knowledge_base(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_GetKnowledgeBase_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_async.py
new file mode 100644
index 000000000000..5d5ba4f216b2
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListKnowledgeBases
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_ListKnowledgeBases_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
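+#   For instance, an in-range page_size keeps each page of results small; an
+#   illustrative sketch (the IDs below are placeholders, not generated code):
+#
+#       request = dialogflow_v2beta1.ListKnowledgeBasesRequest(
+#           parent="projects/<PROJECT_ID>/locations/<LOCATION_ID>",
+#           page_size=10,
+#       )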
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_knowledge_bases():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListKnowledgeBasesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_knowledge_bases(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_ListKnowledgeBases_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_sync.py
new file mode 100644
index 000000000000..d8fb8622a56c
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListKnowledgeBases
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_ListKnowledgeBases_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_list_knowledge_bases():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListKnowledgeBasesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_knowledge_bases(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_ListKnowledgeBases_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_async.py
new file mode 100644
index 000000000000..9f3a42564429
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateKnowledgeBase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_UpdateKnowledgeBase_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_update_knowledge_base():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesAsyncClient()
+
+    # Initialize request argument(s)
+    knowledge_base = dialogflow_v2beta1.KnowledgeBase()
+    knowledge_base.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.UpdateKnowledgeBaseRequest(
+        knowledge_base=knowledge_base,
+    )
+
+    # Make the request
+    response = await client.update_knowledge_base(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_UpdateKnowledgeBase_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_sync.py
new file mode 100644
index 000000000000..b3dd207961b5
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateKnowledgeBase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_KnowledgeBases_UpdateKnowledgeBase_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_update_knowledge_base():
+    # Create a client
+    client = dialogflow_v2beta1.KnowledgeBasesClient()
+
+    # Initialize request argument(s)
+    knowledge_base = dialogflow_v2beta1.KnowledgeBase()
+    knowledge_base.display_name = "display_name_value"
+
+    request = dialogflow_v2beta1.UpdateKnowledgeBaseRequest(
+        knowledge_base=knowledge_base,
+    )
+
+    # Make the request
+    response = client.update_knowledge_base(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_KnowledgeBases_UpdateKnowledgeBase_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_analyze_content_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_analyze_content_async.py
new file mode 100644
index 000000000000..567e6b40a5fb
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_analyze_content_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for AnalyzeContent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_AnalyzeContent_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
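+#   For instance, "participant_value" stands for a fully-qualified
+#   participant name, and a text_input carries the utterance to analyze; an
+#   illustrative sketch (the IDs below are placeholders, not generated code):
+#
+#       request = dialogflow_v2beta1.AnalyzeContentRequest(
+#           participant="projects/<PROJECT_ID>/conversations/<CONVERSATION_ID>/participants/<PARTICIPANT_ID>",
+#           text_input=dialogflow_v2beta1.TextInput(
+#               text="Hello",
+#               language_code="en-US",
+#           ),
+#       )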
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_analyze_content():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.AnalyzeContentRequest(
+        participant="participant_value",
+    )
+
+    # Make the request
+    response = await client.analyze_content(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_AnalyzeContent_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_analyze_content_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_analyze_content_sync.py
new file mode 100644
index 000000000000..365bbf962a3b
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_analyze_content_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for AnalyzeContent
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_AnalyzeContent_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_analyze_content():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.AnalyzeContentRequest(
+        participant="participant_value",
+    )
+
+    # Make the request
+    response = client.analyze_content(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_AnalyzeContent_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_compile_suggestion_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_compile_suggestion_async.py
new file mode 100644
index 000000000000..23d1025b511d
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_compile_suggestion_async.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CompileSuggestion
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_CompileSuggestion_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
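+#   For instance, the otherwise-empty request would normally name the
+#   participant to compile a suggestion for; an illustrative sketch (the IDs
+#   below are placeholders, not generated code):
+#
+#       request = dialogflow_v2beta1.CompileSuggestionRequest(
+#           parent="projects/<PROJECT_ID>/conversations/<CONVERSATION_ID>/participants/<PARTICIPANT_ID>",
+#       )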
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_compile_suggestion():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.CompileSuggestionRequest(
+    )
+
+    # Make the request
+    response = await client.compile_suggestion(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_CompileSuggestion_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_compile_suggestion_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_compile_suggestion_sync.py
new file mode 100644
index 000000000000..52d64db6c49b
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_compile_suggestion_sync.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CompileSuggestion
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_CompileSuggestion_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_compile_suggestion():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.CompileSuggestionRequest(
+    )
+
+    # Make the request
+    response = client.compile_suggestion(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_CompileSuggestion_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_create_participant_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_create_participant_async.py
new file mode 100644
index 000000000000..ed39ed1ab191
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_create_participant_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateParticipant
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_CreateParticipant_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
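+#   For instance, "parent_value" stands for the conversation the participant
+#   joins, and a role can be set on the new participant; an illustrative
+#   sketch (the IDs below are placeholders, not generated code):
+#
+#       participant = dialogflow_v2beta1.Participant(
+#           role=dialogflow_v2beta1.Participant.Role.END_USER,
+#       )
+#       request = dialogflow_v2beta1.CreateParticipantRequest(
+#           parent="projects/<PROJECT_ID>/conversations/<CONVERSATION_ID>",
+#           participant=participant,
+#       )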
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_create_participant():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.CreateParticipantRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    response = await client.create_participant(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_CreateParticipant_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_create_participant_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_create_participant_sync.py
new file mode 100644
index 000000000000..c5d5299c8a18
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_create_participant_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateParticipant
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_CreateParticipant_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_create_participant():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.CreateParticipantRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    response = client.create_participant(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_CreateParticipant_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_get_participant_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_get_participant_async.py
new file mode 100644
index 000000000000..4cb96c0330dd
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_get_participant_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetParticipant
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_GetParticipant_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_get_participant():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.GetParticipantRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = await client.get_participant(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_GetParticipant_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_get_participant_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_get_participant_sync.py
new file mode 100644
index 000000000000..7bea4b45e0a2
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_get_participant_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetParticipant
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_GetParticipant_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+def sample_get_participant():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.GetParticipantRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_participant(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_GetParticipant_sync]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_participants_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_participants_async.py
new file mode 100644
index 000000000000..b22aa5ea1473
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_participants_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListParticipants
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_ListParticipants_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dialogflow_v2beta1
+
+
+async def sample_list_participants():
+    # Create a client
+    client = dialogflow_v2beta1.ParticipantsAsyncClient()
+
+    # Initialize request argument(s)
+    request = dialogflow_v2beta1.ListParticipantsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_participants(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dialogflow_v2beta1_generated_Participants_ListParticipants_async]
diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_participants_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_participants_sync.py
new file mode 100644
index 000000000000..a53b7ba34591
--- /dev/null
+++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_participants_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListParticipants
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dialogflow
+
+
+# [START dialogflow_v2beta1_generated_Participants_ListParticipants_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_participants(): + # Create a client + client = dialogflow_v2beta1.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListParticipantsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_participants(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Participants_ListParticipants_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_suggestions_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_suggestions_async.py new file mode 100644 index 000000000000..f085ce552080 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_suggestions_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSuggestions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_ListSuggestions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_list_suggestions(): + # Create a client + client = dialogflow_v2beta1.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListSuggestionsRequest( + ) + + # Make the request + page_result = await client.list_suggestions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Participants_ListSuggestions_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_suggestions_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_suggestions_sync.py new file mode 100644 index 000000000000..9e224f9c04d7 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_list_suggestions_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSuggestions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_ListSuggestions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_suggestions(): + # Create a client + client = dialogflow_v2beta1.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListSuggestionsRequest( + ) + + # Make the request + page_result = client.list_suggestions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Participants_ListSuggestions_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_streaming_analyze_content_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_streaming_analyze_content_async.py new file mode 100644 index 000000000000..0e2b6ae6e89e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_streaming_analyze_content_async.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingAnalyzeContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_StreamingAnalyzeContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_streaming_analyze_content(): + # Create a client + client = dialogflow_v2beta1.ParticipantsAsyncClient() + + # Initialize request argument(s) + audio_config = dialogflow_v2beta1.InputAudioConfig() + audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.sample_rate_hertz = 1817 + audio_config.language_code = "language_code_value" + + request = dialogflow_v2beta1.StreamingAnalyzeContentRequest( + audio_config=audio_config, + input_audio=b'input_audio_blob', + participant="participant_value", + ) + + # This method expects an iterator which contains + # 'dialogflow_v2beta1.StreamingAnalyzeContentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_analyze_content(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END dialogflow_v2beta1_generated_Participants_StreamingAnalyzeContent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_streaming_analyze_content_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_streaming_analyze_content_sync.py new file mode 100644 index 000000000000..f81b75bea46a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_streaming_analyze_content_sync.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingAnalyzeContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_StreamingAnalyzeContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_streaming_analyze_content(): + # Create a client + client = dialogflow_v2beta1.ParticipantsClient() + + # Initialize request argument(s) + audio_config = dialogflow_v2beta1.InputAudioConfig() + audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + audio_config.sample_rate_hertz = 1817 + audio_config.language_code = "language_code_value" + + request = dialogflow_v2beta1.StreamingAnalyzeContentRequest( + audio_config=audio_config, + input_audio=b'input_audio_blob', + participant="participant_value", + ) + + # This method expects an iterator which contains + # 'dialogflow_v2beta1.StreamingAnalyzeContentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_analyze_content(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END dialogflow_v2beta1_generated_Participants_StreamingAnalyzeContent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_articles_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_articles_async.py new file mode 100644 index 000000000000..6a371389fa4d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_articles_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestArticles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_SuggestArticles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_suggest_articles(): + # Create a client + client = dialogflow_v2beta1.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SuggestArticlesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.suggest_articles(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Participants_SuggestArticles_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_articles_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_articles_sync.py new file mode 100644 index 000000000000..6684d84082f0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_articles_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestArticles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_SuggestArticles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_suggest_articles(): + # Create a client + client = dialogflow_v2beta1.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SuggestArticlesRequest( + parent="parent_value", + ) + + # Make the request + response = client.suggest_articles(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Participants_SuggestArticles_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_faq_answers_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_faq_answers_async.py new file mode 100644 index 000000000000..e69f88237922 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_faq_answers_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestFaqAnswers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_SuggestFaqAnswers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_suggest_faq_answers(): + # Create a client + client = dialogflow_v2beta1.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SuggestFaqAnswersRequest( + parent="parent_value", + ) + + # Make the request + response = await client.suggest_faq_answers(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Participants_SuggestFaqAnswers_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_faq_answers_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_faq_answers_sync.py new file mode 100644 index 000000000000..3b2d4d20f05e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_faq_answers_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestFaqAnswers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_SuggestFaqAnswers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_suggest_faq_answers(): + # Create a client + client = dialogflow_v2beta1.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SuggestFaqAnswersRequest( + parent="parent_value", + ) + + # Make the request + response = client.suggest_faq_answers(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Participants_SuggestFaqAnswers_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_smart_replies_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_smart_replies_async.py new file mode 100644 index 000000000000..9e9f149284f5 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_smart_replies_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestSmartReplies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_SuggestSmartReplies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_suggest_smart_replies(): + # Create a client + client = dialogflow_v2beta1.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SuggestSmartRepliesRequest( + parent="parent_value", + ) + + # Make the request + response = await client.suggest_smart_replies(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Participants_SuggestSmartReplies_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_smart_replies_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_smart_replies_sync.py new file mode 100644 index 000000000000..2041ec133674 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_suggest_smart_replies_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SuggestSmartReplies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_SuggestSmartReplies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_suggest_smart_replies(): + # Create a client + client = dialogflow_v2beta1.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.SuggestSmartRepliesRequest( + parent="parent_value", + ) + + # Make the request + response = client.suggest_smart_replies(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Participants_SuggestSmartReplies_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_update_participant_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_update_participant_async.py new file mode 100644 index 000000000000..e8738dfe0afb --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_update_participant_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_UpdateParticipant_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_participant(): + # Create a client + client = dialogflow_v2beta1.ParticipantsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateParticipantRequest( + ) + + # Make the request + response = await client.update_participant(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Participants_UpdateParticipant_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_update_participant_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_update_participant_sync.py new file mode 100644 index 000000000000..1e224c27e59b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_participants_update_participant_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateParticipant +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Participants_UpdateParticipant_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_participant(): + # Create a client + client = dialogflow_v2beta1.ParticipantsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateParticipantRequest( + ) + + # Make the request + response = client.update_participant(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Participants_UpdateParticipant_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_async.py new file mode 100644 index 000000000000..6a1490f5f7c4 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_CreateSessionEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_create_session_entity_type(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CreateSessionEntityTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_session_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_CreateSessionEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_sync.py new file mode 100644 index 000000000000..f52faa90035d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_CreateSessionEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_session_entity_type(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CreateSessionEntityTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_session_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_CreateSessionEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_async.py new file mode 100644 index 000000000000..18a5829c232d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_DeleteSessionEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_session_entity_type(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_session_entity_type(request=request) + + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_DeleteSessionEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_sync.py new file mode 100644 index 000000000000..1457b0a67b6a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_DeleteSessionEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_session_entity_type(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_session_entity_type(request=request) + + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_DeleteSessionEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_async.py new file mode 100644 index 000000000000..d34524c99a6b --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_GetSessionEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_session_entity_type(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_GetSessionEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_sync.py new file mode 100644 index 000000000000..2919305c0e4e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_GetSessionEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_session_entity_type(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetSessionEntityTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_session_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_GetSessionEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_async.py new file mode 100644 index 000000000000..88edc801860d --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessionEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_ListSessionEntityTypes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_list_session_entity_types(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListSessionEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_session_entity_types(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_ListSessionEntityTypes_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_sync.py new file mode 100644 index 000000000000..f1a5988cd893 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessionEntityTypes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_ListSessionEntityTypes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_session_entity_types(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListSessionEntityTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_session_entity_types(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_ListSessionEntityTypes_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_async.py new file mode 100644 index 000000000000..4e84f622fc84 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_UpdateSessionEntityType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_session_entity_type(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateSessionEntityTypeRequest( + ) + + # Make the request + response = await client.update_session_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_UpdateSessionEntityType_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_sync.py new file mode 100644 index 000000000000..ff4b2835d7b0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSessionEntityType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_SessionEntityTypes_UpdateSessionEntityType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_session_entity_type(): + # Create a client + client = dialogflow_v2beta1.SessionEntityTypesClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateSessionEntityTypeRequest( + ) + + # Make the request + response = client.update_session_entity_type(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_SessionEntityTypes_UpdateSessionEntityType_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_detect_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_detect_intent_async.py new file mode 100644 index 000000000000..23c1f0e081cf --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_detect_intent_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetectIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Sessions_DetectIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_detect_intent(): + # Create a client + client = dialogflow_v2beta1.SessionsAsyncClient() + + # Initialize request argument(s) + query_input = dialogflow_v2beta1.QueryInput() + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.sample_rate_hertz = 1817 + query_input.audio_config.language_code = "language_code_value" + + request = dialogflow_v2beta1.DetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # Make the request + response = await client.detect_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Sessions_DetectIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_detect_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_detect_intent_sync.py new file mode 100644 index 000000000000..d55ccf0ba559 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_detect_intent_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetectIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Sessions_DetectIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_detect_intent(): + # Create a client + client = dialogflow_v2beta1.SessionsClient() + + # Initialize request argument(s) + query_input = dialogflow_v2beta1.QueryInput() + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.sample_rate_hertz = 1817 + query_input.audio_config.language_code = "language_code_value" + + request = dialogflow_v2beta1.DetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # Make the request + response = client.detect_intent(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Sessions_DetectIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_streaming_detect_intent_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_streaming_detect_intent_async.py new file mode 100644 index 000000000000..ac8d90b65db8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_streaming_detect_intent_async.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingDetectIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Sessions_StreamingDetectIntent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_streaming_detect_intent(): + # Create a client + client = dialogflow_v2beta1.SessionsAsyncClient() + + # Initialize request argument(s) + query_input = dialogflow_v2beta1.QueryInput() + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.sample_rate_hertz = 1817 + query_input.audio_config.language_code = "language_code_value" + + request = dialogflow_v2beta1.StreamingDetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # This method expects an iterator which contains + # 'dialogflow_v2beta1.StreamingDetectIntentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_detect_intent(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END dialogflow_v2beta1_generated_Sessions_StreamingDetectIntent_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_streaming_detect_intent_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_streaming_detect_intent_sync.py new file mode 100644 index 000000000000..2a70667bba1e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_sessions_streaming_detect_intent_sync.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingDetectIntent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Sessions_StreamingDetectIntent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_streaming_detect_intent(): + # Create a client + client = dialogflow_v2beta1.SessionsClient() + + # Initialize request argument(s) + query_input = dialogflow_v2beta1.QueryInput() + query_input.audio_config.audio_encoding = "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE" + query_input.audio_config.sample_rate_hertz = 1817 + query_input.audio_config.language_code = "language_code_value" + + request = dialogflow_v2beta1.StreamingDetectIntentRequest( + session="session_value", + query_input=query_input, + ) + + # This method expects an iterator which contains + # 'dialogflow_v2beta1.StreamingDetectIntentRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_detect_intent(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END dialogflow_v2beta1_generated_Sessions_StreamingDetectIntent_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_create_version_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_create_version_async.py new file mode 100644 index 000000000000..54949e4c1e63 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_create_version_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_CreateVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_create_version(): + # Create a client + client = dialogflow_v2beta1.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CreateVersionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Versions_CreateVersion_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_create_version_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_create_version_sync.py new file mode 100644 index 000000000000..7507d7ba3db2 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_create_version_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_CreateVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_create_version(): + # Create a client + client = dialogflow_v2beta1.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.CreateVersionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Versions_CreateVersion_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_delete_version_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_delete_version_async.py new file mode 100644 index 000000000000..7d9701a63f8e --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_delete_version_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_DeleteVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_version(): + # Create a client + client = dialogflow_v2beta1.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + await client.delete_version(request=request) + + +# [END dialogflow_v2beta1_generated_Versions_DeleteVersion_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_delete_version_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_delete_version_sync.py new file mode 100644 index 000000000000..9a4e291f45ca --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_delete_version_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_DeleteVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_version(): + # Create a client + client = dialogflow_v2beta1.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeleteVersionRequest( + name="name_value", + ) + + # Make the request + client.delete_version(request=request) + + +# [END dialogflow_v2beta1_generated_Versions_DeleteVersion_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_get_version_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_get_version_async.py new file mode 100644 index 000000000000..c9b0b611a12a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_get_version_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_GetVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_get_version(): + # Create a client + client = dialogflow_v2beta1.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Versions_GetVersion_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_get_version_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_get_version_sync.py new file mode 100644 index 000000000000..816fa69e1861 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_get_version_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_GetVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_get_version(): + # Create a client + client = dialogflow_v2beta1.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.GetVersionRequest( + name="name_value", + ) + + # Make the request + response = client.get_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Versions_GetVersion_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_list_versions_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_list_versions_async.py new file mode 100644 index 000000000000..53869337c534 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_list_versions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_ListVersions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_list_versions(): + # Create a client + client = dialogflow_v2beta1.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_versions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Versions_ListVersions_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_list_versions_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_list_versions_sync.py new file mode 100644 index 000000000000..369998d999f8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_list_versions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_ListVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_versions(): + # Create a client + client = dialogflow_v2beta1.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListVersionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_Versions_ListVersions_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_update_version_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_update_version_async.py new file mode 100644 index 000000000000..da811ef057ee --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_update_version_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_UpdateVersion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_version(): + # Create a client + client = dialogflow_v2beta1.VersionsAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateVersionRequest( + ) + + # Make the request + response = await client.update_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Versions_UpdateVersion_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_update_version_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_update_version_sync.py new file mode 100644 index 000000000000..592a2a3b4473 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_versions_update_version_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVersion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_Versions_UpdateVersion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_version(): + # Create a client + client = dialogflow_v2beta1.VersionsClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdateVersionRequest( + ) + + # Make the request + response = client.update_version(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_Versions_UpdateVersion_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json new file mode 100644 index 000000000000..15f4b2c8fa69 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json @@ -0,0 +1,17108 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.dialogflow.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-dialogflow", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.delete_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.DeleteAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "DeleteAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_agent" + }, + "description": "Sample for DeleteAgent", + "file": "dialogflow_v2_generated_agents_delete_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_DeleteAgent_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_delete_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.delete_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.DeleteAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "DeleteAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": 
"delete_agent" + }, + "description": "Sample for DeleteAgent", + "file": "dialogflow_v2_generated_agents_delete_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_DeleteAgent_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_delete_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.export_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.ExportAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "ExportAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ExportAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_agent" + }, + "description": "Sample for ExportAgent", + "file": "dialogflow_v2_generated_agents_export_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_ExportAgent_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_export_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.export_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.ExportAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "ExportAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ExportAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_agent" + }, + "description": "Sample for ExportAgent", + "file": "dialogflow_v2_generated_agents_export_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_ExportAgent_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, 
+ "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_export_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.get_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.GetAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "GetAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Agent", + "shortName": "get_agent" + }, + "description": "Sample for GetAgent", + "file": "dialogflow_v2_generated_agents_get_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_GetAgent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_get_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.get_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.GetAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "GetAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Agent", + "shortName": "get_agent" + }, + "description": "Sample for GetAgent", + "file": "dialogflow_v2_generated_agents_get_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_GetAgent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_get_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.get_validation_result", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.GetValidationResult", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "GetValidationResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetValidationResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ValidationResult", + "shortName": "get_validation_result" + }, + "description": "Sample for GetValidationResult", + "file": "dialogflow_v2_generated_agents_get_validation_result_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_GetValidationResult_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_get_validation_result_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.get_validation_result", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.GetValidationResult", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "GetValidationResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetValidationResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ValidationResult", + "shortName": "get_validation_result" + }, + "description": "Sample for GetValidationResult", + "file": "dialogflow_v2_generated_agents_get_validation_result_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_GetValidationResult_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_get_validation_result_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.import_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.ImportAgent", + "service": { + "fullName": 
"google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "ImportAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ImportAgentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_agent" + }, + "description": "Sample for ImportAgent", + "file": "dialogflow_v2_generated_agents_import_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_ImportAgent_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_import_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.import_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.ImportAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "ImportAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ImportAgentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_agent" + }, + "description": "Sample for ImportAgent", + "file": "dialogflow_v2_generated_agents_import_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_ImportAgent_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_import_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.restore_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.RestoreAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "RestoreAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.RestoreAgentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.api_core.operation_async.AsyncOperation", + "shortName": "restore_agent" + }, + "description": "Sample for RestoreAgent", + "file": "dialogflow_v2_generated_agents_restore_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_RestoreAgent_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_restore_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.restore_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.RestoreAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "RestoreAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.RestoreAgentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_agent" + }, + "description": "Sample for RestoreAgent", + "file": "dialogflow_v2_generated_agents_restore_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_RestoreAgent_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_restore_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.search_agents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.SearchAgents", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "SearchAgents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SearchAgentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.agents.pagers.SearchAgentsAsyncPager", + "shortName": "search_agents" + }, + "description": "Sample for SearchAgents", + "file": "dialogflow_v2_generated_agents_search_agents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_SearchAgents_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": 
"FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_search_agents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.search_agents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.SearchAgents", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "SearchAgents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SearchAgentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.agents.pagers.SearchAgentsPager", + "shortName": "search_agents" + }, + "description": "Sample for SearchAgents", + "file": "dialogflow_v2_generated_agents_search_agents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_SearchAgents_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_search_agents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.set_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.SetAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "SetAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SetAgentRequest" + }, + { + "name": "agent", + "type": "google.cloud.dialogflow_v2.types.Agent" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Agent", + "shortName": "set_agent" + }, + "description": "Sample for SetAgent", + "file": "dialogflow_v2_generated_agents_set_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_SetAgent_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_set_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.set_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.SetAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "SetAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SetAgentRequest" + }, + { + "name": "agent", + "type": "google.cloud.dialogflow_v2.types.Agent" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Agent", + "shortName": "set_agent" + }, + "description": "Sample for SetAgent", + "file": "dialogflow_v2_generated_agents_set_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_SetAgent_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_set_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsAsyncClient.train_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Agents.TrainAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "TrainAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.TrainAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "train_agent" + }, + "description": "Sample for TrainAgent", + "file": "dialogflow_v2_generated_agents_train_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_TrainAgent_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_train_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AgentsClient.train_agent", + "method": { + "fullName": 
"google.cloud.dialogflow.v2.Agents.TrainAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Agents", + "shortName": "Agents" + }, + "shortName": "TrainAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.TrainAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "train_agent" + }, + "description": "Sample for TrainAgent", + "file": "dialogflow_v2_generated_agents_train_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Agents_TrainAgent_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_agents_train_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AnswerRecordsAsyncClient", + "shortName": "AnswerRecordsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AnswerRecordsAsyncClient.list_answer_records", + "method": { + "fullName": "google.cloud.dialogflow.v2.AnswerRecords.ListAnswerRecords", + "service": { + "fullName": "google.cloud.dialogflow.v2.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": "ListAnswerRecords" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListAnswerRecordsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.answer_records.pagers.ListAnswerRecordsAsyncPager", + "shortName": "list_answer_records" + }, + "description": "Sample for ListAnswerRecords", + "file": "dialogflow_v2_generated_answer_records_list_answer_records_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_AnswerRecords_ListAnswerRecords_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_answer_records_list_answer_records_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AnswerRecordsClient", + "shortName": "AnswerRecordsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AnswerRecordsClient.list_answer_records", + "method": { + "fullName": "google.cloud.dialogflow.v2.AnswerRecords.ListAnswerRecords", + "service": { + "fullName": "google.cloud.dialogflow.v2.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": 
"ListAnswerRecords" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListAnswerRecordsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.answer_records.pagers.ListAnswerRecordsPager", + "shortName": "list_answer_records" + }, + "description": "Sample for ListAnswerRecords", + "file": "dialogflow_v2_generated_answer_records_list_answer_records_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_AnswerRecords_ListAnswerRecords_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_answer_records_list_answer_records_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.AnswerRecordsAsyncClient", + "shortName": "AnswerRecordsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.AnswerRecordsAsyncClient.update_answer_record", + "method": { + "fullName": "google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord", + "service": { + "fullName": "google.cloud.dialogflow.v2.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": "UpdateAnswerRecord" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateAnswerRecordRequest" + }, + { + "name": "answer_record", + "type": "google.cloud.dialogflow_v2.types.AnswerRecord" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.AnswerRecord", + "shortName": "update_answer_record" + }, + "description": "Sample for UpdateAnswerRecord", + "file": "dialogflow_v2_generated_answer_records_update_answer_record_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_AnswerRecords_UpdateAnswerRecord_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_answer_records_update_answer_record_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.AnswerRecordsClient", + "shortName": "AnswerRecordsClient" + }, + "fullName": "google.cloud.dialogflow_v2.AnswerRecordsClient.update_answer_record", + "method": { + "fullName": "google.cloud.dialogflow.v2.AnswerRecords.UpdateAnswerRecord", + "service": { + "fullName": 
"google.cloud.dialogflow.v2.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": "UpdateAnswerRecord" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateAnswerRecordRequest" + }, + { + "name": "answer_record", + "type": "google.cloud.dialogflow_v2.types.AnswerRecord" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.AnswerRecord", + "shortName": "update_answer_record" + }, + "description": "Sample for UpdateAnswerRecord", + "file": "dialogflow_v2_generated_answer_records_update_answer_record_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_AnswerRecords_UpdateAnswerRecord_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_answer_records_update_answer_record_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient.create_context", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.CreateContext", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "CreateContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateContextRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "context", + "type": "google.cloud.dialogflow_v2.types.Context" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Context", + "shortName": "create_context" + }, + "description": "Sample for CreateContext", + "file": "dialogflow_v2_generated_contexts_create_context_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_CreateContext_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_create_context_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsClient.create_context", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.CreateContext", + "service": { + "fullName": 
"google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "CreateContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateContextRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "context", + "type": "google.cloud.dialogflow_v2.types.Context" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Context", + "shortName": "create_context" + }, + "description": "Sample for CreateContext", + "file": "dialogflow_v2_generated_contexts_create_context_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_CreateContext_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_create_context_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient.delete_all_contexts", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.DeleteAllContexts", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "DeleteAllContexts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteAllContextsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_all_contexts" + }, + "description": "Sample for DeleteAllContexts", + "file": "dialogflow_v2_generated_contexts_delete_all_contexts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_DeleteAllContexts_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_delete_all_contexts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsClient.delete_all_contexts", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.DeleteAllContexts", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "DeleteAllContexts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteAllContextsRequest" + }, + { + "name": "parent", + "type": 
"str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_all_contexts" + }, + "description": "Sample for DeleteAllContexts", + "file": "dialogflow_v2_generated_contexts_delete_all_contexts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_DeleteAllContexts_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_delete_all_contexts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient.delete_context", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.DeleteContext", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "DeleteContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteContextRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_context" + }, + "description": "Sample for DeleteContext", + "file": "dialogflow_v2_generated_contexts_delete_context_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_DeleteContext_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_delete_context_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsClient.delete_context", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.DeleteContext", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "DeleteContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteContextRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_context" + }, + "description": "Sample for DeleteContext", + "file": "dialogflow_v2_generated_contexts_delete_context_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2_generated_Contexts_DeleteContext_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_delete_context_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient.get_context", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.GetContext", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "GetContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetContextRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Context", + "shortName": "get_context" + }, + "description": "Sample for GetContext", + "file": "dialogflow_v2_generated_contexts_get_context_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_GetContext_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_get_context_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsClient.get_context", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.GetContext", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "GetContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetContextRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Context", + "shortName": "get_context" + }, + "description": "Sample for GetContext", + "file": "dialogflow_v2_generated_contexts_get_context_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_GetContext_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + 
}, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_get_context_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient.list_contexts", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.ListContexts", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "ListContexts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListContextsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.contexts.pagers.ListContextsAsyncPager", + "shortName": "list_contexts" + }, + "description": "Sample for ListContexts", + "file": "dialogflow_v2_generated_contexts_list_contexts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_ListContexts_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_list_contexts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsClient.list_contexts", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.ListContexts", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "ListContexts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListContextsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.contexts.pagers.ListContextsPager", + "shortName": "list_contexts" + }, + "description": "Sample for ListContexts", + "file": "dialogflow_v2_generated_contexts_list_contexts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_ListContexts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_list_contexts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dialogflow_v2.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsAsyncClient.update_context", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.UpdateContext", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "UpdateContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateContextRequest" + }, + { + "name": "context", + "type": "google.cloud.dialogflow_v2.types.Context" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Context", + "shortName": "update_context" + }, + "description": "Sample for UpdateContext", + "file": "dialogflow_v2_generated_contexts_update_context_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_UpdateContext_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_update_context_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ContextsClient.update_context", + "method": { + "fullName": "google.cloud.dialogflow.v2.Contexts.UpdateContext", + "service": { + "fullName": "google.cloud.dialogflow.v2.Contexts", + "shortName": "Contexts" + }, + "shortName": "UpdateContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateContextRequest" + }, + { + "name": "context", + "type": "google.cloud.dialogflow_v2.types.Context" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Context", + "shortName": "update_context" + }, + "description": "Sample for UpdateContext", + "file": "dialogflow_v2_generated_contexts_update_context_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Contexts_UpdateContext_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_contexts_update_context_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient", + "shortName": "ConversationDatasetsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient.create_conversation_dataset", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.CreateConversationDataset", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "CreateConversationDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationDatasetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_dataset", + "type": "google.cloud.dialogflow_v2.types.ConversationDataset" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_conversation_dataset" + }, + "description": "Sample for CreateConversationDataset", + "file": "dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationDatasets_CreateConversationDataset_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient", + "shortName": "ConversationDatasetsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient.create_conversation_dataset", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.CreateConversationDataset", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "CreateConversationDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationDatasetRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_dataset", + "type": "google.cloud.dialogflow_v2.types.ConversationDataset" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_conversation_dataset" + }, + "description": "Sample for CreateConversationDataset", + "file": "dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationDatasets_CreateConversationDataset_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_create_conversation_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient", + "shortName": "ConversationDatasetsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient.delete_conversation_dataset", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.DeleteConversationDataset", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "DeleteConversationDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteConversationDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_conversation_dataset" + }, + "description": "Sample for DeleteConversationDataset", + "file": "dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationDatasets_DeleteConversationDataset_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient", + "shortName": "ConversationDatasetsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient.delete_conversation_dataset", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.DeleteConversationDataset", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "DeleteConversationDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteConversationDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_conversation_dataset" + }, + "description": "Sample for DeleteConversationDataset", + "file": "dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationDatasets_DeleteConversationDataset_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, 
+ "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_delete_conversation_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient", + "shortName": "ConversationDatasetsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient.get_conversation_dataset", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.GetConversationDataset", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "GetConversationDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationDataset", + "shortName": "get_conversation_dataset" + }, + "description": "Sample for GetConversationDataset", + "file": "dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationDatasets_GetConversationDataset_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient", + "shortName": "ConversationDatasetsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient.get_conversation_dataset", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.GetConversationDataset", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "GetConversationDataset" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationDatasetRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationDataset", + "shortName": "get_conversation_dataset" + }, + "description": "Sample for GetConversationDataset", + "file": "dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2_generated_ConversationDatasets_GetConversationDataset_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_get_conversation_dataset_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient", + "shortName": "ConversationDatasetsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient.import_conversation_data", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.ImportConversationData", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "ImportConversationData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ImportConversationDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_conversation_data" + }, + "description": "Sample for ImportConversationData", + "file": "dialogflow_v2_generated_conversation_datasets_import_conversation_data_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationDatasets_ImportConversationData_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_import_conversation_data_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient", + "shortName": "ConversationDatasetsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient.import_conversation_data", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.ImportConversationData", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "ImportConversationData" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ImportConversationDataRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_conversation_data" + }, + "description": "Sample for ImportConversationData", + "file": "dialogflow_v2_generated_conversation_datasets_import_conversation_data_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2_generated_ConversationDatasets_ImportConversationData_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_import_conversation_data_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient", + "shortName": "ConversationDatasetsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsAsyncClient.list_conversation_datasets", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.ListConversationDatasets", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "ListConversationDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversation_datasets.pagers.ListConversationDatasetsAsyncPager", + "shortName": "list_conversation_datasets" + }, + "description": "Sample for ListConversationDatasets", + "file": "dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationDatasets_ListConversationDatasets_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient", + "shortName": "ConversationDatasetsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationDatasetsClient.list_conversation_datasets", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets.ListConversationDatasets", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationDatasets", + "shortName": "ConversationDatasets" + }, + "shortName": "ListConversationDatasets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationDatasetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversation_datasets.pagers.ListConversationDatasetsPager", + "shortName": 
"list_conversation_datasets" + }, + "description": "Sample for ListConversationDatasets", + "file": "dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationDatasets_ListConversationDatasets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_datasets_list_conversation_datasets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.create_conversation_model_evaluation", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.CreateConversationModelEvaluation", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "CreateConversationModelEvaluation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationModelEvaluationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_model_evaluation", + "type": "google.cloud.dialogflow_v2.types.ConversationModelEvaluation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_conversation_model_evaluation" + }, + "description": "Sample for CreateConversationModelEvaluation", + "file": "dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_CreateConversationModelEvaluation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient.create_conversation_model_evaluation", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.CreateConversationModelEvaluation", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "CreateConversationModelEvaluation" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dialogflow_v2.types.CreateConversationModelEvaluationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_model_evaluation", + "type": "google.cloud.dialogflow_v2.types.ConversationModelEvaluation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_conversation_model_evaluation" + }, + "description": "Sample for CreateConversationModelEvaluation", + "file": "dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_CreateConversationModelEvaluation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_create_conversation_model_evaluation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.create_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.CreateConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "CreateConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationModelRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_model", + "type": "google.cloud.dialogflow_v2.types.ConversationModel" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_conversation_model" + }, + "description": "Sample for CreateConversationModel", + "file": "dialogflow_v2_generated_conversation_models_create_conversation_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_CreateConversationModel_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_create_conversation_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": 
"google.cloud.dialogflow_v2.ConversationModelsClient.create_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.CreateConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "CreateConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationModelRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_model", + "type": "google.cloud.dialogflow_v2.types.ConversationModel" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_conversation_model" + }, + "description": "Sample for CreateConversationModel", + "file": "dialogflow_v2_generated_conversation_models_create_conversation_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_CreateConversationModel_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_create_conversation_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.delete_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.DeleteConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "DeleteConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteConversationModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_conversation_model" + }, + "description": "Sample for DeleteConversationModel", + "file": "dialogflow_v2_generated_conversation_models_delete_conversation_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_DeleteConversationModel_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_delete_conversation_model_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient.delete_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.DeleteConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "DeleteConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteConversationModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_conversation_model" + }, + "description": "Sample for DeleteConversationModel", + "file": "dialogflow_v2_generated_conversation_models_delete_conversation_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_DeleteConversationModel_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_delete_conversation_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.deploy_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.DeployConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "DeployConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeployConversationModelRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "deploy_conversation_model" + }, + "description": "Sample for DeployConversationModel", + "file": "dialogflow_v2_generated_conversation_models_deploy_conversation_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_DeployConversationModel_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_deploy_conversation_model_async.py" + 
}, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient.deploy_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.DeployConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "DeployConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeployConversationModelRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "deploy_conversation_model" + }, + "description": "Sample for DeployConversationModel", + "file": "dialogflow_v2_generated_conversation_models_deploy_conversation_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_DeployConversationModel_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_deploy_conversation_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.get_conversation_model_evaluation", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.GetConversationModelEvaluation", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "GetConversationModelEvaluation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationModelEvaluationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationModelEvaluation", + "shortName": "get_conversation_model_evaluation" + }, + "description": "Sample for GetConversationModelEvaluation", + "file": "dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_GetConversationModelEvaluation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], 
+ "title": "dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient.get_conversation_model_evaluation", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.GetConversationModelEvaluation", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "GetConversationModelEvaluation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationModelEvaluationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationModelEvaluation", + "shortName": "get_conversation_model_evaluation" + }, + "description": "Sample for GetConversationModelEvaluation", + "file": "dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_GetConversationModelEvaluation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_get_conversation_model_evaluation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.get_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.GetConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "GetConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationModel", + "shortName": "get_conversation_model" + }, + "description": "Sample for GetConversationModel", + "file": "dialogflow_v2_generated_conversation_models_get_conversation_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_GetConversationModel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" 
+ }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_get_conversation_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient.get_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.GetConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "GetConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationModel", + "shortName": "get_conversation_model" + }, + "description": "Sample for GetConversationModel", + "file": "dialogflow_v2_generated_conversation_models_get_conversation_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_GetConversationModel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_get_conversation_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.list_conversation_model_evaluations", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.ListConversationModelEvaluations", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "ListConversationModelEvaluations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationModelEvaluationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversation_models.pagers.ListConversationModelEvaluationsAsyncPager", + "shortName": "list_conversation_model_evaluations" + }, + "description": "Sample for ListConversationModelEvaluations", + "file": "dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_ListConversationModelEvaluations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + 
"end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient.list_conversation_model_evaluations", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.ListConversationModelEvaluations", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "ListConversationModelEvaluations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationModelEvaluationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversation_models.pagers.ListConversationModelEvaluationsPager", + "shortName": "list_conversation_model_evaluations" + }, + "description": "Sample for ListConversationModelEvaluations", + "file": "dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_ListConversationModelEvaluations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_list_conversation_model_evaluations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.list_conversation_models", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.ListConversationModels", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "ListConversationModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationModelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversation_models.pagers.ListConversationModelsAsyncPager", + "shortName": "list_conversation_models" + }, + "description": "Sample for ListConversationModels", + "file": 
"dialogflow_v2_generated_conversation_models_list_conversation_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_ListConversationModels_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_list_conversation_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient.list_conversation_models", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.ListConversationModels", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "ListConversationModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationModelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversation_models.pagers.ListConversationModelsPager", + "shortName": "list_conversation_models" + }, + "description": "Sample for ListConversationModels", + "file": "dialogflow_v2_generated_conversation_models_list_conversation_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_ListConversationModels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_list_conversation_models_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient", + "shortName": "ConversationModelsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsAsyncClient.undeploy_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.UndeployConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "UndeployConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UndeployConversationModelRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": 
"undeploy_conversation_model" + }, + "description": "Sample for UndeployConversationModel", + "file": "dialogflow_v2_generated_conversation_models_undeploy_conversation_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_UndeployConversationModel_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_undeploy_conversation_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient", + "shortName": "ConversationModelsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationModelsClient.undeploy_conversation_model", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels.UndeployConversationModel", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationModels", + "shortName": "ConversationModels" + }, + "shortName": "UndeployConversationModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UndeployConversationModelRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "undeploy_conversation_model" + }, + "description": "Sample for UndeployConversationModel", + "file": "dialogflow_v2_generated_conversation_models_undeploy_conversation_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationModels_UndeployConversationModel_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_models_undeploy_conversation_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient.clear_suggestion_feature_config", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.ClearSuggestionFeatureConfig", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "ClearSuggestionFeatureConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ClearSuggestionFeatureConfigRequest" + }, + { + "name": "conversation_profile", + "type": "str" + }, + { + "name": "participant_role", + "type": "google.cloud.dialogflow_v2.types.Participant.Role" + }, + { + "name": "suggestion_feature_type", + "type": 
"google.cloud.dialogflow_v2.types.SuggestionFeature.Type" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "clear_suggestion_feature_config" + }, + "description": "Sample for ClearSuggestionFeatureConfig", + "file": "dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_ClearSuggestionFeatureConfig_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient.clear_suggestion_feature_config", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.ClearSuggestionFeatureConfig", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "ClearSuggestionFeatureConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ClearSuggestionFeatureConfigRequest" + }, + { + "name": "conversation_profile", + "type": "str" + }, + { + "name": "participant_role", + "type": "google.cloud.dialogflow_v2.types.Participant.Role" + }, + { + "name": "suggestion_feature_type", + "type": "google.cloud.dialogflow_v2.types.SuggestionFeature.Type" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "clear_suggestion_feature_config" + }, + "description": "Sample for ClearSuggestionFeatureConfig", + "file": "dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_ClearSuggestionFeatureConfig_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_clear_suggestion_feature_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": 
"google.cloud.dialogflow_v2.ConversationProfilesAsyncClient.create_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.CreateConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "CreateConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_profile", + "type": "google.cloud.dialogflow_v2.types.ConversationProfile" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationProfile", + "shortName": "create_conversation_profile" + }, + "description": "Sample for CreateConversationProfile", + "file": "dialogflow_v2_generated_conversation_profiles_create_conversation_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_CreateConversationProfile_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_create_conversation_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient.create_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.CreateConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "CreateConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_profile", + "type": "google.cloud.dialogflow_v2.types.ConversationProfile" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationProfile", + "shortName": "create_conversation_profile" + }, + "description": "Sample for CreateConversationProfile", + "file": "dialogflow_v2_generated_conversation_profiles_create_conversation_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_CreateConversationProfile_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_create_conversation_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient.delete_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.DeleteConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "DeleteConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteConversationProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_conversation_profile" + }, + "description": "Sample for DeleteConversationProfile", + "file": "dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_DeleteConversationProfile_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient.delete_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.DeleteConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "DeleteConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteConversationProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_conversation_profile" + }, + "description": "Sample for DeleteConversationProfile", + "file": "dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_DeleteConversationProfile_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_delete_conversation_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient.get_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.GetConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "GetConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationProfile", + "shortName": "get_conversation_profile" + }, + "description": "Sample for GetConversationProfile", + "file": "dialogflow_v2_generated_conversation_profiles_get_conversation_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_GetConversationProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_get_conversation_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient.get_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.GetConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "GetConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationProfile", + "shortName": "get_conversation_profile" + }, + "description": "Sample for GetConversationProfile", + "file": "dialogflow_v2_generated_conversation_profiles_get_conversation_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_GetConversationProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + 
{ + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_get_conversation_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient.list_conversation_profiles", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.ListConversationProfiles", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "ListConversationProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversation_profiles.pagers.ListConversationProfilesAsyncPager", + "shortName": "list_conversation_profiles" + }, + "description": "Sample for ListConversationProfiles", + "file": "dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_ListConversationProfiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient.list_conversation_profiles", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.ListConversationProfiles", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "ListConversationProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversation_profiles.pagers.ListConversationProfilesPager", + "shortName": "list_conversation_profiles" + }, + "description": "Sample for ListConversationProfiles", + "file": "dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_ListConversationProfiles_sync", + "segments": [ + { + "end": 52, + 
"start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_list_conversation_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient.set_suggestion_feature_config", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.SetSuggestionFeatureConfig", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "SetSuggestionFeatureConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SetSuggestionFeatureConfigRequest" + }, + { + "name": "conversation_profile", + "type": "str" + }, + { + "name": "participant_role", + "type": "google.cloud.dialogflow_v2.types.Participant.Role" + }, + { + "name": "suggestion_feature_config", + "type": "google.cloud.dialogflow_v2.types.HumanAgentAssistantConfig.SuggestionFeatureConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "set_suggestion_feature_config" + }, + "description": "Sample for SetSuggestionFeatureConfig", + "file": "dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_SetSuggestionFeatureConfig_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient.set_suggestion_feature_config", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.SetSuggestionFeatureConfig", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "SetSuggestionFeatureConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SetSuggestionFeatureConfigRequest" + }, + { + "name": "conversation_profile", + "type": "str" + }, + { + "name": "participant_role", + "type": "google.cloud.dialogflow_v2.types.Participant.Role" + }, + { + "name": "suggestion_feature_config", + "type": 
"google.cloud.dialogflow_v2.types.HumanAgentAssistantConfig.SuggestionFeatureConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "set_suggestion_feature_config" + }, + "description": "Sample for SetSuggestionFeatureConfig", + "file": "dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_SetSuggestionFeatureConfig_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_set_suggestion_feature_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesAsyncClient.update_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles.UpdateConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "UpdateConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateConversationProfileRequest" + }, + { + "name": "conversation_profile", + "type": "google.cloud.dialogflow_v2.types.ConversationProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationProfile", + "shortName": "update_conversation_profile" + }, + "description": "Sample for UpdateConversationProfile", + "file": "dialogflow_v2_generated_conversation_profiles_update_conversation_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_UpdateConversationProfile_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_update_conversation_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationProfilesClient.update_conversation_profile", + "method": { + "fullName": 
"google.cloud.dialogflow.v2.ConversationProfiles.UpdateConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "UpdateConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateConversationProfileRequest" + }, + { + "name": "conversation_profile", + "type": "google.cloud.dialogflow_v2.types.ConversationProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.ConversationProfile", + "shortName": "update_conversation_profile" + }, + "description": "Sample for UpdateConversationProfile", + "file": "dialogflow_v2_generated_conversation_profiles_update_conversation_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_ConversationProfiles_UpdateConversationProfile_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversation_profiles_update_conversation_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient.complete_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.CompleteConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "CompleteConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CompleteConversationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Conversation", + "shortName": "complete_conversation" + }, + "description": "Sample for CompleteConversation", + "file": "dialogflow_v2_generated_conversations_complete_conversation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_CompleteConversation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_complete_conversation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dialogflow_v2.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsClient.complete_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.CompleteConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "CompleteConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CompleteConversationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Conversation", + "shortName": "complete_conversation" + }, + "description": "Sample for CompleteConversation", + "file": "dialogflow_v2_generated_conversations_complete_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_CompleteConversation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_complete_conversation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient.create_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.CreateConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "CreateConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation", + "type": "google.cloud.dialogflow_v2.types.Conversation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Conversation", + "shortName": "create_conversation" + }, + "description": "Sample for CreateConversation", + "file": "dialogflow_v2_generated_conversations_create_conversation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_CreateConversation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_create_conversation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dialogflow_v2.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsClient.create_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.CreateConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "CreateConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateConversationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation", + "type": "google.cloud.dialogflow_v2.types.Conversation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Conversation", + "shortName": "create_conversation" + }, + "description": "Sample for CreateConversation", + "file": "dialogflow_v2_generated_conversations_create_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_CreateConversation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_create_conversation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient.generate_stateless_summary", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.GenerateStatelessSummary", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "GenerateStatelessSummary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GenerateStatelessSummaryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.GenerateStatelessSummaryResponse", + "shortName": "generate_stateless_summary" + }, + "description": "Sample for GenerateStatelessSummary", + "file": "dialogflow_v2_generated_conversations_generate_stateless_summary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_GenerateStatelessSummary_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_generate_stateless_summary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.dialogflow_v2.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsClient.generate_stateless_summary", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.GenerateStatelessSummary", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "GenerateStatelessSummary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GenerateStatelessSummaryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.GenerateStatelessSummaryResponse", + "shortName": "generate_stateless_summary" + }, + "description": "Sample for GenerateStatelessSummary", + "file": "dialogflow_v2_generated_conversations_generate_stateless_summary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_GenerateStatelessSummary_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_generate_stateless_summary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient.get_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.GetConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "GetConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Conversation", + "shortName": "get_conversation" + }, + "description": "Sample for GetConversation", + "file": "dialogflow_v2_generated_conversations_get_conversation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_GetConversation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_get_conversation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsClient", + "shortName": "ConversationsClient" + }, + 
"fullName": "google.cloud.dialogflow_v2.ConversationsClient.get_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.GetConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "GetConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetConversationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Conversation", + "shortName": "get_conversation" + }, + "description": "Sample for GetConversation", + "file": "dialogflow_v2_generated_conversations_get_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_GetConversation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_get_conversation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient.list_conversations", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.ListConversations", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "ListConversations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversations.pagers.ListConversationsAsyncPager", + "shortName": "list_conversations" + }, + "description": "Sample for ListConversations", + "file": "dialogflow_v2_generated_conversations_list_conversations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_ListConversations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_list_conversations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsClient.list_conversations", + "method": { + "fullName": 
"google.cloud.dialogflow.v2.Conversations.ListConversations", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "ListConversations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListConversationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversations.pagers.ListConversationsPager", + "shortName": "list_conversations" + }, + "description": "Sample for ListConversations", + "file": "dialogflow_v2_generated_conversations_list_conversations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_ListConversations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_list_conversations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient.list_messages", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.ListMessages", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "ListMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListMessagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversations.pagers.ListMessagesAsyncPager", + "shortName": "list_messages" + }, + "description": "Sample for ListMessages", + "file": "dialogflow_v2_generated_conversations_list_messages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_ListMessages_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_list_messages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsClient.list_messages", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.ListMessages", + "service": { + "fullName": 
"google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "ListMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListMessagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.conversations.pagers.ListMessagesPager", + "shortName": "list_messages" + }, + "description": "Sample for ListMessages", + "file": "dialogflow_v2_generated_conversations_list_messages_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_ListMessages_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_list_messages_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient.search_knowledge", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.SearchKnowledge", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "SearchKnowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SearchKnowledgeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SearchKnowledgeResponse", + "shortName": "search_knowledge" + }, + "description": "Sample for SearchKnowledge", + "file": "dialogflow_v2_generated_conversations_search_knowledge_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_SearchKnowledge_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_search_knowledge_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsClient.search_knowledge", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.SearchKnowledge", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "SearchKnowledge" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dialogflow_v2.types.SearchKnowledgeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SearchKnowledgeResponse", + "shortName": "search_knowledge" + }, + "description": "Sample for SearchKnowledge", + "file": "dialogflow_v2_generated_conversations_search_knowledge_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_SearchKnowledge_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_search_knowledge_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsAsyncClient.suggest_conversation_summary", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.SuggestConversationSummary", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "SuggestConversationSummary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SuggestConversationSummaryRequest" + }, + { + "name": "conversation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SuggestConversationSummaryResponse", + "shortName": "suggest_conversation_summary" + }, + "description": "Sample for SuggestConversationSummary", + "file": "dialogflow_v2_generated_conversations_suggest_conversation_summary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_SuggestConversationSummary_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_suggest_conversation_summary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ConversationsClient.suggest_conversation_summary", + "method": { + "fullName": "google.cloud.dialogflow.v2.Conversations.SuggestConversationSummary", + "service": { + "fullName": "google.cloud.dialogflow.v2.Conversations", + "shortName": "Conversations" + }, + "shortName": "SuggestConversationSummary" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dialogflow_v2.types.SuggestConversationSummaryRequest" + }, + { + "name": "conversation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SuggestConversationSummaryResponse", + "shortName": "suggest_conversation_summary" + }, + "description": "Sample for SuggestConversationSummary", + "file": "dialogflow_v2_generated_conversations_suggest_conversation_summary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Conversations_SuggestConversationSummary_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_conversations_suggest_conversation_summary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient.create_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.CreateDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.dialogflow_v2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "dialogflow_v2_generated_documents_create_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_CreateDocument_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsClient.create_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.CreateDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateDocumentRequest" + 
}, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.dialogflow_v2.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "dialogflow_v2_generated_documents_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_CreateDocument_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient.delete_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.DeleteDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "dialogflow_v2_generated_documents_delete_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_DeleteDocument_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsClient.delete_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.DeleteDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "dialogflow_v2_generated_documents_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_DeleteDocument_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient.export_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.ExportDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "ExportDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ExportDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_document" + }, + "description": "Sample for ExportDocument", + "file": "dialogflow_v2_generated_documents_export_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_ExportDocument_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_export_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsClient.export_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.ExportDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "ExportDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ExportDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_document" + }, + "description": "Sample for ExportDocument", + "file": "dialogflow_v2_generated_documents_export_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2_generated_Documents_ExportDocument_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_export_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient.get_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.GetDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "dialogflow_v2_generated_documents_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_GetDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsClient.get_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.GetDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "dialogflow_v2_generated_documents_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_GetDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + 
}, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_get_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient.import_documents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.ImportDocuments", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "dialogflow_v2_generated_documents_import_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_ImportDocuments_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_import_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsClient.import_documents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.ImportDocuments", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "dialogflow_v2_generated_documents_import_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_ImportDocuments_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_import_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dialogflow_v2.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient.list_documents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.ListDocuments", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.documents.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "dialogflow_v2_generated_documents_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_ListDocuments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsClient.list_documents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.ListDocuments", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.documents.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "dialogflow_v2_generated_documents_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_ListDocuments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient.reload_document", + "method": { + "fullName": 
"google.cloud.dialogflow.v2.Documents.ReloadDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "ReloadDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ReloadDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "content_uri", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reload_document" + }, + "description": "Sample for ReloadDocument", + "file": "dialogflow_v2_generated_documents_reload_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_ReloadDocument_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_reload_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsClient.reload_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.ReloadDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "ReloadDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ReloadDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "content_uri", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reload_document" + }, + "description": "Sample for ReloadDocument", + "file": "dialogflow_v2_generated_documents_reload_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_ReloadDocument_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_reload_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsAsyncClient.update_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.UpdateDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": 
"UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.cloud.dialogflow_v2.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "dialogflow_v2_generated_documents_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_UpdateDocument_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.DocumentsClient.update_document", + "method": { + "fullName": "google.cloud.dialogflow.v2.Documents.UpdateDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2.Documents", + "shortName": "Documents" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.cloud.dialogflow_v2.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "dialogflow_v2_generated_documents_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Documents_UpdateDocument_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_documents_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.batch_create_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchCreateEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + 
"shortName": "BatchCreateEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchCreateEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entities", + "type": "MutableSequence[google.cloud.dialogflow_v2.types.EntityType.Entity]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_entities" + }, + "description": "Sample for BatchCreateEntities", + "file": "dialogflow_v2_generated_entity_types_batch_create_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchCreateEntities_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_create_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.batch_create_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchCreateEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchCreateEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchCreateEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entities", + "type": "MutableSequence[google.cloud.dialogflow_v2.types.EntityType.Entity]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_entities" + }, + "description": "Sample for BatchCreateEntities", + "file": "dialogflow_v2_generated_entity_types_batch_create_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchCreateEntities_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_create_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.batch_delete_entities", + 
"method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchDeleteEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchDeleteEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_values", + "type": "MutableSequence[str]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_delete_entities" + }, + "description": "Sample for BatchDeleteEntities", + "file": "dialogflow_v2_generated_entity_types_batch_delete_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchDeleteEntities_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_delete_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.batch_delete_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchDeleteEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchDeleteEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_values", + "type": "MutableSequence[str]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_delete_entities" + }, + "description": "Sample for BatchDeleteEntities", + "file": "dialogflow_v2_generated_entity_types_batch_delete_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchDeleteEntities_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_delete_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": 
"EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.batch_delete_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchDeleteEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchDeleteEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_type_names", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_delete_entity_types" + }, + "description": "Sample for BatchDeleteEntityTypes", + "file": "dialogflow_v2_generated_entity_types_batch_delete_entity_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchDeleteEntityTypes_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_delete_entity_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.batch_delete_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchDeleteEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchDeleteEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_type_names", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_delete_entity_types" + }, + "description": "Sample for BatchDeleteEntityTypes", + "file": "dialogflow_v2_generated_entity_types_batch_delete_entity_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchDeleteEntityTypes_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_delete_entity_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.batch_update_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchUpdateEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchUpdateEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entities", + "type": "MutableSequence[google.cloud.dialogflow_v2.types.EntityType.Entity]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_update_entities" + }, + "description": "Sample for BatchUpdateEntities", + "file": "dialogflow_v2_generated_entity_types_batch_update_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchUpdateEntities_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_update_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.batch_update_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchUpdateEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchUpdateEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entities", + "type": "MutableSequence[google.cloud.dialogflow_v2.types.EntityType.Entity]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_update_entities" + }, + "description": "Sample for BatchUpdateEntities", + "file": "dialogflow_v2_generated_entity_types_batch_update_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchUpdateEntities_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_update_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.batch_update_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchUpdateEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchUpdateEntityTypesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_update_entity_types" + }, + "description": "Sample for BatchUpdateEntityTypes", + "file": "dialogflow_v2_generated_entity_types_batch_update_entity_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchUpdateEntityTypes_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_update_entity_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.batch_update_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchUpdateEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchUpdateEntityTypesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_update_entity_types" + }, + "description": "Sample for BatchUpdateEntityTypes", + "file": "dialogflow_v2_generated_entity_types_batch_update_entity_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_BatchUpdateEntityTypes_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_batch_update_entity_types_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.create_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.CreateEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "CreateEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateEntityTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_type", + "type": "google.cloud.dialogflow_v2.types.EntityType" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.EntityType", + "shortName": "create_entity_type" + }, + "description": "Sample for CreateEntityType", + "file": "dialogflow_v2_generated_entity_types_create_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_CreateEntityType_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_create_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.create_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.CreateEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "CreateEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateEntityTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_type", + "type": "google.cloud.dialogflow_v2.types.EntityType" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.EntityType", + "shortName": "create_entity_type" + }, + "description": "Sample for CreateEntityType", + "file": "dialogflow_v2_generated_entity_types_create_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_CreateEntityType_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "dialogflow_v2_generated_entity_types_create_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.delete_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.DeleteEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "DeleteEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entity_type" + }, + "description": "Sample for DeleteEntityType", + "file": "dialogflow_v2_generated_entity_types_delete_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_DeleteEntityType_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_delete_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.delete_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.DeleteEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "DeleteEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entity_type" + }, + "description": "Sample for DeleteEntityType", + "file": "dialogflow_v2_generated_entity_types_delete_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_DeleteEntityType_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_delete_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.get_entity_type", + "method": { 
+ "fullName": "google.cloud.dialogflow.v2.EntityTypes.GetEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "GetEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.EntityType", + "shortName": "get_entity_type" + }, + "description": "Sample for GetEntityType", + "file": "dialogflow_v2_generated_entity_types_get_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_GetEntityType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_get_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.get_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.GetEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "GetEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.EntityType", + "shortName": "get_entity_type" + }, + "description": "Sample for GetEntityType", + "file": "dialogflow_v2_generated_entity_types_get_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_GetEntityType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_get_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.list_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes", + "service": { + "fullName": 
"google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "ListEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.entity_types.pagers.ListEntityTypesAsyncPager", + "shortName": "list_entity_types" + }, + "description": "Sample for ListEntityTypes", + "file": "dialogflow_v2_generated_entity_types_list_entity_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_ListEntityTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_list_entity_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.list_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "ListEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.entity_types.pagers.ListEntityTypesPager", + "shortName": "list_entity_types" + }, + "description": "Sample for ListEntityTypes", + "file": "dialogflow_v2_generated_entity_types_list_entity_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_ListEntityTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_list_entity_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesAsyncClient.update_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.UpdateEntityType", + "service": { + "fullName": 
"google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "UpdateEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateEntityTypeRequest" + }, + { + "name": "entity_type", + "type": "google.cloud.dialogflow_v2.types.EntityType" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.EntityType", + "shortName": "update_entity_type" + }, + "description": "Sample for UpdateEntityType", + "file": "dialogflow_v2_generated_entity_types_update_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_UpdateEntityType_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_update_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.EntityTypesClient.update_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes.UpdateEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "UpdateEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateEntityTypeRequest" + }, + { + "name": "entity_type", + "type": "google.cloud.dialogflow_v2.types.EntityType" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.EntityType", + "shortName": "update_entity_type" + }, + "description": "Sample for UpdateEntityType", + "file": "dialogflow_v2_generated_entity_types_update_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_EntityTypes_UpdateEntityType_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_entity_types_update_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient.create_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.CreateEnvironment", + 
"service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Environment", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "dialogflow_v2_generated_environments_create_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_CreateEnvironment_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_create_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient.create_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.CreateEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Environment", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "dialogflow_v2_generated_environments_create_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_CreateEnvironment_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_create_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient.delete_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.DeleteEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dialogflow_v2.types.DeleteEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "dialogflow_v2_generated_environments_delete_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_DeleteEnvironment_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_delete_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient.delete_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.DeleteEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "dialogflow_v2_generated_environments_delete_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_DeleteEnvironment_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_delete_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient.get_environment_history", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.GetEnvironmentHistory", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironmentHistory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetEnvironmentHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.environments.pagers.GetEnvironmentHistoryAsyncPager", + "shortName": "get_environment_history" + }, + 
"description": "Sample for GetEnvironmentHistory", + "file": "dialogflow_v2_generated_environments_get_environment_history_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_GetEnvironmentHistory_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_get_environment_history_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient.get_environment_history", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.GetEnvironmentHistory", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironmentHistory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetEnvironmentHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.environments.pagers.GetEnvironmentHistoryPager", + "shortName": "get_environment_history" + }, + "description": "Sample for GetEnvironmentHistory", + "file": "dialogflow_v2_generated_environments_get_environment_history_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_GetEnvironmentHistory_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_get_environment_history_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient.get_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.GetEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "dialogflow_v2_generated_environments_get_environment_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_GetEnvironment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_get_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient.get_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.GetEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "dialogflow_v2_generated_environments_get_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_GetEnvironment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_get_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient.list_environments", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.ListEnvironments", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.environments.pagers.ListEnvironmentsAsyncPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "dialogflow_v2_generated_environments_list_environments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_ListEnvironments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_list_environments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient.list_environments", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.ListEnvironments", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.environments.pagers.ListEnvironmentsPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "dialogflow_v2_generated_environments_list_environments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_ListEnvironments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_list_environments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsAsyncClient.update_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.UpdateEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Environment", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "dialogflow_v2_generated_environments_update_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_UpdateEnvironment_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_update_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.EnvironmentsClient.update_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Environments.UpdateEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Environments", + "shortName": "Environments" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Environment", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "dialogflow_v2_generated_environments_update_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Environments_UpdateEnvironment_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_environments_update_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.FulfillmentsAsyncClient", + "shortName": "FulfillmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.FulfillmentsAsyncClient.get_fulfillment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Fulfillments.GetFulfillment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Fulfillments", + "shortName": "Fulfillments" + }, + "shortName": "GetFulfillment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetFulfillmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Fulfillment", + "shortName": "get_fulfillment" + }, + "description": "Sample for GetFulfillment", + "file": "dialogflow_v2_generated_fulfillments_get_fulfillment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Fulfillments_GetFulfillment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_fulfillments_get_fulfillment_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.FulfillmentsClient", + "shortName": "FulfillmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.FulfillmentsClient.get_fulfillment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Fulfillments.GetFulfillment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Fulfillments", + "shortName": "Fulfillments" + }, + "shortName": "GetFulfillment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetFulfillmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Fulfillment", + "shortName": "get_fulfillment" + }, + "description": "Sample for GetFulfillment", + "file": "dialogflow_v2_generated_fulfillments_get_fulfillment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Fulfillments_GetFulfillment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_fulfillments_get_fulfillment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.FulfillmentsAsyncClient", + "shortName": "FulfillmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.FulfillmentsAsyncClient.update_fulfillment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Fulfillments.UpdateFulfillment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Fulfillments", + "shortName": "Fulfillments" + }, + "shortName": "UpdateFulfillment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateFulfillmentRequest" + }, + { + "name": "fulfillment", + "type": "google.cloud.dialogflow_v2.types.Fulfillment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Fulfillment", + "shortName": "update_fulfillment" + }, + "description": "Sample for UpdateFulfillment", + "file": "dialogflow_v2_generated_fulfillments_update_fulfillment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Fulfillments_UpdateFulfillment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_fulfillments_update_fulfillment_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.dialogflow_v2.FulfillmentsClient", + "shortName": "FulfillmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.FulfillmentsClient.update_fulfillment", + "method": { + "fullName": "google.cloud.dialogflow.v2.Fulfillments.UpdateFulfillment", + "service": { + "fullName": "google.cloud.dialogflow.v2.Fulfillments", + "shortName": "Fulfillments" + }, + "shortName": "UpdateFulfillment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateFulfillmentRequest" + }, + { + "name": "fulfillment", + "type": "google.cloud.dialogflow_v2.types.Fulfillment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Fulfillment", + "shortName": "update_fulfillment" + }, + "description": "Sample for UpdateFulfillment", + "file": "dialogflow_v2_generated_fulfillments_update_fulfillment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Fulfillments_UpdateFulfillment_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_fulfillments_update_fulfillment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient.batch_delete_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.BatchDeleteIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "BatchDeleteIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchDeleteIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intents", + "type": "MutableSequence[google.cloud.dialogflow_v2.types.Intent]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_delete_intents" + }, + "description": "Sample for BatchDeleteIntents", + "file": "dialogflow_v2_generated_intents_batch_delete_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_BatchDeleteIntents_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_batch_delete_intents_async.py" + 
}, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsClient.batch_delete_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.BatchDeleteIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "BatchDeleteIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchDeleteIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intents", + "type": "MutableSequence[google.cloud.dialogflow_v2.types.Intent]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_delete_intents" + }, + "description": "Sample for BatchDeleteIntents", + "file": "dialogflow_v2_generated_intents_batch_delete_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_BatchDeleteIntents_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_batch_delete_intents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient.batch_update_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.BatchUpdateIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "BatchUpdateIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchUpdateIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intent_batch_uri", + "type": "str" + }, + { + "name": "intent_batch_inline", + "type": "google.cloud.dialogflow_v2.types.IntentBatch" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_update_intents" + }, + "description": "Sample for BatchUpdateIntents", + "file": "dialogflow_v2_generated_intents_batch_update_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_BatchUpdateIntents_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dialogflow_v2_generated_intents_batch_update_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsClient.batch_update_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.BatchUpdateIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "BatchUpdateIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.BatchUpdateIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intent_batch_uri", + "type": "str" + }, + { + "name": "intent_batch_inline", + "type": "google.cloud.dialogflow_v2.types.IntentBatch" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_update_intents" + }, + "description": "Sample for BatchUpdateIntents", + "file": "dialogflow_v2_generated_intents_batch_update_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_BatchUpdateIntents_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_batch_update_intents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient.create_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.CreateIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "CreateIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateIntentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intent", + "type": "google.cloud.dialogflow_v2.types.Intent" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Intent", + "shortName": "create_intent" + }, + "description": "Sample for CreateIntent", + "file": "dialogflow_v2_generated_intents_create_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_CreateIntent_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "dialogflow_v2_generated_intents_create_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsClient.create_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.CreateIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "CreateIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateIntentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intent", + "type": "google.cloud.dialogflow_v2.types.Intent" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Intent", + "shortName": "create_intent" + }, + "description": "Sample for CreateIntent", + "file": "dialogflow_v2_generated_intents_create_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_CreateIntent_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_create_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient.delete_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.DeleteIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "DeleteIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteIntentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_intent" + }, + "description": "Sample for DeleteIntent", + "file": "dialogflow_v2_generated_intents_delete_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_DeleteIntent_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_delete_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsClient.delete_intent", + 
"method": { + "fullName": "google.cloud.dialogflow.v2.Intents.DeleteIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "DeleteIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteIntentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_intent" + }, + "description": "Sample for DeleteIntent", + "file": "dialogflow_v2_generated_intents_delete_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_DeleteIntent_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_delete_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient.get_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.GetIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "GetIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetIntentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Intent", + "shortName": "get_intent" + }, + "description": "Sample for GetIntent", + "file": "dialogflow_v2_generated_intents_get_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_GetIntent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_get_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsClient.get_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.GetIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "GetIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetIntentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Intent", + "shortName": "get_intent" + }, + "description": "Sample for GetIntent", + "file": "dialogflow_v2_generated_intents_get_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_GetIntent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_get_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient.list_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.ListIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "ListIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.intents.pagers.ListIntentsAsyncPager", + "shortName": "list_intents" + }, + "description": "Sample for ListIntents", + "file": "dialogflow_v2_generated_intents_list_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_ListIntents_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_list_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsClient.list_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.ListIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "ListIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dialogflow_v2.services.intents.pagers.ListIntentsPager", + "shortName": "list_intents" + }, + "description": "Sample for ListIntents", + "file": "dialogflow_v2_generated_intents_list_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_ListIntents_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_list_intents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsAsyncClient.update_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.UpdateIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "UpdateIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateIntentRequest" + }, + { + "name": "intent", + "type": "google.cloud.dialogflow_v2.types.Intent" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Intent", + "shortName": "update_intent" + }, + "description": "Sample for UpdateIntent", + "file": "dialogflow_v2_generated_intents_update_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_UpdateIntent_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_update_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2.IntentsClient.update_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Intents.UpdateIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Intents", + "shortName": "Intents" + }, + "shortName": "UpdateIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateIntentRequest" + }, + { + "name": "intent", + "type": "google.cloud.dialogflow_v2.types.Intent" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Intent", + "shortName": "update_intent" + }, + "description": "Sample for UpdateIntent", + "file": "dialogflow_v2_generated_intents_update_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Intents_UpdateIntent_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_intents_update_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient.create_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.CreateKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "CreateKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateKnowledgeBaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "knowledge_base", + "type": "google.cloud.dialogflow_v2.types.KnowledgeBase" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.KnowledgeBase", + "shortName": "create_knowledge_base" + }, + "description": "Sample for CreateKnowledgeBase", + "file": "dialogflow_v2_generated_knowledge_bases_create_knowledge_base_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_CreateKnowledgeBase_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_create_knowledge_base_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient.create_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.CreateKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "CreateKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateKnowledgeBaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "knowledge_base", + "type": "google.cloud.dialogflow_v2.types.KnowledgeBase" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + 
}, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.KnowledgeBase", + "shortName": "create_knowledge_base" + }, + "description": "Sample for CreateKnowledgeBase", + "file": "dialogflow_v2_generated_knowledge_bases_create_knowledge_base_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_CreateKnowledgeBase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_create_knowledge_base_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient.delete_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.DeleteKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "DeleteKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteKnowledgeBaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_knowledge_base" + }, + "description": "Sample for DeleteKnowledgeBase", + "file": "dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_DeleteKnowledgeBase_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient.delete_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.DeleteKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "DeleteKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteKnowledgeBaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_knowledge_base" + }, + "description": "Sample for 
DeleteKnowledgeBase", + "file": "dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_DeleteKnowledgeBase_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_delete_knowledge_base_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient.get_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.GetKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "GetKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetKnowledgeBaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.KnowledgeBase", + "shortName": "get_knowledge_base" + }, + "description": "Sample for GetKnowledgeBase", + "file": "dialogflow_v2_generated_knowledge_bases_get_knowledge_base_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_GetKnowledgeBase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_get_knowledge_base_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient.get_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.GetKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "GetKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetKnowledgeBaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.KnowledgeBase", + "shortName": "get_knowledge_base" + }, + "description": "Sample for GetKnowledgeBase", + "file": "dialogflow_v2_generated_knowledge_bases_get_knowledge_base_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_GetKnowledgeBase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_get_knowledge_base_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient.list_knowledge_bases", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.ListKnowledgeBases", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "ListKnowledgeBases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListKnowledgeBasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.knowledge_bases.pagers.ListKnowledgeBasesAsyncPager", + "shortName": "list_knowledge_bases" + }, + "description": "Sample for ListKnowledgeBases", + "file": "dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_ListKnowledgeBases_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient.list_knowledge_bases", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.ListKnowledgeBases", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "ListKnowledgeBases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListKnowledgeBasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.knowledge_bases.pagers.ListKnowledgeBasesPager", + "shortName": "list_knowledge_bases" + }, + "description": "Sample for ListKnowledgeBases", + "file": "dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_ListKnowledgeBases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_list_knowledge_bases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesAsyncClient.update_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.UpdateKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "UpdateKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateKnowledgeBaseRequest" + }, + { + "name": "knowledge_base", + "type": "google.cloud.dialogflow_v2.types.KnowledgeBase" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.KnowledgeBase", + "shortName": "update_knowledge_base" + }, + "description": "Sample for UpdateKnowledgeBase", + "file": "dialogflow_v2_generated_knowledge_bases_update_knowledge_base_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_UpdateKnowledgeBase_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_update_knowledge_base_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2.KnowledgeBasesClient.update_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases.UpdateKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "UpdateKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateKnowledgeBaseRequest" + }, + { + "name": "knowledge_base", + "type": "google.cloud.dialogflow_v2.types.KnowledgeBase" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.KnowledgeBase", + 
"shortName": "update_knowledge_base" + }, + "description": "Sample for UpdateKnowledgeBase", + "file": "dialogflow_v2_generated_knowledge_bases_update_knowledge_base_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_KnowledgeBases_UpdateKnowledgeBase_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_knowledge_bases_update_knowledge_base_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.analyze_content", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.AnalyzeContent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "AnalyzeContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.AnalyzeContentRequest" + }, + { + "name": "participant", + "type": "str" + }, + { + "name": "text_input", + "type": "google.cloud.dialogflow_v2.types.TextInput" + }, + { + "name": "event_input", + "type": "google.cloud.dialogflow_v2.types.EventInput" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.AnalyzeContentResponse", + "shortName": "analyze_content" + }, + "description": "Sample for AnalyzeContent", + "file": "dialogflow_v2_generated_participants_analyze_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_AnalyzeContent_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_analyze_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.analyze_content", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.AnalyzeContent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "AnalyzeContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.AnalyzeContentRequest" + }, + { + "name": "participant", + "type": "str" + }, + { + "name": "text_input", + "type": "google.cloud.dialogflow_v2.types.TextInput" + }, + { + "name": "event_input", + "type": "google.cloud.dialogflow_v2.types.EventInput" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.AnalyzeContentResponse", + "shortName": "analyze_content" + }, + "description": "Sample for AnalyzeContent", + "file": "dialogflow_v2_generated_participants_analyze_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_AnalyzeContent_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_analyze_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.create_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.CreateParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "CreateParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateParticipantRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "participant", + "type": "google.cloud.dialogflow_v2.types.Participant" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Participant", + "shortName": "create_participant" + }, + "description": "Sample for CreateParticipant", + "file": "dialogflow_v2_generated_participants_create_participant_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_CreateParticipant_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_create_participant_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.create_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.CreateParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "CreateParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateParticipantRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "participant", + "type": "google.cloud.dialogflow_v2.types.Participant" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Participant", + "shortName": "create_participant" + }, + "description": "Sample for CreateParticipant", + "file": "dialogflow_v2_generated_participants_create_participant_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_CreateParticipant_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_create_participant_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.get_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.GetParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "GetParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetParticipantRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Participant", + "shortName": "get_participant" + }, + "description": "Sample for GetParticipant", + "file": "dialogflow_v2_generated_participants_get_participant_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_GetParticipant_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_get_participant_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.get_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.GetParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "GetParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetParticipantRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.dialogflow_v2.types.Participant", + "shortName": "get_participant" + }, + "description": "Sample for GetParticipant", + "file": "dialogflow_v2_generated_participants_get_participant_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_GetParticipant_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_get_participant_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.list_participants", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.ListParticipants", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "ListParticipants" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListParticipantsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.participants.pagers.ListParticipantsAsyncPager", + "shortName": "list_participants" + }, + "description": "Sample for ListParticipants", + "file": "dialogflow_v2_generated_participants_list_participants_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_ListParticipants_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_list_participants_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.list_participants", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.ListParticipants", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "ListParticipants" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListParticipantsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.participants.pagers.ListParticipantsPager", + "shortName": "list_participants" + }, + 
"description": "Sample for ListParticipants", + "file": "dialogflow_v2_generated_participants_list_participants_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_ListParticipants_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_list_participants_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.streaming_analyze_content", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.StreamingAnalyzeContent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "StreamingAnalyzeContent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.dialogflow_v2.types.StreamingAnalyzeContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.dialogflow_v2.types.StreamingAnalyzeContentResponse]", + "shortName": "streaming_analyze_content" + }, + "description": "Sample for StreamingAnalyzeContent", + "file": "dialogflow_v2_generated_participants_streaming_analyze_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_StreamingAnalyzeContent_async", + "segments": [ + { + "end": 69, + "start": 27, + "type": "FULL" + }, + { + "end": 69, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 62, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 65, + "start": 63, + "type": "REQUEST_EXECUTION" + }, + { + "end": 70, + "start": 66, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_streaming_analyze_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.streaming_analyze_content", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.StreamingAnalyzeContent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "StreamingAnalyzeContent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.dialogflow_v2.types.StreamingAnalyzeContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.dialogflow_v2.types.StreamingAnalyzeContentResponse]", + "shortName": "streaming_analyze_content" + }, + "description": "Sample for StreamingAnalyzeContent", + "file": 
"dialogflow_v2_generated_participants_streaming_analyze_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_StreamingAnalyzeContent_sync", + "segments": [ + { + "end": 69, + "start": 27, + "type": "FULL" + }, + { + "end": 69, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 62, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 65, + "start": 63, + "type": "REQUEST_EXECUTION" + }, + { + "end": 70, + "start": 66, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_streaming_analyze_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.suggest_articles", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.SuggestArticles", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestArticles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SuggestArticlesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SuggestArticlesResponse", + "shortName": "suggest_articles" + }, + "description": "Sample for SuggestArticles", + "file": "dialogflow_v2_generated_participants_suggest_articles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_SuggestArticles_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_suggest_articles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.suggest_articles", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.SuggestArticles", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestArticles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SuggestArticlesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SuggestArticlesResponse", + "shortName": "suggest_articles" + }, + "description": "Sample for SuggestArticles", + "file": "dialogflow_v2_generated_participants_suggest_articles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2_generated_Participants_SuggestArticles_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_suggest_articles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.suggest_faq_answers", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.SuggestFaqAnswers", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestFaqAnswers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SuggestFaqAnswersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SuggestFaqAnswersResponse", + "shortName": "suggest_faq_answers" + }, + "description": "Sample for SuggestFaqAnswers", + "file": "dialogflow_v2_generated_participants_suggest_faq_answers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_SuggestFaqAnswers_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_suggest_faq_answers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.suggest_faq_answers", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.SuggestFaqAnswers", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestFaqAnswers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SuggestFaqAnswersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SuggestFaqAnswersResponse", + "shortName": "suggest_faq_answers" + }, + "description": "Sample for SuggestFaqAnswers", + "file": "dialogflow_v2_generated_participants_suggest_faq_answers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_SuggestFaqAnswers_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_suggest_faq_answers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.suggest_smart_replies", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.SuggestSmartReplies", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestSmartReplies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SuggestSmartRepliesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SuggestSmartRepliesResponse", + "shortName": "suggest_smart_replies" + }, + "description": "Sample for SuggestSmartReplies", + "file": "dialogflow_v2_generated_participants_suggest_smart_replies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_SuggestSmartReplies_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_suggest_smart_replies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.suggest_smart_replies", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.SuggestSmartReplies", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestSmartReplies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.SuggestSmartRepliesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SuggestSmartRepliesResponse", + "shortName": "suggest_smart_replies" + }, + "description": "Sample for SuggestSmartReplies", + "file": "dialogflow_v2_generated_participants_suggest_smart_replies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_SuggestSmartReplies_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 
40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_suggest_smart_replies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsAsyncClient.update_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.UpdateParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "UpdateParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateParticipantRequest" + }, + { + "name": "participant", + "type": "google.cloud.dialogflow_v2.types.Participant" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Participant", + "shortName": "update_participant" + }, + "description": "Sample for UpdateParticipant", + "file": "dialogflow_v2_generated_participants_update_participant_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_UpdateParticipant_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_update_participant_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2.ParticipantsClient.update_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2.Participants.UpdateParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2.Participants", + "shortName": "Participants" + }, + "shortName": "UpdateParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateParticipantRequest" + }, + { + "name": "participant", + "type": "google.cloud.dialogflow_v2.types.Participant" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Participant", + "shortName": "update_participant" + }, + "description": "Sample for UpdateParticipant", + "file": "dialogflow_v2_generated_participants_update_participant_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Participants_UpdateParticipant_sync", + "segments": [ + { + 
"end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_participants_update_participant_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient.create_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.CreateSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "CreateSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateSessionEntityTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "session_entity_type", + "type": "google.cloud.dialogflow_v2.types.SessionEntityType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SessionEntityType", + "shortName": "create_session_entity_type" + }, + "description": "Sample for CreateSessionEntityType", + "file": "dialogflow_v2_generated_session_entity_types_create_session_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_CreateSessionEntityType_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_create_session_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient.create_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.CreateSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "CreateSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateSessionEntityTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "session_entity_type", + "type": "google.cloud.dialogflow_v2.types.SessionEntityType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SessionEntityType", + "shortName": "create_session_entity_type" + }, + "description": 
"Sample for CreateSessionEntityType", + "file": "dialogflow_v2_generated_session_entity_types_create_session_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_CreateSessionEntityType_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_create_session_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient.delete_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.DeleteSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "DeleteSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteSessionEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_session_entity_type" + }, + "description": "Sample for DeleteSessionEntityType", + "file": "dialogflow_v2_generated_session_entity_types_delete_session_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_DeleteSessionEntityType_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_delete_session_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient.delete_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.DeleteSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "DeleteSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteSessionEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_session_entity_type" + }, + "description": "Sample for DeleteSessionEntityType", + "file": 
"dialogflow_v2_generated_session_entity_types_delete_session_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_DeleteSessionEntityType_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_delete_session_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient.get_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.GetSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "GetSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetSessionEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SessionEntityType", + "shortName": "get_session_entity_type" + }, + "description": "Sample for GetSessionEntityType", + "file": "dialogflow_v2_generated_session_entity_types_get_session_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_GetSessionEntityType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_get_session_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient.get_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.GetSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "GetSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetSessionEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SessionEntityType", + "shortName": "get_session_entity_type" + }, + "description": "Sample for 
GetSessionEntityType", + "file": "dialogflow_v2_generated_session_entity_types_get_session_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_GetSessionEntityType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_get_session_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient.list_session_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.ListSessionEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "ListSessionEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListSessionEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.session_entity_types.pagers.ListSessionEntityTypesAsyncPager", + "shortName": "list_session_entity_types" + }, + "description": "Sample for ListSessionEntityTypes", + "file": "dialogflow_v2_generated_session_entity_types_list_session_entity_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_ListSessionEntityTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_list_session_entity_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient.list_session_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.ListSessionEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "ListSessionEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListSessionEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dialogflow_v2.services.session_entity_types.pagers.ListSessionEntityTypesPager", + "shortName": "list_session_entity_types" + }, + "description": "Sample for ListSessionEntityTypes", + "file": "dialogflow_v2_generated_session_entity_types_list_session_entity_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_ListSessionEntityTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_list_session_entity_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesAsyncClient.update_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.UpdateSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "UpdateSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateSessionEntityTypeRequest" + }, + { + "name": "session_entity_type", + "type": "google.cloud.dialogflow_v2.types.SessionEntityType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SessionEntityType", + "shortName": "update_session_entity_type" + }, + "description": "Sample for UpdateSessionEntityType", + "file": "dialogflow_v2_generated_session_entity_types_update_session_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_UpdateSessionEntityType_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_update_session_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionEntityTypesClient.update_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes.UpdateSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "UpdateSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dialogflow_v2.types.UpdateSessionEntityTypeRequest" + }, + { + "name": "session_entity_type", + "type": "google.cloud.dialogflow_v2.types.SessionEntityType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.SessionEntityType", + "shortName": "update_session_entity_type" + }, + "description": "Sample for UpdateSessionEntityType", + "file": "dialogflow_v2_generated_session_entity_types_update_session_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_SessionEntityTypes_UpdateSessionEntityType_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_session_entity_types_update_session_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionsAsyncClient", + "shortName": "SessionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionsAsyncClient.detect_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Sessions.DetectIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Sessions", + "shortName": "Sessions" + }, + "shortName": "DetectIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DetectIntentRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "query_input", + "type": "google.cloud.dialogflow_v2.types.QueryInput" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.DetectIntentResponse", + "shortName": "detect_intent" + }, + "description": "Sample for DetectIntent", + "file": "dialogflow_v2_generated_sessions_detect_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Sessions_DetectIntent_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_sessions_detect_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionsClient", + "shortName": "SessionsClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionsClient.detect_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Sessions.DetectIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Sessions", + "shortName": "Sessions" + }, + "shortName": "DetectIntent" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DetectIntentRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "query_input", + "type": "google.cloud.dialogflow_v2.types.QueryInput" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.DetectIntentResponse", + "shortName": "detect_intent" + }, + "description": "Sample for DetectIntent", + "file": "dialogflow_v2_generated_sessions_detect_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Sessions_DetectIntent_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_sessions_detect_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionsAsyncClient", + "shortName": "SessionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionsAsyncClient.streaming_detect_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Sessions.StreamingDetectIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Sessions", + "shortName": "Sessions" + }, + "shortName": "StreamingDetectIntent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.dialogflow_v2.types.StreamingDetectIntentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.dialogflow_v2.types.StreamingDetectIntentResponse]", + "shortName": "streaming_detect_intent" + }, + "description": "Sample for StreamingDetectIntent", + "file": "dialogflow_v2_generated_sessions_streaming_detect_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Sessions_StreamingDetectIntent_async", + "segments": [ + { + "end": 68, + "start": 27, + "type": "FULL" + }, + { + "end": 68, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 61, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 62, + "type": "REQUEST_EXECUTION" + }, + { + "end": 69, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_sessions_streaming_detect_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.SessionsClient", + "shortName": "SessionsClient" + }, + "fullName": "google.cloud.dialogflow_v2.SessionsClient.streaming_detect_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2.Sessions.StreamingDetectIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2.Sessions", + "shortName": "Sessions" + }, + "shortName": "StreamingDetectIntent" + }, + "parameters": [ + { + "name": "requests", + "type": 
"Iterator[google.cloud.dialogflow_v2.types.StreamingDetectIntentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.dialogflow_v2.types.StreamingDetectIntentResponse]", + "shortName": "streaming_detect_intent" + }, + "description": "Sample for StreamingDetectIntent", + "file": "dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Sessions_StreamingDetectIntent_sync", + "segments": [ + { + "end": 68, + "start": 27, + "type": "FULL" + }, + { + "end": 68, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 61, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 62, + "type": "REQUEST_EXECUTION" + }, + { + "end": 69, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_sessions_streaming_detect_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient.create_version", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.CreateVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "CreateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateVersionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "version", + "type": "google.cloud.dialogflow_v2.types.Version" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Version", + "shortName": "create_version" + }, + "description": "Sample for CreateVersion", + "file": "dialogflow_v2_generated_versions_create_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_CreateVersion_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_create_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsClient.create_version", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.CreateVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "CreateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.CreateVersionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "version", + "type": 
"google.cloud.dialogflow_v2.types.Version" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Version", + "shortName": "create_version" + }, + "description": "Sample for CreateVersion", + "file": "dialogflow_v2_generated_versions_create_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_CreateVersion_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_create_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient.delete_version", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.DeleteVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "DeleteVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_version" + }, + "description": "Sample for DeleteVersion", + "file": "dialogflow_v2_generated_versions_delete_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_DeleteVersion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_delete_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsClient.delete_version", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.DeleteVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "DeleteVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.DeleteVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_version" + }, + "description": "Sample for DeleteVersion", + "file": "dialogflow_v2_generated_versions_delete_version_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_DeleteVersion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_delete_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient.get_version", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.GetVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "GetVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Version", + "shortName": "get_version" + }, + "description": "Sample for GetVersion", + "file": "dialogflow_v2_generated_versions_get_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_GetVersion_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_get_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsClient.get_version", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.GetVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "GetVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.GetVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Version", + "shortName": "get_version" + }, + "description": "Sample for GetVersion", + "file": "dialogflow_v2_generated_versions_get_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_GetVersion_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + 
}, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_get_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient.list_versions", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.ListVersions", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "ListVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.versions.pagers.ListVersionsAsyncPager", + "shortName": "list_versions" + }, + "description": "Sample for ListVersions", + "file": "dialogflow_v2_generated_versions_list_versions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_ListVersions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_list_versions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsClient.list_versions", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.ListVersions", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "ListVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.ListVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.services.versions.pagers.ListVersionsPager", + "shortName": "list_versions" + }, + "description": "Sample for ListVersions", + "file": "dialogflow_v2_generated_versions_list_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_ListVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_list_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
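ListVersions returns a pager like the other list methods, and UpdateVersion takes a Version plus a FieldMask; description is the one field the mask can usefully name, since the rest are output-only. A sketch with placeholder resources:

    from google.cloud import dialogflow_v2
    from google.protobuf import field_mask_pb2

    client = dialogflow_v2.VersionsClient()

    # The pager fetches further pages lazily while iterating.
    for version in client.list_versions(parent="projects/my-project/agent"):
        print(version.name, version.description)

    updated = client.update_version(
        version=dialogflow_v2.Version(
            name="projects/my-project/agent/versions/1",
            description="Promoted to production",
        ),
        # description is the writable field here; the others are output-only.
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )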
"async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsAsyncClient.update_version", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.UpdateVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "UpdateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateVersionRequest" + }, + { + "name": "version", + "type": "google.cloud.dialogflow_v2.types.Version" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Version", + "shortName": "update_version" + }, + "description": "Sample for UpdateVersion", + "file": "dialogflow_v2_generated_versions_update_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_UpdateVersion_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_update_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2.VersionsClient.update_version", + "method": { + "fullName": "google.cloud.dialogflow.v2.Versions.UpdateVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2.Versions", + "shortName": "Versions" + }, + "shortName": "UpdateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2.types.UpdateVersionRequest" + }, + { + "name": "version", + "type": "google.cloud.dialogflow_v2.types.Version" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2.types.Version", + "shortName": "update_version" + }, + "description": "Sample for UpdateVersion", + "file": "dialogflow_v2_generated_versions_update_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2_generated_Versions_UpdateVersion_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2_generated_versions_update_version_sync.py" + } + ] +} diff --git 
a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json new file mode 100644 index 000000000000..11feb40f02d8 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json @@ -0,0 +1,15337 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.dialogflow.v2beta1", + "version": "v2beta1" + } + ], + "language": "PYTHON", + "name": "google-cloud-dialogflow", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.delete_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.DeleteAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "DeleteAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_agent" + }, + "description": "Sample for DeleteAgent", + "file": "dialogflow_v2beta1_generated_agents_delete_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_DeleteAgent_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_delete_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient.delete_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.DeleteAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "DeleteAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_agent" + }, + "description": "Sample for DeleteAgent", + "file": "dialogflow_v2beta1_generated_agents_delete_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_DeleteAgent_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { 
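The v2beta1 metadata file that starts here mirrors the v2 one method for method. Its opening DeleteAgent entries show one quirk worth noting: the RPC takes the parent project rather than an agent name, because a project hosts at most one ES agent. A sketch with a placeholder project:

    from google.cloud import dialogflow_v2beta1

    client = dialogflow_v2beta1.AgentsClient()
    # Deletes the project's single agent; returns None on success.
    client.delete_agent(parent="projects/my-project")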
+ "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_delete_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.export_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.ExportAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "ExportAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ExportAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_agent" + }, + "description": "Sample for ExportAgent", + "file": "dialogflow_v2beta1_generated_agents_export_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_ExportAgent_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_export_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient.export_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.ExportAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "ExportAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ExportAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_agent" + }, + "description": "Sample for ExportAgent", + "file": "dialogflow_v2beta1_generated_agents_export_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_ExportAgent_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_export_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.get_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.GetAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "GetAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Agent", + "shortName": "get_agent" + }, + "description": "Sample for GetAgent", + "file": "dialogflow_v2beta1_generated_agents_get_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_GetAgent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_get_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient.get_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.GetAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "GetAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Agent", + "shortName": "get_agent" + }, + "description": "Sample for GetAgent", + "file": "dialogflow_v2beta1_generated_agents_get_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_GetAgent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_get_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.get_validation_result", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.GetValidationResult", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + 
"shortName": "Agents" + }, + "shortName": "GetValidationResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetValidationResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.ValidationResult", + "shortName": "get_validation_result" + }, + "description": "Sample for GetValidationResult", + "file": "dialogflow_v2beta1_generated_agents_get_validation_result_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_GetValidationResult_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_get_validation_result_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient.get_validation_result", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.GetValidationResult", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "GetValidationResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetValidationResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.ValidationResult", + "shortName": "get_validation_result" + }, + "description": "Sample for GetValidationResult", + "file": "dialogflow_v2beta1_generated_agents_get_validation_result_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_GetValidationResult_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_get_validation_result_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.import_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.ImportAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "ImportAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ImportAgentRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_agent" + }, + "description": "Sample for ImportAgent", + "file": "dialogflow_v2beta1_generated_agents_import_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_ImportAgent_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_import_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient.import_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.ImportAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "ImportAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ImportAgentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_agent" + }, + "description": "Sample for ImportAgent", + "file": "dialogflow_v2beta1_generated_agents_import_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_ImportAgent_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_import_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.restore_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.RestoreAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "RestoreAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.RestoreAgentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_agent" + }, + "description": "Sample for RestoreAgent", + "file": "dialogflow_v2beta1_generated_agents_restore_agent_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_RestoreAgent_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_restore_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient.restore_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.RestoreAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "RestoreAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.RestoreAgentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_agent" + }, + "description": "Sample for RestoreAgent", + "file": "dialogflow_v2beta1_generated_agents_restore_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_RestoreAgent_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_restore_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.search_agents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.SearchAgents", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "SearchAgents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SearchAgentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.agents.pagers.SearchAgentsAsyncPager", + "shortName": "search_agents" + }, + "description": "Sample for SearchAgents", + "file": "dialogflow_v2beta1_generated_agents_search_agents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_SearchAgents_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_search_agents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient.search_agents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.SearchAgents", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "SearchAgents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SearchAgentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.agents.pagers.SearchAgentsPager", + "shortName": "search_agents" + }, + "description": "Sample for SearchAgents", + "file": "dialogflow_v2beta1_generated_agents_search_agents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_SearchAgents_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_search_agents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.set_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.SetAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "SetAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SetAgentRequest" + }, + { + "name": "agent", + "type": "google.cloud.dialogflow_v2beta1.types.Agent" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Agent", + "shortName": "set_agent" + }, + "description": "Sample for SetAgent", + "file": "dialogflow_v2beta1_generated_agents_set_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_SetAgent_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "dialogflow_v2beta1_generated_agents_set_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient.set_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.SetAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "SetAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SetAgentRequest" + }, + { + "name": "agent", + "type": "google.cloud.dialogflow_v2beta1.types.Agent" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Agent", + "shortName": "set_agent" + }, + "description": "Sample for SetAgent", + "file": "dialogflow_v2beta1_generated_agents_set_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_SetAgent_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_set_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient", + "shortName": "AgentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AgentsAsyncClient.train_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.TrainAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "TrainAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.TrainAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "train_agent" + }, + "description": "Sample for TrainAgent", + "file": "dialogflow_v2beta1_generated_agents_train_agent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_TrainAgent_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_train_agent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AgentsClient", + "shortName": "AgentsClient" + }, + "fullName": 
"google.cloud.dialogflow_v2beta1.AgentsClient.train_agent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents.TrainAgent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Agents", + "shortName": "Agents" + }, + "shortName": "TrainAgent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.TrainAgentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "train_agent" + }, + "description": "Sample for TrainAgent", + "file": "dialogflow_v2beta1_generated_agents_train_agent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Agents_TrainAgent_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_agents_train_agent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsAsyncClient", + "shortName": "AnswerRecordsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsAsyncClient.get_answer_record", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords.GetAnswerRecord", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": "GetAnswerRecord" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetAnswerRecordRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.AnswerRecord", + "shortName": "get_answer_record" + }, + "description": "Sample for GetAnswerRecord", + "file": "dialogflow_v2beta1_generated_answer_records_get_answer_record_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_AnswerRecords_GetAnswerRecord_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_answer_records_get_answer_record_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsClient", + "shortName": "AnswerRecordsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsClient.get_answer_record", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords.GetAnswerRecord", + "service": { + "fullName": 
"google.cloud.dialogflow.v2beta1.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": "GetAnswerRecord" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetAnswerRecordRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.AnswerRecord", + "shortName": "get_answer_record" + }, + "description": "Sample for GetAnswerRecord", + "file": "dialogflow_v2beta1_generated_answer_records_get_answer_record_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_AnswerRecords_GetAnswerRecord_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_answer_records_get_answer_record_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsAsyncClient", + "shortName": "AnswerRecordsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsAsyncClient.list_answer_records", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords.ListAnswerRecords", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": "ListAnswerRecords" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListAnswerRecordsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.answer_records.pagers.ListAnswerRecordsAsyncPager", + "shortName": "list_answer_records" + }, + "description": "Sample for ListAnswerRecords", + "file": "dialogflow_v2beta1_generated_answer_records_list_answer_records_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_AnswerRecords_ListAnswerRecords_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_answer_records_list_answer_records_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsClient", + "shortName": "AnswerRecordsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsClient.list_answer_records", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords.ListAnswerRecords", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords", + 
"shortName": "AnswerRecords" + }, + "shortName": "ListAnswerRecords" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListAnswerRecordsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.answer_records.pagers.ListAnswerRecordsPager", + "shortName": "list_answer_records" + }, + "description": "Sample for ListAnswerRecords", + "file": "dialogflow_v2beta1_generated_answer_records_list_answer_records_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_AnswerRecords_ListAnswerRecords_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_answer_records_list_answer_records_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsAsyncClient", + "shortName": "AnswerRecordsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsAsyncClient.update_answer_record", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords.UpdateAnswerRecord", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": "UpdateAnswerRecord" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateAnswerRecordRequest" + }, + { + "name": "answer_record", + "type": "google.cloud.dialogflow_v2beta1.types.AnswerRecord" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.AnswerRecord", + "shortName": "update_answer_record" + }, + "description": "Sample for UpdateAnswerRecord", + "file": "dialogflow_v2beta1_generated_answer_records_update_answer_record_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_AnswerRecords_UpdateAnswerRecord_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_answer_records_update_answer_record_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsClient", + "shortName": "AnswerRecordsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.AnswerRecordsClient.update_answer_record", + "method": { + 
"fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords.UpdateAnswerRecord", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.AnswerRecords", + "shortName": "AnswerRecords" + }, + "shortName": "UpdateAnswerRecord" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateAnswerRecordRequest" + }, + { + "name": "answer_record", + "type": "google.cloud.dialogflow_v2beta1.types.AnswerRecord" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.AnswerRecord", + "shortName": "update_answer_record" + }, + "description": "Sample for UpdateAnswerRecord", + "file": "dialogflow_v2beta1_generated_answer_records_update_answer_record_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_AnswerRecords_UpdateAnswerRecord_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_answer_records_update_answer_record_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient.create_context", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.CreateContext", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "CreateContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateContextRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "context", + "type": "google.cloud.dialogflow_v2beta1.types.Context" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Context", + "shortName": "create_context" + }, + "description": "Sample for CreateContext", + "file": "dialogflow_v2beta1_generated_contexts_create_context_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_CreateContext_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_create_context_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient", + "shortName": "ContextsClient" + }, + 
"fullName": "google.cloud.dialogflow_v2beta1.ContextsClient.create_context", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.CreateContext", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "CreateContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateContextRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "context", + "type": "google.cloud.dialogflow_v2beta1.types.Context" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Context", + "shortName": "create_context" + }, + "description": "Sample for CreateContext", + "file": "dialogflow_v2beta1_generated_contexts_create_context_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_CreateContext_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_create_context_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient.delete_all_contexts", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.DeleteAllContexts", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "DeleteAllContexts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteAllContextsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_all_contexts" + }, + "description": "Sample for DeleteAllContexts", + "file": "dialogflow_v2beta1_generated_contexts_delete_all_contexts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_DeleteAllContexts_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_delete_all_contexts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient.delete_all_contexts", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.DeleteAllContexts", + 
"service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "DeleteAllContexts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteAllContextsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_all_contexts" + }, + "description": "Sample for DeleteAllContexts", + "file": "dialogflow_v2beta1_generated_contexts_delete_all_contexts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_DeleteAllContexts_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_delete_all_contexts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient.delete_context", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.DeleteContext", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "DeleteContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteContextRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_context" + }, + "description": "Sample for DeleteContext", + "file": "dialogflow_v2beta1_generated_contexts_delete_context_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_DeleteContext_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_delete_context_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient.delete_context", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.DeleteContext", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "DeleteContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteContextRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_context" + }, + "description": "Sample for DeleteContext", + "file": "dialogflow_v2beta1_generated_contexts_delete_context_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_DeleteContext_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_delete_context_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient.get_context", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.GetContext", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "GetContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetContextRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Context", + "shortName": "get_context" + }, + "description": "Sample for GetContext", + "file": "dialogflow_v2beta1_generated_contexts_get_context_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_GetContext_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_get_context_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient.get_context", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.GetContext", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "GetContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetContextRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Context", + "shortName": "get_context" + }, + "description": "Sample for GetContext", + "file": "dialogflow_v2beta1_generated_contexts_get_context_sync.py", + "language": "PYTHON", 
+ "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_GetContext_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_get_context_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient.list_contexts", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.ListContexts", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "ListContexts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListContextsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.contexts.pagers.ListContextsAsyncPager", + "shortName": "list_contexts" + }, + "description": "Sample for ListContexts", + "file": "dialogflow_v2beta1_generated_contexts_list_contexts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_ListContexts_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_list_contexts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient.list_contexts", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.ListContexts", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "ListContexts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListContextsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.contexts.pagers.ListContextsPager", + "shortName": "list_contexts" + }, + "description": "Sample for ListContexts", + "file": "dialogflow_v2beta1_generated_contexts_list_contexts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_ListContexts_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, 
+ { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_list_contexts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient", + "shortName": "ContextsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsAsyncClient.update_context", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.UpdateContext", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "UpdateContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateContextRequest" + }, + { + "name": "context", + "type": "google.cloud.dialogflow_v2beta1.types.Context" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Context", + "shortName": "update_context" + }, + "description": "Sample for UpdateContext", + "file": "dialogflow_v2beta1_generated_contexts_update_context_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_UpdateContext_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_update_context_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient", + "shortName": "ContextsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ContextsClient.update_context", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts.UpdateContext", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Contexts", + "shortName": "Contexts" + }, + "shortName": "UpdateContext" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateContextRequest" + }, + { + "name": "context", + "type": "google.cloud.dialogflow_v2beta1.types.Context" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Context", + "shortName": "update_context" + }, + "description": "Sample for UpdateContext", + "file": "dialogflow_v2beta1_generated_contexts_update_context_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Contexts_UpdateContext_sync", + "segments": [ + { + 
"end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_contexts_update_context_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient.clear_suggestion_feature_config", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.ClearSuggestionFeatureConfig", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "ClearSuggestionFeatureConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ClearSuggestionFeatureConfigRequest" + }, + { + "name": "conversation_profile", + "type": "str" + }, + { + "name": "participant_role", + "type": "google.cloud.dialogflow_v2beta1.types.Participant.Role" + }, + { + "name": "suggestion_feature_type", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestionFeature.Type" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "clear_suggestion_feature_config" + }, + "description": "Sample for ClearSuggestionFeatureConfig", + "file": "dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_ClearSuggestionFeatureConfig_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient.clear_suggestion_feature_config", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.ClearSuggestionFeatureConfig", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "ClearSuggestionFeatureConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ClearSuggestionFeatureConfigRequest" + }, + { + "name": "conversation_profile", + "type": "str" + }, + { + "name": "participant_role", + "type": "google.cloud.dialogflow_v2beta1.types.Participant.Role" + }, + { + "name": 
"suggestion_feature_type", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestionFeature.Type" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "clear_suggestion_feature_config" + }, + "description": "Sample for ClearSuggestionFeatureConfig", + "file": "dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_ClearSuggestionFeatureConfig_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_clear_suggestion_feature_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient.create_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.CreateConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "CreateConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateConversationProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_profile", + "type": "google.cloud.dialogflow_v2beta1.types.ConversationProfile" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.ConversationProfile", + "shortName": "create_conversation_profile" + }, + "description": "Sample for CreateConversationProfile", + "file": "dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_CreateConversationProfile_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": 
"google.cloud.dialogflow_v2beta1.ConversationProfilesClient.create_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.CreateConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "CreateConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateConversationProfileRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation_profile", + "type": "google.cloud.dialogflow_v2beta1.types.ConversationProfile" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.ConversationProfile", + "shortName": "create_conversation_profile" + }, + "description": "Sample for CreateConversationProfile", + "file": "dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_CreateConversationProfile_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_create_conversation_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient.delete_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.DeleteConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "DeleteConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteConversationProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_conversation_profile" + }, + "description": "Sample for DeleteConversationProfile", + "file": "dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_DeleteConversationProfile_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient.delete_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.DeleteConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "DeleteConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteConversationProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_conversation_profile" + }, + "description": "Sample for DeleteConversationProfile", + "file": "dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_DeleteConversationProfile_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_delete_conversation_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient.get_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.GetConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "GetConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetConversationProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.ConversationProfile", + "shortName": "get_conversation_profile" + }, + "description": "Sample for GetConversationProfile", + "file": "dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_GetConversationProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 
46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient.get_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.GetConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "GetConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetConversationProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.ConversationProfile", + "shortName": "get_conversation_profile" + }, + "description": "Sample for GetConversationProfile", + "file": "dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_GetConversationProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_get_conversation_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient.list_conversation_profiles", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.ListConversationProfiles", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "ListConversationProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListConversationProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.conversation_profiles.pagers.ListConversationProfilesAsyncPager", + "shortName": "list_conversation_profiles" + }, + "description": "Sample for ListConversationProfiles", + "file": "dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_ListConversationProfiles_async", + "segments": [ + { + "end": 52, + 
"start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient.list_conversation_profiles", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.ListConversationProfiles", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "ListConversationProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListConversationProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.conversation_profiles.pagers.ListConversationProfilesPager", + "shortName": "list_conversation_profiles" + }, + "description": "Sample for ListConversationProfiles", + "file": "dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_ListConversationProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_list_conversation_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient.set_suggestion_feature_config", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.SetSuggestionFeatureConfig", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "SetSuggestionFeatureConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SetSuggestionFeatureConfigRequest" + }, + { + "name": "conversation_profile", + "type": "str" + }, + { + "name": "participant_role", + "type": "google.cloud.dialogflow_v2beta1.types.Participant.Role" + }, + { + "name": "suggestion_feature_config", + "type": "google.cloud.dialogflow_v2beta1.types.HumanAgentAssistantConfig.SuggestionFeatureConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "set_suggestion_feature_config" + }, + "description": "Sample for SetSuggestionFeatureConfig", + "file": "dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_SetSuggestionFeatureConfig_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient.set_suggestion_feature_config", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.SetSuggestionFeatureConfig", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "SetSuggestionFeatureConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SetSuggestionFeatureConfigRequest" + }, + { + "name": "conversation_profile", + "type": "str" + }, + { + "name": "participant_role", + "type": "google.cloud.dialogflow_v2beta1.types.Participant.Role" + }, + { + "name": "suggestion_feature_config", + "type": "google.cloud.dialogflow_v2beta1.types.HumanAgentAssistantConfig.SuggestionFeatureConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "set_suggestion_feature_config" + }, + "description": "Sample for SetSuggestionFeatureConfig", + "file": "dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_SetSuggestionFeatureConfig_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_set_suggestion_feature_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient", + "shortName": "ConversationProfilesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesAsyncClient.update_conversation_profile", + "method": { + "fullName": 
"google.cloud.dialogflow.v2beta1.ConversationProfiles.UpdateConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "UpdateConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateConversationProfileRequest" + }, + { + "name": "conversation_profile", + "type": "google.cloud.dialogflow_v2beta1.types.ConversationProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.ConversationProfile", + "shortName": "update_conversation_profile" + }, + "description": "Sample for UpdateConversationProfile", + "file": "dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_UpdateConversationProfile_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient", + "shortName": "ConversationProfilesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationProfilesClient.update_conversation_profile", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles.UpdateConversationProfile", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.ConversationProfiles", + "shortName": "ConversationProfiles" + }, + "shortName": "UpdateConversationProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateConversationProfileRequest" + }, + { + "name": "conversation_profile", + "type": "google.cloud.dialogflow_v2beta1.types.ConversationProfile" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.ConversationProfile", + "shortName": "update_conversation_profile" + }, + "description": "Sample for UpdateConversationProfile", + "file": "dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_ConversationProfiles_UpdateConversationProfile_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 
49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversation_profiles_update_conversation_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.batch_create_messages", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.BatchCreateMessages", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "BatchCreateMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchCreateMessagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.BatchCreateMessagesResponse", + "shortName": "batch_create_messages" + }, + "description": "Sample for BatchCreateMessages", + "file": "dialogflow_v2beta1_generated_conversations_batch_create_messages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_BatchCreateMessages_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_batch_create_messages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.batch_create_messages", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.BatchCreateMessages", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "BatchCreateMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchCreateMessagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.BatchCreateMessagesResponse", + "shortName": "batch_create_messages" + }, + "description": "Sample for BatchCreateMessages", + "file": "dialogflow_v2beta1_generated_conversations_batch_create_messages_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_BatchCreateMessages_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_batch_create_messages_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.complete_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.CompleteConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "CompleteConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CompleteConversationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Conversation", + "shortName": "complete_conversation" + }, + "description": "Sample for CompleteConversation", + "file": "dialogflow_v2beta1_generated_conversations_complete_conversation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_CompleteConversation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_complete_conversation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.complete_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.CompleteConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "CompleteConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CompleteConversationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Conversation", + "shortName": "complete_conversation" + }, + "description": "Sample for CompleteConversation", + "file": "dialogflow_v2beta1_generated_conversations_complete_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_CompleteConversation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_complete_conversation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.create_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.CreateConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "CreateConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateConversationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation", + "type": "google.cloud.dialogflow_v2beta1.types.Conversation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Conversation", + "shortName": "create_conversation" + }, + "description": "Sample for CreateConversation", + "file": "dialogflow_v2beta1_generated_conversations_create_conversation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_CreateConversation_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_create_conversation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.create_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.CreateConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "CreateConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateConversationRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversation", + "type": "google.cloud.dialogflow_v2beta1.types.Conversation" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Conversation", + "shortName": "create_conversation" + }, + "description": "Sample for CreateConversation", + "file": "dialogflow_v2beta1_generated_conversations_create_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_CreateConversation_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": 
"FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_create_conversation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.generate_stateless_summary", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.GenerateStatelessSummary", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "GenerateStatelessSummary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GenerateStatelessSummaryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.GenerateStatelessSummaryResponse", + "shortName": "generate_stateless_summary" + }, + "description": "Sample for GenerateStatelessSummary", + "file": "dialogflow_v2beta1_generated_conversations_generate_stateless_summary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_GenerateStatelessSummary_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_generate_stateless_summary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.generate_stateless_summary", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.GenerateStatelessSummary", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "GenerateStatelessSummary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GenerateStatelessSummaryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.GenerateStatelessSummaryResponse", + "shortName": "generate_stateless_summary" + }, + "description": "Sample for GenerateStatelessSummary", + "file": "dialogflow_v2beta1_generated_conversations_generate_stateless_summary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_GenerateStatelessSummary_sync", + "segments": [ + { + 
"end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_generate_stateless_summary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.get_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.GetConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "GetConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetConversationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Conversation", + "shortName": "get_conversation" + }, + "description": "Sample for GetConversation", + "file": "dialogflow_v2beta1_generated_conversations_get_conversation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_GetConversation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_get_conversation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.get_conversation", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.GetConversation", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "GetConversation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetConversationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Conversation", + "shortName": "get_conversation" + }, + "description": "Sample for GetConversation", + "file": "dialogflow_v2beta1_generated_conversations_get_conversation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_GetConversation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_get_conversation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.list_conversations", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.ListConversations", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "ListConversations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListConversationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.conversations.pagers.ListConversationsAsyncPager", + "shortName": "list_conversations" + }, + "description": "Sample for ListConversations", + "file": "dialogflow_v2beta1_generated_conversations_list_conversations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_ListConversations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_list_conversations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.list_conversations", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.ListConversations", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "ListConversations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListConversationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.conversations.pagers.ListConversationsPager", + "shortName": "list_conversations" + }, + "description": "Sample for ListConversations", + "file": "dialogflow_v2beta1_generated_conversations_list_conversations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_ListConversations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": 
"FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_list_conversations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.list_messages", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.ListMessages", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "ListMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListMessagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.conversations.pagers.ListMessagesAsyncPager", + "shortName": "list_messages" + }, + "description": "Sample for ListMessages", + "file": "dialogflow_v2beta1_generated_conversations_list_messages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_ListMessages_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_list_messages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.list_messages", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.ListMessages", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "ListMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListMessagesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.conversations.pagers.ListMessagesPager", + "shortName": "list_messages" + }, + "description": "Sample for ListMessages", + "file": "dialogflow_v2beta1_generated_conversations_list_messages_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_ListMessages_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + 
}, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_list_messages_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.search_knowledge", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.SearchKnowledge", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "SearchKnowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SearchKnowledgeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SearchKnowledgeResponse", + "shortName": "search_knowledge" + }, + "description": "Sample for SearchKnowledge", + "file": "dialogflow_v2beta1_generated_conversations_search_knowledge_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_SearchKnowledge_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_search_knowledge_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.search_knowledge", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.SearchKnowledge", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "SearchKnowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SearchKnowledgeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SearchKnowledgeResponse", + "shortName": "search_knowledge" + }, + "description": "Sample for SearchKnowledge", + "file": "dialogflow_v2beta1_generated_conversations_search_knowledge_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_SearchKnowledge_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { 
+ "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_search_knowledge_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient", + "shortName": "ConversationsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsAsyncClient.suggest_conversation_summary", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.SuggestConversationSummary", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "SuggestConversationSummary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestConversationSummaryRequest" + }, + { + "name": "conversation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SuggestConversationSummaryResponse", + "shortName": "suggest_conversation_summary" + }, + "description": "Sample for SuggestConversationSummary", + "file": "dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_SuggestConversationSummary_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient", + "shortName": "ConversationsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ConversationsClient.suggest_conversation_summary", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations.SuggestConversationSummary", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Conversations", + "shortName": "Conversations" + }, + "shortName": "SuggestConversationSummary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestConversationSummaryRequest" + }, + { + "name": "conversation", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SuggestConversationSummaryResponse", + "shortName": "suggest_conversation_summary" + }, + "description": "Sample for SuggestConversationSummary", + "file": "dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Conversations_SuggestConversationSummary_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, 
+ "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_conversations_suggest_conversation_summary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient.create_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.CreateDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.dialogflow_v2beta1.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "dialogflow_v2beta1_generated_documents_create_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_CreateDocument_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient.create_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.CreateDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.cloud.dialogflow_v2beta1.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "dialogflow_v2beta1_generated_documents_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_CreateDocument_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient.delete_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.DeleteDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "dialogflow_v2beta1_generated_documents_delete_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_DeleteDocument_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient.delete_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.DeleteDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "dialogflow_v2beta1_generated_documents_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_DeleteDocument_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 
52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient.get_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.GetDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "dialogflow_v2beta1_generated_documents_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_GetDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient.get_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.GetDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "dialogflow_v2beta1_generated_documents_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_GetDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_get_document_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient.import_documents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.ImportDocuments", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "dialogflow_v2beta1_generated_documents_import_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_ImportDocuments_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_import_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient.import_documents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.ImportDocuments", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "ImportDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ImportDocumentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_documents" + }, + "description": "Sample for ImportDocuments", + "file": "dialogflow_v2beta1_generated_documents_import_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_ImportDocuments_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_import_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient.list_documents", + "method": { 
+ "fullName": "google.cloud.dialogflow.v2beta1.Documents.ListDocuments", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.documents.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "dialogflow_v2beta1_generated_documents_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_ListDocuments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient.list_documents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.ListDocuments", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.documents.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "dialogflow_v2beta1_generated_documents_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_ListDocuments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient.reload_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.ReloadDocument", + "service": { + "fullName": 
"google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "ReloadDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ReloadDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "gcs_source", + "type": "google.cloud.dialogflow_v2beta1.types.GcsSource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reload_document" + }, + "description": "Sample for ReloadDocument", + "file": "dialogflow_v2beta1_generated_documents_reload_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_ReloadDocument_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_reload_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient.reload_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.ReloadDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "ReloadDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ReloadDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "gcs_source", + "type": "google.cloud.dialogflow_v2beta1.types.GcsSource" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reload_document" + }, + "description": "Sample for ReloadDocument", + "file": "dialogflow_v2beta1_generated_documents_reload_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_ReloadDocument_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_reload_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient", + "shortName": "DocumentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsAsyncClient.update_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.UpdateDocument", + "service": { + "fullName": 
"google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.cloud.dialogflow_v2beta1.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "dialogflow_v2beta1_generated_documents_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_UpdateDocument_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient", + "shortName": "DocumentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.DocumentsClient.update_document", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents.UpdateDocument", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Documents", + "shortName": "Documents" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.cloud.dialogflow_v2beta1.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "dialogflow_v2beta1_generated_documents_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Documents_UpdateDocument_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_documents_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.batch_create_entities", + "method": { + "fullName": 
"google.cloud.dialogflow.v2beta1.EntityTypes.BatchCreateEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchCreateEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchCreateEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entities", + "type": "MutableSequence[google.cloud.dialogflow_v2beta1.types.EntityType.Entity]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_entities" + }, + "description": "Sample for BatchCreateEntities", + "file": "dialogflow_v2beta1_generated_entity_types_batch_create_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchCreateEntities_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_create_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.batch_create_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchCreateEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchCreateEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchCreateEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entities", + "type": "MutableSequence[google.cloud.dialogflow_v2beta1.types.EntityType.Entity]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_create_entities" + }, + "description": "Sample for BatchCreateEntities", + "file": "dialogflow_v2beta1_generated_entity_types_batch_create_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchCreateEntities_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_create_entities_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.batch_delete_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchDeleteEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchDeleteEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchDeleteEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_values", + "type": "MutableSequence[str]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_delete_entities" + }, + "description": "Sample for BatchDeleteEntities", + "file": "dialogflow_v2beta1_generated_entity_types_batch_delete_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntities_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_delete_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.batch_delete_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchDeleteEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchDeleteEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchDeleteEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_values", + "type": "MutableSequence[str]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_delete_entities" + }, + "description": "Sample for BatchDeleteEntities", + "file": "dialogflow_v2beta1_generated_entity_types_batch_delete_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntities_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + 
}, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_delete_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.batch_delete_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchDeleteEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchDeleteEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchDeleteEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_type_names", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_delete_entity_types" + }, + "description": "Sample for BatchDeleteEntityTypes", + "file": "dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntityTypes_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.batch_delete_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchDeleteEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchDeleteEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchDeleteEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_type_names", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_delete_entity_types" + }, + "description": "Sample for BatchDeleteEntityTypes", + "file": "dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchDeleteEntityTypes_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_delete_entity_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.batch_update_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchUpdateEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchUpdateEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchUpdateEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entities", + "type": "MutableSequence[google.cloud.dialogflow_v2beta1.types.EntityType.Entity]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_update_entities" + }, + "description": "Sample for BatchUpdateEntities", + "file": "dialogflow_v2beta1_generated_entity_types_batch_update_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntities_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_update_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.batch_update_entities", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchUpdateEntities", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchUpdateEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchUpdateEntitiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entities", + "type": "MutableSequence[google.cloud.dialogflow_v2beta1.types.EntityType.Entity]" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_update_entities" + }, + "description": "Sample for BatchUpdateEntities", + "file": "dialogflow_v2beta1_generated_entity_types_batch_update_entities_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntities_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_update_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.batch_update_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchUpdateEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchUpdateEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchUpdateEntityTypesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_update_entity_types" + }, + "description": "Sample for BatchUpdateEntityTypes", + "file": "dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntityTypes_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.batch_update_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.BatchUpdateEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "BatchUpdateEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchUpdateEntityTypesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_update_entity_types" + }, + "description": "Sample for BatchUpdateEntityTypes", + "file": "dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2beta1_generated_EntityTypes_BatchUpdateEntityTypes_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_batch_update_entity_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.create_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.CreateEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "CreateEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateEntityTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_type", + "type": "google.cloud.dialogflow_v2beta1.types.EntityType" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.EntityType", + "shortName": "create_entity_type" + }, + "description": "Sample for CreateEntityType", + "file": "dialogflow_v2beta1_generated_entity_types_create_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_CreateEntityType_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_create_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.create_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.CreateEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "CreateEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateEntityTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entity_type", + "type": "google.cloud.dialogflow_v2beta1.types.EntityType" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.EntityType", + "shortName": 
"create_entity_type" + }, + "description": "Sample for CreateEntityType", + "file": "dialogflow_v2beta1_generated_entity_types_create_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_CreateEntityType_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_create_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.delete_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.DeleteEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "DeleteEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entity_type" + }, + "description": "Sample for DeleteEntityType", + "file": "dialogflow_v2beta1_generated_entity_types_delete_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_delete_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.delete_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.DeleteEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "DeleteEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entity_type" + }, + "description": "Sample for DeleteEntityType", + "file": "dialogflow_v2beta1_generated_entity_types_delete_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2beta1_generated_EntityTypes_DeleteEntityType_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_delete_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.get_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.GetEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "GetEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.EntityType", + "shortName": "get_entity_type" + }, + "description": "Sample for GetEntityType", + "file": "dialogflow_v2beta1_generated_entity_types_get_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_GetEntityType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_get_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.get_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.GetEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "GetEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.EntityType", + "shortName": "get_entity_type" + }, + "description": "Sample for GetEntityType", + "file": "dialogflow_v2beta1_generated_entity_types_get_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_GetEntityType_sync", + 
"segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_get_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.list_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.ListEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "ListEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.entity_types.pagers.ListEntityTypesAsyncPager", + "shortName": "list_entity_types" + }, + "description": "Sample for ListEntityTypes", + "file": "dialogflow_v2beta1_generated_entity_types_list_entity_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_ListEntityTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_list_entity_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.list_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.ListEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "ListEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.entity_types.pagers.ListEntityTypesPager", + "shortName": "list_entity_types" + }, + "description": "Sample for ListEntityTypes", + "file": "dialogflow_v2beta1_generated_entity_types_list_entity_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2beta1_generated_EntityTypes_ListEntityTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_list_entity_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient", + "shortName": "EntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesAsyncClient.update_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.UpdateEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "UpdateEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateEntityTypeRequest" + }, + { + "name": "entity_type", + "type": "google.cloud.dialogflow_v2beta1.types.EntityType" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.EntityType", + "shortName": "update_entity_type" + }, + "description": "Sample for UpdateEntityType", + "file": "dialogflow_v2beta1_generated_entity_types_update_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_UpdateEntityType_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_update_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient", + "shortName": "EntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EntityTypesClient.update_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes.UpdateEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.EntityTypes", + "shortName": "EntityTypes" + }, + "shortName": "UpdateEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateEntityTypeRequest" + }, + { + "name": "entity_type", + "type": "google.cloud.dialogflow_v2beta1.types.EntityType" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dialogflow_v2beta1.types.EntityType", + "shortName": "update_entity_type" + }, + "description": "Sample for UpdateEntityType", + "file": "dialogflow_v2beta1_generated_entity_types_update_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_EntityTypes_UpdateEntityType_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_entity_types_update_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient.create_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.CreateEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Environment", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "dialogflow_v2beta1_generated_environments_create_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_CreateEnvironment_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_create_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient.create_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.CreateEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Environment", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": 
"dialogflow_v2beta1_generated_environments_create_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_CreateEnvironment_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_create_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient.delete_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.DeleteEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "dialogflow_v2beta1_generated_environments_delete_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_DeleteEnvironment_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_delete_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient.delete_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.DeleteEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "dialogflow_v2beta1_generated_environments_delete_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_DeleteEnvironment_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_delete_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient.get_environment_history", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.GetEnvironmentHistory", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironmentHistory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetEnvironmentHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.environments.pagers.GetEnvironmentHistoryAsyncPager", + "shortName": "get_environment_history" + }, + "description": "Sample for GetEnvironmentHistory", + "file": "dialogflow_v2beta1_generated_environments_get_environment_history_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_GetEnvironmentHistory_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_get_environment_history_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient.get_environment_history", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.GetEnvironmentHistory", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironmentHistory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetEnvironmentHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.environments.pagers.GetEnvironmentHistoryPager", + "shortName": "get_environment_history" + }, + "description": "Sample for GetEnvironmentHistory", + "file": "dialogflow_v2beta1_generated_environments_get_environment_history_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_GetEnvironmentHistory_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { 
+ "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_get_environment_history_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient.get_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.GetEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "dialogflow_v2beta1_generated_environments_get_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_GetEnvironment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_get_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient.get_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.GetEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "dialogflow_v2beta1_generated_environments_get_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_GetEnvironment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_get_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient.list_environments", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.ListEnvironments", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.environments.pagers.ListEnvironmentsAsyncPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "dialogflow_v2beta1_generated_environments_list_environments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_ListEnvironments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_list_environments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient.list_environments", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.ListEnvironments", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.environments.pagers.ListEnvironmentsPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "dialogflow_v2beta1_generated_environments_list_environments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_ListEnvironments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_list_environments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsAsyncClient.update_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.UpdateEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Environment", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "dialogflow_v2beta1_generated_environments_update_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_UpdateEnvironment_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_environments_update_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.EnvironmentsClient.update_environment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments.UpdateEnvironment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Environments", + "shortName": "Environments" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateEnvironmentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Environment", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "dialogflow_v2beta1_generated_environments_update_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Environments_UpdateEnvironment_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dialogflow_v2beta1_generated_environments_update_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.FulfillmentsAsyncClient", + "shortName": "FulfillmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.FulfillmentsAsyncClient.get_fulfillment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Fulfillments.GetFulfillment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Fulfillments", + "shortName": "Fulfillments" + }, + "shortName": "GetFulfillment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetFulfillmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Fulfillment", + "shortName": "get_fulfillment" + }, + "description": "Sample for GetFulfillment", + "file": "dialogflow_v2beta1_generated_fulfillments_get_fulfillment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Fulfillments_GetFulfillment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_fulfillments_get_fulfillment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.FulfillmentsClient", + "shortName": "FulfillmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.FulfillmentsClient.get_fulfillment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Fulfillments.GetFulfillment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Fulfillments", + "shortName": "Fulfillments" + }, + "shortName": "GetFulfillment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetFulfillmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Fulfillment", + "shortName": "get_fulfillment" + }, + "description": "Sample for GetFulfillment", + "file": "dialogflow_v2beta1_generated_fulfillments_get_fulfillment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Fulfillments_GetFulfillment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_fulfillments_get_fulfillment_sync.py" + }, + { + "canonical": true, + "clientMethod": { 
+ "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.FulfillmentsAsyncClient", + "shortName": "FulfillmentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.FulfillmentsAsyncClient.update_fulfillment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Fulfillments.UpdateFulfillment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Fulfillments", + "shortName": "Fulfillments" + }, + "shortName": "UpdateFulfillment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateFulfillmentRequest" + }, + { + "name": "fulfillment", + "type": "google.cloud.dialogflow_v2beta1.types.Fulfillment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Fulfillment", + "shortName": "update_fulfillment" + }, + "description": "Sample for UpdateFulfillment", + "file": "dialogflow_v2beta1_generated_fulfillments_update_fulfillment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Fulfillments_UpdateFulfillment_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_fulfillments_update_fulfillment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.FulfillmentsClient", + "shortName": "FulfillmentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.FulfillmentsClient.update_fulfillment", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Fulfillments.UpdateFulfillment", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Fulfillments", + "shortName": "Fulfillments" + }, + "shortName": "UpdateFulfillment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateFulfillmentRequest" + }, + { + "name": "fulfillment", + "type": "google.cloud.dialogflow_v2beta1.types.Fulfillment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Fulfillment", + "shortName": "update_fulfillment" + }, + "description": "Sample for UpdateFulfillment", + "file": "dialogflow_v2beta1_generated_fulfillments_update_fulfillment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Fulfillments_UpdateFulfillment_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_fulfillments_update_fulfillment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient.batch_delete_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.BatchDeleteIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "BatchDeleteIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchDeleteIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intents", + "type": "MutableSequence[google.cloud.dialogflow_v2beta1.types.Intent]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_delete_intents" + }, + "description": "Sample for BatchDeleteIntents", + "file": "dialogflow_v2beta1_generated_intents_batch_delete_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_BatchDeleteIntents_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_batch_delete_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient.batch_delete_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.BatchDeleteIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "BatchDeleteIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchDeleteIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intents", + "type": "MutableSequence[google.cloud.dialogflow_v2beta1.types.Intent]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_delete_intents" + }, + "description": "Sample for BatchDeleteIntents", + "file": "dialogflow_v2beta1_generated_intents_batch_delete_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_BatchDeleteIntents_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_batch_delete_intents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient.batch_update_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.BatchUpdateIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "BatchUpdateIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchUpdateIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intent_batch_uri", + "type": "str" + }, + { + "name": "intent_batch_inline", + "type": "google.cloud.dialogflow_v2beta1.types.IntentBatch" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_update_intents" + }, + "description": "Sample for BatchUpdateIntents", + "file": "dialogflow_v2beta1_generated_intents_batch_update_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_BatchUpdateIntents_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_batch_update_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient.batch_update_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.BatchUpdateIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "BatchUpdateIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.BatchUpdateIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intent_batch_uri", + "type": "str" + }, + { + "name": "intent_batch_inline", + "type": "google.cloud.dialogflow_v2beta1.types.IntentBatch" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_update_intents" + }, + "description": "Sample for BatchUpdateIntents", + "file": "dialogflow_v2beta1_generated_intents_batch_update_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_BatchUpdateIntents_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" 
+ }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_batch_update_intents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient.create_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.CreateIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "CreateIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateIntentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intent", + "type": "google.cloud.dialogflow_v2beta1.types.Intent" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Intent", + "shortName": "create_intent" + }, + "description": "Sample for CreateIntent", + "file": "dialogflow_v2beta1_generated_intents_create_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_CreateIntent_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_create_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient.create_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.CreateIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "CreateIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateIntentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "intent", + "type": "google.cloud.dialogflow_v2beta1.types.Intent" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Intent", + "shortName": "create_intent" + }, + "description": "Sample for CreateIntent", + "file": "dialogflow_v2beta1_generated_intents_create_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_CreateIntent_sync", + "segments": [ + { + "end": 55, + 
"start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_create_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient.delete_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.DeleteIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "DeleteIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteIntentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_intent" + }, + "description": "Sample for DeleteIntent", + "file": "dialogflow_v2beta1_generated_intents_delete_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_DeleteIntent_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_delete_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient.delete_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.DeleteIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "DeleteIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteIntentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_intent" + }, + "description": "Sample for DeleteIntent", + "file": "dialogflow_v2beta1_generated_intents_delete_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_DeleteIntent_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dialogflow_v2beta1_generated_intents_delete_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient.get_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.GetIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "GetIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetIntentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Intent", + "shortName": "get_intent" + }, + "description": "Sample for GetIntent", + "file": "dialogflow_v2beta1_generated_intents_get_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_GetIntent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_get_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient.get_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.GetIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "GetIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetIntentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Intent", + "shortName": "get_intent" + }, + "description": "Sample for GetIntent", + "file": "dialogflow_v2beta1_generated_intents_get_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_GetIntent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_get_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dialogflow_v2beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient.list_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.ListIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "ListIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.intents.pagers.ListIntentsAsyncPager", + "shortName": "list_intents" + }, + "description": "Sample for ListIntents", + "file": "dialogflow_v2beta1_generated_intents_list_intents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_ListIntents_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_list_intents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient.list_intents", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.ListIntents", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "ListIntents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListIntentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.intents.pagers.ListIntentsPager", + "shortName": "list_intents" + }, + "description": "Sample for ListIntents", + "file": "dialogflow_v2beta1_generated_intents_list_intents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_ListIntents_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_list_intents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient", + "shortName": "IntentsAsyncClient" + }, + 
"fullName": "google.cloud.dialogflow_v2beta1.IntentsAsyncClient.update_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.UpdateIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "UpdateIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateIntentRequest" + }, + { + "name": "intent", + "type": "google.cloud.dialogflow_v2beta1.types.Intent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Intent", + "shortName": "update_intent" + }, + "description": "Sample for UpdateIntent", + "file": "dialogflow_v2beta1_generated_intents_update_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_UpdateIntent_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_update_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient", + "shortName": "IntentsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.IntentsClient.update_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents.UpdateIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Intents", + "shortName": "Intents" + }, + "shortName": "UpdateIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateIntentRequest" + }, + { + "name": "intent", + "type": "google.cloud.dialogflow_v2beta1.types.Intent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "language_code", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Intent", + "shortName": "update_intent" + }, + "description": "Sample for UpdateIntent", + "file": "dialogflow_v2beta1_generated_intents_update_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Intents_UpdateIntent_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_intents_update_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient.create_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.CreateKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "CreateKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateKnowledgeBaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "knowledge_base", + "type": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase", + "shortName": "create_knowledge_base" + }, + "description": "Sample for CreateKnowledgeBase", + "file": "dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_CreateKnowledgeBase_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient.create_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.CreateKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "CreateKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateKnowledgeBaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "knowledge_base", + "type": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase", + "shortName": "create_knowledge_base" + }, + "description": "Sample for CreateKnowledgeBase", + "file": "dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_CreateKnowledgeBase_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_create_knowledge_base_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient.delete_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.DeleteKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "DeleteKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteKnowledgeBaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_knowledge_base" + }, + "description": "Sample for DeleteKnowledgeBase", + "file": "dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_DeleteKnowledgeBase_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient.delete_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.DeleteKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "DeleteKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteKnowledgeBaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_knowledge_base" + }, + "description": "Sample for DeleteKnowledgeBase", + "file": "dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_DeleteKnowledgeBase_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_delete_knowledge_base_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient.get_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.GetKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "GetKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetKnowledgeBaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase", + "shortName": "get_knowledge_base" + }, + "description": "Sample for GetKnowledgeBase", + "file": "dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_GetKnowledgeBase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient.get_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.GetKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "GetKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetKnowledgeBaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase", + "shortName": "get_knowledge_base" + }, + "description": "Sample for GetKnowledgeBase", + "file": "dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_GetKnowledgeBase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_get_knowledge_base_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": 
true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient.list_knowledge_bases", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.ListKnowledgeBases", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "ListKnowledgeBases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListKnowledgeBasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.knowledge_bases.pagers.ListKnowledgeBasesAsyncPager", + "shortName": "list_knowledge_bases" + }, + "description": "Sample for ListKnowledgeBases", + "file": "dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_ListKnowledgeBases_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient.list_knowledge_bases", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.ListKnowledgeBases", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "ListKnowledgeBases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListKnowledgeBasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.knowledge_bases.pagers.ListKnowledgeBasesPager", + "shortName": "list_knowledge_bases" + }, + "description": "Sample for ListKnowledgeBases", + "file": "dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_ListKnowledgeBases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dialogflow_v2beta1_generated_knowledge_bases_list_knowledge_bases_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient", + "shortName": "KnowledgeBasesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesAsyncClient.update_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.UpdateKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "UpdateKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateKnowledgeBaseRequest" + }, + { + "name": "knowledge_base", + "type": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase", + "shortName": "update_knowledge_base" + }, + "description": "Sample for UpdateKnowledgeBase", + "file": "dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_UpdateKnowledgeBase_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient", + "shortName": "KnowledgeBasesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.KnowledgeBasesClient.update_knowledge_base", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases.UpdateKnowledgeBase", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.KnowledgeBases", + "shortName": "KnowledgeBases" + }, + "shortName": "UpdateKnowledgeBase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateKnowledgeBaseRequest" + }, + { + "name": "knowledge_base", + "type": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.KnowledgeBase", + "shortName": "update_knowledge_base" + }, + "description": "Sample for UpdateKnowledgeBase", + "file": "dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_KnowledgeBases_UpdateKnowledgeBase_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_knowledge_bases_update_knowledge_base_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.analyze_content", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "AnalyzeContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.AnalyzeContentRequest" + }, + { + "name": "participant", + "type": "str" + }, + { + "name": "text_input", + "type": "google.cloud.dialogflow_v2beta1.types.TextInput" + }, + { + "name": "audio_input", + "type": "google.cloud.dialogflow_v2beta1.types.AudioInput" + }, + { + "name": "event_input", + "type": "google.cloud.dialogflow_v2beta1.types.EventInput" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.AnalyzeContentResponse", + "shortName": "analyze_content" + }, + "description": "Sample for AnalyzeContent", + "file": "dialogflow_v2beta1_generated_participants_analyze_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_AnalyzeContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_analyze_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.analyze_content", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "AnalyzeContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.AnalyzeContentRequest" + }, + { + "name": "participant", + "type": "str" + }, + { + "name": "text_input", + "type": "google.cloud.dialogflow_v2beta1.types.TextInput" + }, + { + "name": "audio_input", + "type": "google.cloud.dialogflow_v2beta1.types.AudioInput" + }, + { + "name": "event_input", + "type": "google.cloud.dialogflow_v2beta1.types.EventInput" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" 
+ } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.AnalyzeContentResponse", + "shortName": "analyze_content" + }, + "description": "Sample for AnalyzeContent", + "file": "dialogflow_v2beta1_generated_participants_analyze_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_AnalyzeContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_analyze_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.compile_suggestion", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.CompileSuggestion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "CompileSuggestion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CompileSuggestionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.CompileSuggestionResponse", + "shortName": "compile_suggestion" + }, + "description": "Sample for CompileSuggestion", + "file": "dialogflow_v2beta1_generated_participants_compile_suggestion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_CompileSuggestion_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_compile_suggestion_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.compile_suggestion", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.CompileSuggestion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "CompileSuggestion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CompileSuggestionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.CompileSuggestionResponse", + "shortName": "compile_suggestion" + }, + 
"description": "Sample for CompileSuggestion", + "file": "dialogflow_v2beta1_generated_participants_compile_suggestion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_CompileSuggestion_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_compile_suggestion_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.create_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.CreateParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "CreateParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateParticipantRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "participant", + "type": "google.cloud.dialogflow_v2beta1.types.Participant" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Participant", + "shortName": "create_participant" + }, + "description": "Sample for CreateParticipant", + "file": "dialogflow_v2beta1_generated_participants_create_participant_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_CreateParticipant_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_create_participant_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.create_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.CreateParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "CreateParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateParticipantRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "participant", + "type": "google.cloud.dialogflow_v2beta1.types.Participant" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Participant", + "shortName": "create_participant" + }, + "description": "Sample for CreateParticipant", + "file": "dialogflow_v2beta1_generated_participants_create_participant_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_CreateParticipant_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_create_participant_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.get_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.GetParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "GetParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetParticipantRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Participant", + "shortName": "get_participant" + }, + "description": "Sample for GetParticipant", + "file": "dialogflow_v2beta1_generated_participants_get_participant_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_GetParticipant_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_get_participant_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.get_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.GetParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "GetParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetParticipantRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Participant", + 
"shortName": "get_participant" + }, + "description": "Sample for GetParticipant", + "file": "dialogflow_v2beta1_generated_participants_get_participant_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_GetParticipant_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_get_participant_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.list_participants", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.ListParticipants", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "ListParticipants" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListParticipantsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.participants.pagers.ListParticipantsAsyncPager", + "shortName": "list_participants" + }, + "description": "Sample for ListParticipants", + "file": "dialogflow_v2beta1_generated_participants_list_participants_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_ListParticipants_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_list_participants_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.list_participants", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.ListParticipants", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "ListParticipants" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListParticipantsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.participants.pagers.ListParticipantsPager", + "shortName": 
"list_participants" + }, + "description": "Sample for ListParticipants", + "file": "dialogflow_v2beta1_generated_participants_list_participants_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_ListParticipants_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_list_participants_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.list_suggestions", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.ListSuggestions", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "ListSuggestions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListSuggestionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.participants.pagers.ListSuggestionsAsyncPager", + "shortName": "list_suggestions" + }, + "description": "Sample for ListSuggestions", + "file": "dialogflow_v2beta1_generated_participants_list_suggestions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_ListSuggestions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_list_suggestions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.list_suggestions", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.ListSuggestions", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "ListSuggestions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListSuggestionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.participants.pagers.ListSuggestionsPager", + "shortName": "list_suggestions" + }, + "description": "Sample for ListSuggestions", + "file": 
"dialogflow_v2beta1_generated_participants_list_suggestions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_ListSuggestions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_list_suggestions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.streaming_analyze_content", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.StreamingAnalyzeContent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "StreamingAnalyzeContent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.dialogflow_v2beta1.types.StreamingAnalyzeContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.dialogflow_v2beta1.types.StreamingAnalyzeContentResponse]", + "shortName": "streaming_analyze_content" + }, + "description": "Sample for StreamingAnalyzeContent", + "file": "dialogflow_v2beta1_generated_participants_streaming_analyze_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_StreamingAnalyzeContent_async", + "segments": [ + { + "end": 69, + "start": 27, + "type": "FULL" + }, + { + "end": 69, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 62, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 65, + "start": 63, + "type": "REQUEST_EXECUTION" + }, + { + "end": 70, + "start": 66, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_streaming_analyze_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.streaming_analyze_content", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.StreamingAnalyzeContent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "StreamingAnalyzeContent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.dialogflow_v2beta1.types.StreamingAnalyzeContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.dialogflow_v2beta1.types.StreamingAnalyzeContentResponse]", + "shortName": "streaming_analyze_content" + }, + "description": "Sample for 
StreamingAnalyzeContent", + "file": "dialogflow_v2beta1_generated_participants_streaming_analyze_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_StreamingAnalyzeContent_sync", + "segments": [ + { + "end": 69, + "start": 27, + "type": "FULL" + }, + { + "end": 69, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 62, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 65, + "start": 63, + "type": "REQUEST_EXECUTION" + }, + { + "end": 70, + "start": 66, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_streaming_analyze_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.suggest_articles", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.SuggestArticles", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestArticles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestArticlesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SuggestArticlesResponse", + "shortName": "suggest_articles" + }, + "description": "Sample for SuggestArticles", + "file": "dialogflow_v2beta1_generated_participants_suggest_articles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_SuggestArticles_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_suggest_articles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.suggest_articles", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.SuggestArticles", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestArticles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestArticlesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SuggestArticlesResponse", + "shortName": "suggest_articles" + }, + "description": "Sample for SuggestArticles", + "file": 
"dialogflow_v2beta1_generated_participants_suggest_articles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_SuggestArticles_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_suggest_articles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.suggest_faq_answers", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.SuggestFaqAnswers", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestFaqAnswers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestFaqAnswersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SuggestFaqAnswersResponse", + "shortName": "suggest_faq_answers" + }, + "description": "Sample for SuggestFaqAnswers", + "file": "dialogflow_v2beta1_generated_participants_suggest_faq_answers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_SuggestFaqAnswers_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_suggest_faq_answers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.suggest_faq_answers", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.SuggestFaqAnswers", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestFaqAnswers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestFaqAnswersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SuggestFaqAnswersResponse", + "shortName": "suggest_faq_answers" + }, + "description": "Sample for SuggestFaqAnswers", + "file": 
"dialogflow_v2beta1_generated_participants_suggest_faq_answers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_SuggestFaqAnswers_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_suggest_faq_answers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.suggest_smart_replies", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.SuggestSmartReplies", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestSmartReplies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestSmartRepliesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SuggestSmartRepliesResponse", + "shortName": "suggest_smart_replies" + }, + "description": "Sample for SuggestSmartReplies", + "file": "dialogflow_v2beta1_generated_participants_suggest_smart_replies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_SuggestSmartReplies_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_suggest_smart_replies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.suggest_smart_replies", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.SuggestSmartReplies", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "SuggestSmartReplies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.SuggestSmartRepliesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SuggestSmartRepliesResponse", + "shortName": "suggest_smart_replies" + }, + "description": "Sample for SuggestSmartReplies", + 
"file": "dialogflow_v2beta1_generated_participants_suggest_smart_replies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_SuggestSmartReplies_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_suggest_smart_replies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient", + "shortName": "ParticipantsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsAsyncClient.update_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.UpdateParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "UpdateParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateParticipantRequest" + }, + { + "name": "participant", + "type": "google.cloud.dialogflow_v2beta1.types.Participant" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Participant", + "shortName": "update_participant" + }, + "description": "Sample for UpdateParticipant", + "file": "dialogflow_v2beta1_generated_participants_update_participant_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_UpdateParticipant_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_update_participant_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient", + "shortName": "ParticipantsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.ParticipantsClient.update_participant", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants.UpdateParticipant", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Participants", + "shortName": "Participants" + }, + "shortName": "UpdateParticipant" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateParticipantRequest" + }, + { + "name": "participant", + "type": "google.cloud.dialogflow_v2beta1.types.Participant" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Participant", + "shortName": "update_participant" + }, + "description": "Sample for UpdateParticipant", + "file": "dialogflow_v2beta1_generated_participants_update_participant_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Participants_UpdateParticipant_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_participants_update_participant_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient.create_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.CreateSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "CreateSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateSessionEntityTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "session_entity_type", + "type": "google.cloud.dialogflow_v2beta1.types.SessionEntityType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SessionEntityType", + "shortName": "create_session_entity_type" + }, + "description": "Sample for CreateSessionEntityType", + "file": "dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_CreateSessionEntityType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient.create_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.CreateSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "CreateSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dialogflow_v2beta1.types.CreateSessionEntityTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "session_entity_type", + "type": "google.cloud.dialogflow_v2beta1.types.SessionEntityType" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SessionEntityType", + "shortName": "create_session_entity_type" + }, + "description": "Sample for CreateSessionEntityType", + "file": "dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_CreateSessionEntityType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_create_session_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient.delete_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.DeleteSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "DeleteSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteSessionEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_session_entity_type" + }, + "description": "Sample for DeleteSessionEntityType", + "file": "dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_DeleteSessionEntityType_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient.delete_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.DeleteSessionEntityType", + "service": { + "fullName": 
"google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "DeleteSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteSessionEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_session_entity_type" + }, + "description": "Sample for DeleteSessionEntityType", + "file": "dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_DeleteSessionEntityType_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_delete_session_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient.get_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.GetSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "GetSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetSessionEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SessionEntityType", + "shortName": "get_session_entity_type" + }, + "description": "Sample for GetSessionEntityType", + "file": "dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_GetSessionEntityType_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient.get_session_entity_type", + "method": { + "fullName": 
"google.cloud.dialogflow.v2beta1.SessionEntityTypes.GetSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "GetSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetSessionEntityTypeRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SessionEntityType", + "shortName": "get_session_entity_type" + }, + "description": "Sample for GetSessionEntityType", + "file": "dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_GetSessionEntityType_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_get_session_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient.list_session_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.ListSessionEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "ListSessionEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListSessionEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.session_entity_types.pagers.ListSessionEntityTypesAsyncPager", + "shortName": "list_session_entity_types" + }, + "description": "Sample for ListSessionEntityTypes", + "file": "dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_ListSessionEntityTypes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dialogflow_v2beta1.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient.list_session_entity_types", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.ListSessionEntityTypes", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "ListSessionEntityTypes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListSessionEntityTypesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.session_entity_types.pagers.ListSessionEntityTypesPager", + "shortName": "list_session_entity_types" + }, + "description": "Sample for ListSessionEntityTypes", + "file": "dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_ListSessionEntityTypes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_list_session_entity_types_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient", + "shortName": "SessionEntityTypesAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesAsyncClient.update_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.UpdateSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "UpdateSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateSessionEntityTypeRequest" + }, + { + "name": "session_entity_type", + "type": "google.cloud.dialogflow_v2beta1.types.SessionEntityType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SessionEntityType", + "shortName": "update_session_entity_type" + }, + "description": "Sample for UpdateSessionEntityType", + "file": "dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_UpdateSessionEntityType_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient", + "shortName": "SessionEntityTypesClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionEntityTypesClient.update_session_entity_type", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes.UpdateSessionEntityType", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + "shortName": "SessionEntityTypes" + }, + "shortName": "UpdateSessionEntityType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateSessionEntityTypeRequest" + }, + { + "name": "session_entity_type", + "type": "google.cloud.dialogflow_v2beta1.types.SessionEntityType" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.SessionEntityType", + "shortName": "update_session_entity_type" + }, + "description": "Sample for UpdateSessionEntityType", + "file": "dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_SessionEntityTypes_UpdateSessionEntityType_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_session_entity_types_update_session_entity_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionsAsyncClient", + "shortName": "SessionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionsAsyncClient.detect_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Sessions.DetectIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Sessions", + "shortName": "Sessions" + }, + "shortName": "DetectIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DetectIntentRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "query_input", + "type": "google.cloud.dialogflow_v2beta1.types.QueryInput" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.DetectIntentResponse", + "shortName": "detect_intent" + }, + "description": "Sample for DetectIntent", + "file": "dialogflow_v2beta1_generated_sessions_detect_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2beta1_generated_Sessions_DetectIntent_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_sessions_detect_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionsClient", + "shortName": "SessionsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionsClient.detect_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Sessions.DetectIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Sessions", + "shortName": "Sessions" + }, + "shortName": "DetectIntent" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DetectIntentRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "query_input", + "type": "google.cloud.dialogflow_v2beta1.types.QueryInput" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.DetectIntentResponse", + "shortName": "detect_intent" + }, + "description": "Sample for DetectIntent", + "file": "dialogflow_v2beta1_generated_sessions_detect_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Sessions_DetectIntent_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_sessions_detect_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionsAsyncClient", + "shortName": "SessionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionsAsyncClient.streaming_detect_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Sessions.StreamingDetectIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Sessions", + "shortName": "Sessions" + }, + "shortName": "StreamingDetectIntent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.dialogflow_v2beta1.types.StreamingDetectIntentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.dialogflow_v2beta1.types.StreamingDetectIntentResponse]", + "shortName": "streaming_detect_intent" + }, + "description": "Sample for StreamingDetectIntent", + "file": "dialogflow_v2beta1_generated_sessions_streaming_detect_intent_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2beta1_generated_Sessions_StreamingDetectIntent_async", + "segments": [ + { + "end": 68, + "start": 27, + "type": "FULL" + }, + { + "end": 68, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 61, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 62, + "type": "REQUEST_EXECUTION" + }, + { + "end": 69, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_sessions_streaming_detect_intent_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.SessionsClient", + "shortName": "SessionsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.SessionsClient.streaming_detect_intent", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Sessions.StreamingDetectIntent", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Sessions", + "shortName": "Sessions" + }, + "shortName": "StreamingDetectIntent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.dialogflow_v2beta1.types.StreamingDetectIntentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.dialogflow_v2beta1.types.StreamingDetectIntentResponse]", + "shortName": "streaming_detect_intent" + }, + "description": "Sample for StreamingDetectIntent", + "file": "dialogflow_v2beta1_generated_sessions_streaming_detect_intent_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Sessions_StreamingDetectIntent_sync", + "segments": [ + { + "end": 68, + "start": 27, + "type": "FULL" + }, + { + "end": 68, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 61, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 64, + "start": 62, + "type": "REQUEST_EXECUTION" + }, + { + "end": 69, + "start": 65, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_sessions_streaming_detect_intent_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient.create_version", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.CreateVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "CreateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateVersionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "version", + "type": "google.cloud.dialogflow_v2beta1.types.Version" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Version", + "shortName": "create_version" + }, + "description": "Sample for CreateVersion", + "file": "dialogflow_v2beta1_generated_versions_create_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dialogflow_v2beta1_generated_Versions_CreateVersion_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_create_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient.create_version", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.CreateVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "CreateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.CreateVersionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "version", + "type": "google.cloud.dialogflow_v2beta1.types.Version" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Version", + "shortName": "create_version" + }, + "description": "Sample for CreateVersion", + "file": "dialogflow_v2beta1_generated_versions_create_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_CreateVersion_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_create_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient.delete_version", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.DeleteVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "DeleteVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_version" + }, + "description": "Sample for DeleteVersion", + "file": "dialogflow_v2beta1_generated_versions_delete_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_DeleteVersion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_delete_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient.delete_version", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.DeleteVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "DeleteVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeleteVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_version" + }, + "description": "Sample for DeleteVersion", + "file": "dialogflow_v2beta1_generated_versions_delete_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_DeleteVersion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_delete_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient.get_version", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.GetVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "GetVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Version", + "shortName": "get_version" + }, + "description": "Sample for GetVersion", + "file": "dialogflow_v2beta1_generated_versions_get_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_GetVersion_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_get_version_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient.get_version", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.GetVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "GetVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.GetVersionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Version", + "shortName": "get_version" + }, + "description": "Sample for GetVersion", + "file": "dialogflow_v2beta1_generated_versions_get_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_GetVersion_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_get_version_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient.list_versions", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.ListVersions", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "ListVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.versions.pagers.ListVersionsAsyncPager", + "shortName": "list_versions" + }, + "description": "Sample for ListVersions", + "file": "dialogflow_v2beta1_generated_versions_list_versions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_ListVersions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_list_versions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": 
"google.cloud.dialogflow_v2beta1.VersionsClient.list_versions", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.ListVersions", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "ListVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.versions.pagers.ListVersionsPager", + "shortName": "list_versions" + }, + "description": "Sample for ListVersions", + "file": "dialogflow_v2beta1_generated_versions_list_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_ListVersions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_list_versions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient", + "shortName": "VersionsAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsAsyncClient.update_version", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.UpdateVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "UpdateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateVersionRequest" + }, + { + "name": "version", + "type": "google.cloud.dialogflow_v2beta1.types.Version" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Version", + "shortName": "update_version" + }, + "description": "Sample for UpdateVersion", + "file": "dialogflow_v2beta1_generated_versions_update_version_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_UpdateVersion_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_update_version_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient", + "shortName": "VersionsClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.VersionsClient.update_version", 
+ "method": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions.UpdateVersion", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.Versions", + "shortName": "Versions" + }, + "shortName": "UpdateVersion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdateVersionRequest" + }, + { + "name": "version", + "type": "google.cloud.dialogflow_v2beta1.types.Version" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.Version", + "shortName": "update_version" + }, + "description": "Sample for UpdateVersion", + "file": "dialogflow_v2beta1_generated_versions_update_version_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_Versions_UpdateVersion_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_versions_update_version_sync.py" + } + ] +} diff --git a/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2_keywords.py b/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2_keywords.py index 584f1e81436b..9c6224a8face 100644 --- a/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2_keywords.py +++ b/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2_keywords.py @@ -124,7 +124,7 @@ class dialogflowCallTransformer(cst.CSTTransformer): 'search_knowledge': ('query', 'conversation_profile', 'parent', 'session_id', 'conversation', 'latest_message', ), 'set_agent': ('agent', 'update_mask', ), 'set_suggestion_feature_config': ('conversation_profile', 'participant_role', 'suggestion_feature_config', ), - 'streaming_analyze_content': ('participant', 'audio_config', 'text_config', 'reply_audio_config', 'input_audio', 'input_text', 'input_dtmf', 'query_params', 'assist_query_params', 'cx_parameters', 'enable_partial_automated_agent_reply', 'enable_debugging_info', ), + 'streaming_analyze_content': ('participant', 'audio_config', 'text_config', 'reply_audio_config', 'input_audio', 'input_text', 'input_dtmf', 'query_params', 'assist_query_params', 'cx_parameters', 'enable_extended_streaming', 'enable_partial_automated_agent_reply', 'enable_debugging_info', ), 'streaming_detect_intent': ('session', 'query_input', 'query_params', 'single_utterance', 'output_audio_config', 'output_audio_config_mask', 'input_audio', 'enable_debugging_info', ), 'suggest_articles': ('parent', 'latest_message', 'context_size', 'assist_query_params', ), 'suggest_conversation_summary': ('conversation', 'latest_message', 'context_size', 'assist_query_params', ), diff --git a/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2beta1_keywords.py b/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2beta1_keywords.py index 26a9e1d9206b..eb470b1de1e7 100644 --- a/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2beta1_keywords.py +++ 
b/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2beta1_keywords.py @@ -114,7 +114,7 @@ class dialogflowCallTransformer(cst.CSTTransformer): 'search_knowledge': ('query', 'conversation_profile', 'parent', 'session_id', 'conversation', 'latest_message', ), 'set_agent': ('agent', 'update_mask', ), 'set_suggestion_feature_config': ('conversation_profile', 'participant_role', 'suggestion_feature_config', ), - 'streaming_analyze_content': ('participant', 'audio_config', 'text_config', 'reply_audio_config', 'input_audio', 'input_text', 'input_dtmf', 'query_params', 'assist_query_params', 'cx_parameters', 'cx_current_page', 'enable_partial_automated_agent_reply', 'enable_debugging_info', ), + 'streaming_analyze_content': ('participant', 'audio_config', 'text_config', 'reply_audio_config', 'input_audio', 'input_text', 'input_dtmf', 'query_params', 'assist_query_params', 'cx_parameters', 'cx_current_page', 'enable_extended_streaming', 'enable_partial_automated_agent_reply', 'enable_debugging_info', ), 'streaming_detect_intent': ('session', 'query_input', 'query_params', 'single_utterance', 'output_audio_config', 'output_audio_config_mask', 'input_audio', 'enable_debugging_info', ), 'suggest_articles': ('parent', 'latest_message', 'context_size', 'assist_query_params', ), 'suggest_conversation_summary': ('conversation', 'latest_message', 'context_size', 'assist_query_params', ), diff --git a/packages/google-cloud-dlp/.OwlBot.yaml b/packages/google-cloud-dlp/.OwlBot.yaml new file mode 100644 index 000000000000..4e7ea79cc128 --- /dev/null +++ b/packages/google-cloud-dlp/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/privacy/dlp/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-dlp/$1 + +begin-after-commit-hash: ee56c3493ec6aeb237ff515ecea949710944a20f + diff --git a/packages/google-cloud-dlp/.coveragerc b/packages/google-cloud-dlp/.coveragerc new file mode 100644 index 000000000000..76798ec25cc0 --- /dev/null +++ b/packages/google-cloud-dlp/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dlp/__init__.py + google/cloud/dlp/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-dlp/.flake8 b/packages/google-cloud-dlp/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-dlp/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-dlp/.gitignore b/packages/google-cloud-dlp/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-dlp/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-dlp/.repo-metadata.json b/packages/google-cloud-dlp/.repo-metadata.json new file mode 100644 index 000000000000..2e2ad34caad0 --- /dev/null +++ b/packages/google-cloud-dlp/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "dlp", + "name_pretty": "Cloud Data Loss Prevention", + "product_documentation": "https://cloud.google.com/dlp/docs/", + "client_documentation": "https://cloud.google.com/python/docs/reference/dlp/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-dlp", + "api_id": "dlp.googleapis.com", + "requires_billing": true, + "default_version": "v2", + "codeowner_team": "", + "api_shortname": "dlp", + "api_description": "provides programmatic access to a powerful detection engine for personally identifiable information and other privacy-sensitive data in unstructured data streams, like text blocks and images." 
+} diff --git a/packages/google-cloud-dlp/CHANGELOG.md b/packages/google-cloud-dlp/CHANGELOG.md new file mode 100644 index 000000000000..9ea8cdf35542 --- /dev/null +++ b/packages/google-cloud-dlp/CHANGELOG.md @@ -0,0 +1,551 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-dlp/#history + +## [3.12.3](https://github.com/googleapis/python-dlp/compare/v3.12.2...v3.12.3) (2023-09-13) + + +### Documentation + +* Minor formatting ([#520](https://github.com/googleapis/python-dlp/issues/520)) ([d8a3639](https://github.com/googleapis/python-dlp/commit/d8a363969e1d19e36f6a5c57ec2d439300c95082)) + +## [3.12.2](https://github.com/googleapis/python-dlp/compare/v3.12.1...v3.12.2) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#505](https://github.com/googleapis/python-dlp/issues/505)) ([a4099cc](https://github.com/googleapis/python-dlp/commit/a4099cca37464c6b94531d327bdfd7ab600f7159)) + +## [3.12.1](https://github.com/googleapis/python-dlp/compare/v3.12.0...v3.12.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#497](https://github.com/googleapis/python-dlp/issues/497)) ([60cafe1](https://github.com/googleapis/python-dlp/commit/60cafe18f98c9b46553c4cba521b95e4b1c5e6d1)) + +## [3.12.0](https://github.com/googleapis/python-dlp/compare/v3.11.1...v3.12.0) (2023-02-28) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#491](https://github.com/googleapis/python-dlp/issues/491)) ([4265240](https://github.com/googleapis/python-dlp/commit/4265240f00974b83a354300c49010f03ac7c8b7d)) + +## [3.11.1](https://github.com/googleapis/python-dlp/compare/v3.11.0...v3.11.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([29a4548](https://github.com/googleapis/python-dlp/commit/29a45484fc9397330b6166e451bb3efcc29d6f01)) + + +### Documentation + +* Add documentation for enums ([29a4548](https://github.com/googleapis/python-dlp/commit/29a45484fc9397330b6166e451bb3efcc29d6f01)) + +## [3.11.0](https://github.com/googleapis/python-dlp/compare/v3.10.1...v3.11.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#474](https://github.com/googleapis/python-dlp/issues/474)) ([5e88d1e](https://github.com/googleapis/python-dlp/commit/5e88d1e128459fae385dc3d855623cc86e6c99be)) + +## [3.10.1](https://github.com/googleapis/python-dlp/compare/v3.10.0...v3.10.1) (2023-01-05) + + +### Documentation + +* **samples:** Adding a missing line as suggested by feedback ([#469](https://github.com/googleapis/python-dlp/issues/469)) ([8216416](https://github.com/googleapis/python-dlp/commit/8216416485fe2e450e622b1eba24a097c20ada19)) + +## [3.10.0](https://github.com/googleapis/python-dlp/compare/v3.9.2...v3.10.0) (2022-12-15) + + +### Features + +* Add support for `google.cloud.dlp.__version__` ([bf3e815](https://github.com/googleapis/python-dlp/commit/bf3e8155d4a56b3016e9313c6b543f2d356eab6b)) +* Add typing to proto.Message based class attributes ([bf3e815](https://github.com/googleapis/python-dlp/commit/bf3e8155d4a56b3016e9313c6b543f2d356eab6b)) +* ExcludeByHotword added as an ExclusionRule type ([bf3e815](https://github.com/googleapis/python-dlp/commit/bf3e8155d4a56b3016e9313c6b543f2d356eab6b)) +* NEW_ZEALAND added as a LocationCategory value 
([bf3e815](https://github.com/googleapis/python-dlp/commit/bf3e8155d4a56b3016e9313c6b543f2d356eab6b)) + + +### Bug Fixes + +* Add dict typing for client_options ([bf3e815](https://github.com/googleapis/python-dlp/commit/bf3e8155d4a56b3016e9313c6b543f2d356eab6b)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([2e9826d](https://github.com/googleapis/python-dlp/commit/2e9826d20e10916a07be781c5098caf47a0b0b10)) +* Drop usage of pkg_resources ([2e9826d](https://github.com/googleapis/python-dlp/commit/2e9826d20e10916a07be781c5098caf47a0b0b10)) +* Fix timeout default values ([2e9826d](https://github.com/googleapis/python-dlp/commit/2e9826d20e10916a07be781c5098caf47a0b0b10)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([bf3e815](https://github.com/googleapis/python-dlp/commit/bf3e8155d4a56b3016e9313c6b543f2d356eab6b)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([2e9826d](https://github.com/googleapis/python-dlp/commit/2e9826d20e10916a07be781c5098caf47a0b0b10)) + +## [3.9.2](https://github.com/googleapis/python-dlp/compare/v3.9.1...v3.9.2) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#451](https://github.com/googleapis/python-dlp/issues/451)) ([a1e1b92](https://github.com/googleapis/python-dlp/commit/a1e1b9278e47ecff3f6d92f21dfc55368c7d35e1)) + +## [3.9.1](https://github.com/googleapis/python-dlp/compare/v3.9.0...v3.9.1) (2022-10-04) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#446](https://github.com/googleapis/python-dlp/issues/446)) ([2d63fa5](https://github.com/googleapis/python-dlp/commit/2d63fa547e8f593773722d049614570e01bba42a)) + + +### Documentation + +* Deprecate extra field to avoid confusion ([#447](https://github.com/googleapis/python-dlp/issues/447)) ([8ee182f](https://github.com/googleapis/python-dlp/commit/8ee182f1c5022bc7ef7e4d2295106e8d5ef247f5)) + +## [3.9.0](https://github.com/googleapis/python-dlp/compare/v3.8.1...v3.9.0) (2022-09-06) + + +### Features + +* Add Deidentify action ([#438](https://github.com/googleapis/python-dlp/issues/438)) ([c28073b](https://github.com/googleapis/python-dlp/commit/c28073b945d5539518bedaaf3903fa71d4ddb0d4)) + +## [3.8.1](https://github.com/googleapis/python-dlp/compare/v3.8.0...v3.8.1) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#422](https://github.com/googleapis/python-dlp/issues/422)) ([c179361](https://github.com/googleapis/python-dlp/commit/c179361d1686a8f66c7c812d7a171b1813d46f3c)) +* **deps:** require proto-plus >= 1.22.0 ([c179361](https://github.com/googleapis/python-dlp/commit/c179361d1686a8f66c7c812d7a171b1813d46f3c)) + +## [3.8.0](https://github.com/googleapis/python-dlp/compare/v3.7.1...v3.8.0) (2022-07-16) + + +### Features + +* add audience parameter ([6a3d7ec](https://github.com/googleapis/python-dlp/commit/6a3d7ec17783fd6b3486b2bd5a04cb33d65acb3e)) +* InfoType categories were added to built-in infoTypes ([#409](https://github.com/googleapis/python-dlp/issues/409)) ([6a3d7ec](https://github.com/googleapis/python-dlp/commit/6a3d7ec17783fd6b3486b2bd5a04cb33d65acb3e)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([6a3d7ec](https://github.com/googleapis/python-dlp/commit/6a3d7ec17783fd6b3486b2bd5a04cb33d65acb3e)) +* 
require python 3.7+ ([#411](https://github.com/googleapis/python-dlp/issues/411)) ([232001d](https://github.com/googleapis/python-dlp/commit/232001d2c15731c20d2b98f837906799b35309b6)) + +## [3.7.1](https://github.com/googleapis/python-dlp/compare/v3.7.0...v3.7.1) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#395](https://github.com/googleapis/python-dlp/issues/395)) ([d8760a1](https://github.com/googleapis/python-dlp/commit/d8760a12f4d566cb64df4e4aec3641cb6aa8e588)) +* drop dependency pytz ([d8760a1](https://github.com/googleapis/python-dlp/commit/d8760a12f4d566cb64df4e4aec3641cb6aa8e588)) + + +### Documentation + +* fix changelog header to consistent size ([#396](https://github.com/googleapis/python-dlp/issues/396)) ([d09ac69](https://github.com/googleapis/python-dlp/commit/d09ac693f6b356bf5da1e26e522168bc2376872e)) + +## [3.7.0](https://github.com/googleapis/python-dlp/compare/v3.6.2...v3.7.0) (2022-05-12) + + +### Features + +* add DataProfilePubSubMessage supporting pub/sub integration ([#363](https://github.com/googleapis/python-dlp/issues/363)) ([15a4653](https://github.com/googleapis/python-dlp/commit/15a4653426b2a614a22152ca0a4b457fd8696d3a)) +* new Bytes and File types POWERPOINT and EXCEL ([#355](https://github.com/googleapis/python-dlp/issues/355)) ([be8c8b1](https://github.com/googleapis/python-dlp/commit/be8c8b145d8ecad24a9c56f4ab26520700b157a8)) + +## [3.6.2](https://github.com/googleapis/python-dlp/compare/v3.6.1...v3.6.2) (2022-03-05) + + +### Bug Fixes + +* **deps:** require proto-plus>=1.15.0 ([#342](https://github.com/googleapis/python-dlp/issues/342)) ([81ae7b6](https://github.com/googleapis/python-dlp/commit/81ae7b6c25071f18c356b62d2df4234f43fe1fec)) + +## [3.6.1](https://github.com/googleapis/python-dlp/compare/v3.6.0...v3.6.1) (2022-02-26) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([#325](https://github.com/googleapis/python-dlp/issues/325)) ([676f1d7](https://github.com/googleapis/python-dlp/commit/676f1d76158c6c0951e75362d5eb34f57d901712)) + + +### Documentation + +* **dlp-samples:** modified region tags and fixed comment ([#330](https://github.com/googleapis/python-dlp/issues/330)) ([6375f90](https://github.com/googleapis/python-dlp/commit/6375f90805c5e30c995c47d1538fb08882afb518)) + +## [3.6.0](https://github.com/googleapis/python-dlp/compare/v3.5.0...v3.6.0) (2022-01-26) + + +### Features + +* add api key support ([#320](https://github.com/googleapis/python-dlp/issues/320)) ([ac2fe87](https://github.com/googleapis/python-dlp/commit/ac2fe8702b31f687935938b9fb089953e9a3af48)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >2.3.0 ([#322](https://github.com/googleapis/python-dlp/issues/322)) ([24d07e3](https://github.com/googleapis/python-dlp/commit/24d07e3af30b694b4b73b40fb2a5f19c276d6d98)) + +## [3.5.0](https://github.com/googleapis/python-dlp/compare/v3.4.0...v3.5.0) (2022-01-16) + + +### Features + +* add support for Python 3.9 / 3.10 ([#300](https://github.com/googleapis/python-dlp/issues/300)) ([ac58bde](https://github.com/googleapis/python-dlp/commit/ac58bde1f9d361f56ecf942319d1c427159a02e9)) + +## [3.4.0](https://www.github.com/googleapis/python-dlp/compare/v3.3.1...v3.4.0) (2021-12-03) + + +### Features + +* added deidentify 
replacement dictionaries ([#296](https://www.github.com/googleapis/python-dlp/issues/296)) ([63e9661](https://www.github.com/googleapis/python-dlp/commit/63e96614ba72e4ae8e0eafe4139d5329e75a3c18)) +* added field for BigQuery inspect template inclusion lists ([63e9661](https://www.github.com/googleapis/python-dlp/commit/63e96614ba72e4ae8e0eafe4139d5329e75a3c18)) +* added field to support infotype versioning ([63e9661](https://www.github.com/googleapis/python-dlp/commit/63e96614ba72e4ae8e0eafe4139d5329e75a3c18)) + +## [3.3.1](https://www.github.com/googleapis/python-dlp/compare/v3.3.0...v3.3.1) (2021-11-05) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([84181e9](https://www.github.com/googleapis/python-dlp/commit/84181e971ee04b46a603119d44410816fd7f04be)) +* **deps:** require google-api-core >= 1.28.0 ([84181e9](https://www.github.com/googleapis/python-dlp/commit/84181e971ee04b46a603119d44410816fd7f04be)) +* fix extras_require typo in setup.py ([84181e9](https://www.github.com/googleapis/python-dlp/commit/84181e971ee04b46a603119d44410816fd7f04be)) + + +### Documentation + +* list oneofs in docstring ([84181e9](https://www.github.com/googleapis/python-dlp/commit/84181e971ee04b46a603119d44410816fd7f04be)) + +## [3.3.0](https://www.github.com/googleapis/python-dlp/compare/v3.2.4...v3.3.0) (2021-10-26) + + +### Features + +* add context manager support in client ([#272](https://www.github.com/googleapis/python-dlp/issues/272)) ([c0ba4eb](https://www.github.com/googleapis/python-dlp/commit/c0ba4eb27304c4e216864f6707693b27dc22c214)) + +## [3.2.4](https://www.github.com/googleapis/python-dlp/compare/v3.2.3...v3.2.4) (2021-10-05) + + +### Bug Fixes + +* improper types in pagers generation ([164977f](https://www.github.com/googleapis/python-dlp/commit/164977fda1fff85a245869ff197c3ca9f200f544)) + +## [3.2.3](https://www.github.com/googleapis/python-dlp/compare/v3.2.2...v3.2.3) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([ff98215](https://www.github.com/googleapis/python-dlp/commit/ff98215e7dc3fc6a2e8b04e3b8e570cd72556f4f)) + +## [3.2.2](https://www.github.com/googleapis/python-dlp/compare/v3.2.1...v3.2.2) (2021-07-27) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#218](https://www.github.com/googleapis/python-dlp/issues/218)) ([584a887](https://www.github.com/googleapis/python-dlp/commit/584a887ac2bb648ebac439d4044f3fd8f12a01f4)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#210](https://www.github.com/googleapis/python-dlp/issues/210)) ([566827b](https://www.github.com/googleapis/python-dlp/commit/566827ba4cead4a5237fed370da132dd6fb55602)) + + +### Miscellaneous Chores + +* release as 3.2.2 ([#219](https://www.github.com/googleapis/python-dlp/issues/219)) ([5618115](https://www.github.com/googleapis/python-dlp/commit/56181152dbc1e48a70583e81dbe0fc089725f463)) + +## [3.2.1](https://www.github.com/googleapis/python-dlp/compare/v3.2.0...v3.2.1) (2021-07-21) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#209](https://www.github.com/googleapis/python-dlp/issues/209)) ([a016e6b](https://www.github.com/googleapis/python-dlp/commit/a016e6bd69a04b1e68efe48dd77493bd5267fbe5)) + +## [3.2.0](https://www.github.com/googleapis/python-dlp/compare/v3.1.1...v3.2.0) (2021-07-12) + + +### Features + +* add always_use_jwt_access ([#172](https://www.github.com/googleapis/python-dlp/issues/172)) 
([fb86805](https://www.github.com/googleapis/python-dlp/commit/fb8680580a16b088fd680355e85f12593372b9a4)) + + +### Bug Fixes + +* disable always_use_jwt_access ([#177](https://www.github.com/googleapis/python-dlp/issues/177)) ([15f189f](https://www.github.com/googleapis/python-dlp/commit/15f189fdbbb8f9445bd88e3675c3f1e65da84aad)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-dlp/issues/1127)) ([#166](https://www.github.com/googleapis/python-dlp/issues/166)) ([e2e1c90](https://www.github.com/googleapis/python-dlp/commit/e2e1c90d65a2e2e9c1be1ed7921e138059401519)) + +## [3.1.1](https://www.github.com/googleapis/python-dlp/compare/v3.1.0...v3.1.1) (2021-06-16) + + +### Bug Fixes + +* **deps:** add packaging requirement ([#162](https://www.github.com/googleapis/python-dlp/issues/162)) ([e857e15](https://www.github.com/googleapis/python-dlp/commit/e857e1522d9fd59c1b4c5d9936c7371ddf8018b1)) + +## [3.1.0](https://www.github.com/googleapis/python-dlp/compare/v3.0.1...v3.1.0) (2021-05-28) + + +### Features + +* crypto_deterministic_config ([#108](https://www.github.com/googleapis/python-dlp/issues/108)) ([#119](https://www.github.com/googleapis/python-dlp/issues/119)) ([396804d](https://www.github.com/googleapis/python-dlp/commit/396804d65e40c1ae9ced16aa0f04ef4bdffa54c5)) +* support self-signed JWT flow for service accounts ([cdea974](https://www.github.com/googleapis/python-dlp/commit/cdea9744d0bc7244a42894acc1446080a16b2dab)) + + +### Bug Fixes + +* add async client ([cdea974](https://www.github.com/googleapis/python-dlp/commit/cdea9744d0bc7244a42894acc1446080a16b2dab)) +* require google-api-core>=1.22.2 ([d146cf5](https://www.github.com/googleapis/python-dlp/commit/d146cf59db14b3c3afbef72d7a86419532ad347e)) +* use correct retry deadlines ([#96](https://www.github.com/googleapis/python-dlp/issues/96)) ([d146cf5](https://www.github.com/googleapis/python-dlp/commit/d146cf59db14b3c3afbef72d7a86419532ad347e)) + +## [3.0.1](https://www.github.com/googleapis/python-dlp/compare/v3.0.0...v3.0.1) (2021-01-28) + + +### Bug Fixes + +* remove gRPC send/recv limits; add enums to `types/__init__.py` ([#89](https://www.github.com/googleapis/python-dlp/issues/89)) ([76e0439](https://www.github.com/googleapis/python-dlp/commit/76e0439b3acfdacf9303595107c03c1d49eac8b6)) + +## [3.0.0](https://www.github.com/googleapis/python-dlp/compare/v2.0.0...v3.0.0) (2020-12-02) + + +### ⚠ BREAKING CHANGES +* rename fields that collide with builtins (#75) + * `ByteContentItem.type` -> `ByteContentItem.type_` + * `MetadataLocation.type` -> `MetadataLocation.type_` + * `Container.type` -> `Container.type_` + * `Bucket.min` -> `Bucket.min_` + * `Bucket.max `-> `Bucket.max_` + * `DlpJob.type` -> `DlpJob.type_` + * `GetDlpJobRequest.type` -> `GetDlpJobRequest.type_` + +### Bug Fixes + +* rename fields that collide with builtins; retrieve job config for risk analysis jobs ([#75](https://www.github.com/googleapis/python-dlp/issues/75)) ([4f3148e](https://www.github.com/googleapis/python-dlp/commit/4f3148e93ec3dfc9395aa38a3afc62498500a055)) + + +### Documentation + +* **samples:** fix README to accurately reflect the new repo after the move ([#72](https://www.github.com/googleapis/python-dlp/issues/72)) ([dc56806](https://www.github.com/googleapis/python-dlp/commit/dc56806b47f92227e396969d8a583b881aa41fd1)) + +## [2.0.0](https://www.github.com/googleapis/python-dlp/compare/v1.0.0...v2.0.0) (2020-08-18) + + +### ⚠ BREAKING CHANGES + +* migrate to use 
microgen (#34) + +### Features + +* migrate to use microgen ([#34](https://www.github.com/googleapis/python-dlp/issues/34)) ([c6001e2](https://www.github.com/googleapis/python-dlp/commit/c6001e20facb0bba957794c674c7b1121dc1774a)) + +## [1.0.0](https://www.github.com/googleapis/python-dlp/compare/v0.15.0...v1.0.0) (2020-06-10) + + +### Features + +* set release_status to production/stable ([#9](https://www.github.com/googleapis/python-dlp/issues/9)) ([a7f22a5](https://www.github.com/googleapis/python-dlp/commit/a7f22a5c29d2393ed89a65c3423c590f4454d1c9)) + +## [0.15.0](https://www.github.com/googleapis/python-dlp/compare/v0.14.0...v0.15.0) (2020-05-14) + + +### Features + +* add file types and metadata location enums (via synth) ([#16](https://www.github.com/googleapis/python-dlp/issues/16)) ([442bd9f](https://www.github.com/googleapis/python-dlp/commit/442bd9f57fdc7f186e34958ac422fa39eadf03c2)) +* add support for hybrid jobs (via synth) ([#10](https://www.github.com/googleapis/python-dlp/issues/10)) ([ffad36e](https://www.github.com/googleapis/python-dlp/commit/ffad36ec37e62648f81830ecabbccb1d57e49036)) + +## [0.14.0](https://www.github.com/googleapis/python-dlp/compare/v0.13.0...v0.14.0) (2020-02-21) + + +### Features + +* **dlp:** undeprecate resource name helper methods, add 2.7 deprecation warning (via synth) ([#10040](https://www.github.com/googleapis/python-dlp/issues/10040)) ([b30d7c1](https://www.github.com/googleapis/python-dlp/commit/b30d7c1cd48fba47fdddb7b9232e421261108a52)) + +## 0.13.0 + +12-06-2019 14:29 PST + + +### Implementation Changes +- Remove send/recv msg size limit (via synth). ([#8953](https://github.com/googleapis/google-cloud-python/pull/8953)) + +### New Features +- Add `location_id` in preparation for regionalization; deprecate resource name helper functions (via synth). ([#9856](https://github.com/googleapis/google-cloud-python/pull/9856)) + +### Documentation +- Add python 2 sunset banner to documentation. ([#9036](https://github.com/googleapis/google-cloud-python/pull/9036)) +- Change requests intersphinx ref (via synth). ([#9403](https://github.com/googleapis/google-cloud-python/pull/9403)) +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatibility badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + +### Internal / Testing Changes +- Normalize VPCSC configuration in systests. ([#9608](https://github.com/googleapis/google-cloud-python/pull/9608)) +- Ensure env is always set; fix typo in `test_deidentify_content`. ([#9479](https://github.com/googleapis/google-cloud-python/pull/9479)) +- Exclude 'noxfile.py' from synth. ([#9284](https://github.com/googleapis/google-cloud-python/pull/9284)) +- Ensure `GOOGLE_CLOUD_TESTS_IN_VPCSC` is down cast for env variables. ([#9274](https://github.com/googleapis/google-cloud-python/pull/9274)) +- Add VPCSC tests. ([#9249](https://github.com/googleapis/google-cloud-python/pull/9249)) + +## 0.12.1 + +07-24-2019 16:16 PDT + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. 
([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Fix docs navigation issues. ([#8723](https://github.com/googleapis/google-cloud-python/pull/8723)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) + +## 0.12.0 + +07-09-2019 13:20 PDT + +### New Features +- Add support for publishing findings to GCS; deprecate 'DetectionRule' message (via synth). ([#8610](https://github.com/googleapis/google-cloud-python/pull/8610)) +- Add 'client_options' support, update list method docstrings (via synth). ([#8507](https://github.com/googleapis/google-cloud-python/pull/8507)) +- Allow kwargs to be passed to create_channel; expose support for AVRO files (via synth). ([#8443](https://github.com/googleapis/google-cloud-python/pull/8443)) + +### Internal / Testing Changes +- Pin black version (via synth). ([#8581](https://github.com/googleapis/google-cloud-python/pull/8581)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Update docstrings, format protos, update noxfile (via synth). ([#8239](https://github.com/googleapis/google-cloud-python/pull/8239)) +- Fix coverage in 'types.py' (via synth). ([#8153](https://github.com/googleapis/google-cloud-python/pull/8153)) +- Blacken noxfile.py, setup.py (via synth). ([#8121](https://github.com/googleapis/google-cloud-python/pull/8121)) +- Add empty lines (via synth). ([#8056](https://github.com/googleapis/google-cloud-python/pull/8056)) +- Add nox session `docs`, reorder methods (via synth). ([#7769](https://github.com/googleapis/google-cloud-python/pull/7769)) + +## 0.11.0 + +04-15-2019 15:05 PDT + + +### Implementation Changes +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) +- Remove unused message exports. ([#7267](https://github.com/googleapis/google-cloud-python/pull/7267)) +- Protoc-generated serialization update. ([#7081](https://github.com/googleapis/google-cloud-python/pull/7081)) + +### New Features +- Add support for filtering job triggers; add CryptoDeterministicConfig; update docs/conf.py. (via synth). ([#7390](https://github.com/googleapis/google-cloud-python/pull/7390)) + +### Documentation +- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) +- Update copyright headers +- Pick up stub docstring fix in GAPIC generator. ([#6969](https://github.com/googleapis/google-cloud-python/pull/6969)) + +### Internal / Testing Changes +- Copy in proto files. ([#7227](https://github.com/googleapis/google-cloud-python/pull/7227)) +- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) + +## 0.10.0 + +12-17-2018 18:07 PST + + +### Implementation Changes +- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) +- Pick up enum fixes in the GAPIC generator. ([#6611](https://github.com/googleapis/google-cloud-python/pull/6611)) +- Pick up fixes in GAPIC generator. 
([#6495](https://github.com/googleapis/google-cloud-python/pull/6495)) +- Fix `client_info` bug, update docstrings via synth. ([#6440](https://github.com/googleapis/google-cloud-python/pull/6440)) +- Assorted synth fixups / cleanups ([#6400](https://github.com/googleapis/google-cloud-python/pull/6400)) + +### New Features +- Add `BigQueryOptions.excluded_fields`. ([#6312](https://github.com/googleapis/google-cloud-python/pull/6312)) + +### Dependencies +- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Pick up docstring fix via synth. ([#6874](https://github.com/googleapis/google-cloud-python/pull/6874)) + +### Internal / Testing Changes +- Update noxfile. +- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) +- Add synth metadata. ([#6565](https://github.com/googleapis/google-cloud-python/pull/6565)) + +## 0.9.0 + +10-18-2018 10:44 PDT + +### New Features + +- Added `stored_info_type` methods to v2. ([#6221](https://github.com/googleapis/google-cloud-python/pull/6221)) + +### Documentation + +- Docs: normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) +- Add / fix badges for PyPI / versions. ([#6158](https://github.com/googleapis/google-cloud-python/pull/6158)) + +### Internal / Testing Changes + +- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) +- Avoid replacing/scribbling on 'setup.py' during synth. ([#6125](https://github.com/googleapis/google-cloud-python/pull/6125)) + +## 0.8.0 + +### New Features +- Add support for exclude findings. ([#6091](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6091)) +- Add stored info type support. ([#5950](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5950)) + +### Documentation +- Fix docs issue in DLP generation. ([#5668](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5668), [#5815](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5815)) +- Docs: Replace links to '/stable/' with '/latest/'. ([#5901](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5901)) + +## 0.7.0 + +### New Features +- Add StoredInfoTypes (#5809) + +## 0.6.0 + +### New Features +- Regenerate DLP v2 endpoint (redact image, delta presence) (#5666) + +### Internal / Testing Changes +- Avoid overwriting '__module__' of messages from shared modules. 
(#5364) +- Add Test runs for Python 3.7 and remove 3.4 (#5295) +- Modify system tests to use prerelease versions of grpcio (#5304) + +## 0.5.0 + +### New Features +- Add PublishSummaryToCscc (#5246) +- Add configurable row limit (#5246) +- Add EntityID added to risk stats (#5246) +- Add dictionaries via GCS (#5246) + +## 0.4.0 + +### Implementation Changes + +- Remove DLP client version V2Beta1 (#5155) + +## 0.3.0 + +### Implementation changes + +- The library has been regenerated to pick up changes from the API's proto definition. (#5131) + +## 0.2.0 + +### Interface additions + +- Add DLP v2 (#5059) + +## 0.1.1 + +### Dependencies + +- Update dependency range for api-core to include v1.0.0 releases (#4944) + +### Testing and internal changes + +- Normalize all setup.py files (#4909) + +## 0.1.0 + +Initial release of the DLP (Data Loss Prevention) client library. (#4879) diff --git a/packages/google-cloud-dlp/CODE_OF_CONDUCT.md b/packages/google-cloud-dlp/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-dlp/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-dlp/CONTRIBUTING.rst b/packages/google-cloud-dlp/CONTRIBUTING.rst new file mode 100644 index 000000000000..1c2972e2e62b --- /dev/null +++ b/packages/google-cloud-dlp/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). 
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+    # Run all system tests
+    $ nox -s system
+
+    # Run a single system test
+    $ nox -s system-3.11 -- -k <name of test>
+
+  .. note::
+
+    System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
+    For expediency, we do not run them in older versions of Python 3.
+
+  This alone will not run the tests. You'll need to change some local
+  auth settings and change some configuration in your project to
+  run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+    # Run all tests in a folder
+    $ cd samples/snippets
+    $ nox -s py-3.8
+
+    # Run a single sample test
+    $ cd samples/snippets
+    $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-dlp
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-dlp/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-dlp/LICENSE b/packages/google-cloud-dlp/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-cloud-dlp/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner.
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/packages/google-cloud-dlp/MANIFEST.in b/packages/google-cloud-dlp/MANIFEST.in
new file mode 100644
index 000000000000..e0a66705318e
--- /dev/null
+++ b/packages/google-cloud-dlp/MANIFEST.in
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/packages/google-cloud-dlp/README.rst b/packages/google-cloud-dlp/README.rst
new file mode 100644
index 000000000000..bcf8a936ed75
--- /dev/null
+++ b/packages/google-cloud-dlp/README.rst
@@ -0,0 +1,108 @@
+Python Client for Cloud Data Loss Prevention
+============================================
+
+|stable| |pypi| |versions|
+
+`Cloud Data Loss Prevention`_: provides programmatic access to a powerful detection engine for personally identifiable information and other privacy-sensitive data in unstructured data streams, like text blocks and images.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg
+   :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-dlp.svg
+   :target: https://pypi.org/project/google-cloud-dlp/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dlp.svg
+   :target: https://pypi.org/project/google-cloud-dlp/
+.. _Cloud Data Loss Prevention: https://cloud.google.com/dlp/docs/
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/dlp/latest
+.. _Product Documentation: https://cloud.google.com/dlp/docs/
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Cloud Data Loss Prevention API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Cloud Data Loss Prevention API.: https://cloud.google.com/dlp/docs/
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-dlp
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-dlp
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Cloud Data Loss Prevention
+  to see other available methods on the client.
+- Read the `Cloud Data Loss Prevention Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Cloud Data Loss Prevention Product documentation: https://cloud.google.com/dlp/docs/
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-dlp/SECURITY.md b/packages/google-cloud-dlp/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-dlp/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and handle coordination and disclosure here on GitHub using GitHub Security Advisories to privately discuss and fix the issue.
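As a quick orientation to the package the files above document, here is a minimal usage sketch (illustrative only, not part of this diff): it inspects a string for two built-in infoTypes using the ``DlpServiceClient`` this package exports. The project ID ``my-project`` and the sample text are placeholders, and application-default credentials are assumed to be configured.

.. code-block:: python

    # Illustrative sketch, not part of this change: basic content inspection.
    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    response = client.inspect_content(
        request={
            "parent": "projects/my-project",
            "inspect_config": {
                "info_types": [{"name": "EMAIL_ADDRESS"}, {"name": "PHONE_NUMBER"}],
                "min_likelihood": dlp_v2.Likelihood.POSSIBLE,
            },
            "item": {"value": "Please reach me at alice@example.com."},
        }
    )

    for finding in response.result.findings:
        # Each finding reports the matched infoType and a likelihood bucket.
        print(finding.info_type.name, finding.likelihood)

The plain dict is coerced into an ``InspectContentRequest``; constructing ``dlp_v2.InspectContentRequest`` explicitly works the same way.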
diff --git a/packages/google-cloud-dlp/docs/CHANGELOG.md b/packages/google-cloud-dlp/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-dlp/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-dlp/docs/README.rst b/packages/google-cloud-dlp/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-dlp/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-dlp/docs/_static/custom.css b/packages/google-cloud-dlp/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-dlp/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-dlp/docs/_templates/layout.html b/packages/google-cloud-dlp/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-dlp/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
<div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+        {%- if render_sidebar %}
+          <div class="bodywrapper">
+        {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+        {%- if render_sidebar %}
+          </div>
+        {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-dlp/docs/conf.py b/packages/google-cloud-dlp/docs/conf.py new file mode 100644 index 000000000000..4418563f8f48 --- /dev/null +++ b/packages/google-cloud-dlp/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-dlp documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-dlp" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-dlp", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dlp-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dlp.tex", + "google-cloud-dlp Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. 
+# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dlp", + "google-cloud-dlp Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dlp", + "google-cloud-dlp Documentation", + author, + "google-cloud-dlp", + "google-cloud-dlp Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-dlp/docs/dlp_v2/dlp_service.rst b/packages/google-cloud-dlp/docs/dlp_v2/dlp_service.rst new file mode 100644 index 000000000000..914da512249f --- /dev/null +++ b/packages/google-cloud-dlp/docs/dlp_v2/dlp_service.rst @@ -0,0 +1,10 @@ +DlpService +---------------------------- + +.. automodule:: google.cloud.dlp_v2.services.dlp_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dlp/docs/dlp_v2/services.rst b/packages/google-cloud-dlp/docs/dlp_v2/services.rst new file mode 100644 index 000000000000..864a8c839d6a --- /dev/null +++ b/packages/google-cloud-dlp/docs/dlp_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Dlp v2 API +==================================== +.. toctree:: + :maxdepth: 2 + + dlp_service diff --git a/packages/google-cloud-dlp/docs/dlp_v2/types.rst b/packages/google-cloud-dlp/docs/dlp_v2/types.rst new file mode 100644 index 000000000000..5470b7177179 --- /dev/null +++ b/packages/google-cloud-dlp/docs/dlp_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dlp v2 API +================================= + +.. 
automodule:: google.cloud.dlp_v2.types
+    :members:
+    :show-inheritance:
diff --git a/packages/google-cloud-dlp/docs/index.rst b/packages/google-cloud-dlp/docs/index.rst
new file mode 100644
index 000000000000..3982f142f724
--- /dev/null
+++ b/packages/google-cloud-dlp/docs/index.rst
@@ -0,0 +1,23 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    dlp_v2/services
+    dlp_v2/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-dlp`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG
diff --git a/packages/google-cloud-dlp/docs/multiprocessing.rst b/packages/google-cloud-dlp/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-cloud-dlp/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
diff --git a/packages/google-cloud-dlp/google/cloud/dlp/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py
new file mode 100644
index 000000000000..ae40ffee1321
--- /dev/null
+++ b/packages/google-cloud-dlp/google/cloud/dlp/__init__.py
@@ -0,0 +1,399 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from google.cloud.dlp import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient +from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient +from google.cloud.dlp_v2.types.dlp import ( + Action, + ActivateJobTriggerRequest, + AnalyzeDataSourceRiskDetails, + BoundingBox, + BucketingConfig, + ByteContentItem, + CancelDlpJobRequest, + CharacterMaskConfig, + CharsToIgnore, + Color, + Container, + ContentItem, + ContentLocation, + ContentOption, + CreateDeidentifyTemplateRequest, + CreateDlpJobRequest, + CreateInspectTemplateRequest, + CreateJobTriggerRequest, + CreateStoredInfoTypeRequest, + CryptoDeterministicConfig, + CryptoHashConfig, + CryptoKey, + CryptoReplaceFfxFpeConfig, + DataProfileAction, + DataProfileConfigSnapshot, + DataProfileJobConfig, + DataProfileLocation, + DataProfilePubSubCondition, + DataProfilePubSubMessage, + DataRiskLevel, + DateShiftConfig, + DateTime, + DeidentifyConfig, + DeidentifyContentRequest, + DeidentifyContentResponse, + DeidentifyTemplate, + DeleteDeidentifyTemplateRequest, + DeleteDlpJobRequest, + DeleteInspectTemplateRequest, + DeleteJobTriggerRequest, + DeleteStoredInfoTypeRequest, + DlpJob, + DlpJobType, + DocumentLocation, + EncryptionStatus, + Error, + ExcludeByHotword, + ExcludeInfoTypes, + ExclusionRule, + FieldTransformation, + Finding, + FinishDlpJobRequest, + FixedSizeBucketingConfig, + GetDeidentifyTemplateRequest, + GetDlpJobRequest, + GetInspectTemplateRequest, + GetJobTriggerRequest, + GetStoredInfoTypeRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectDlpJobRequest, + HybridInspectJobTriggerRequest, + HybridInspectResponse, + HybridInspectStatistics, + ImageLocation, + ImageTransformations, + InfoTypeCategory, + InfoTypeDescription, + InfoTypeStats, + InfoTypeSummary, + InfoTypeSupportedBy, + InfoTypeTransformations, + InspectConfig, + InspectContentRequest, + InspectContentResponse, + InspectDataSourceDetails, + InspectionRule, + InspectionRuleSet, + InspectJobConfig, + InspectResult, + InspectTemplate, + JobTrigger, + KmsWrappedCryptoKey, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + ListDlpJobsRequest, + ListDlpJobsResponse, + ListInfoTypesRequest, + ListInfoTypesResponse, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + ListJobTriggersRequest, + ListJobTriggersResponse, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + Location, + Manual, + MatchingType, + MetadataLocation, + MetadataType, + OtherInfoTypeSummary, + OutputStorageConfig, + PrimitiveTransformation, + PrivacyMetric, + ProfileStatus, + QuasiId, + QuoteInfo, + Range, + RecordCondition, + RecordLocation, + RecordSuppression, + RecordTransformation, + RecordTransformations, + RedactConfig, + RedactImageRequest, + RedactImageResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + RelationalOperator, + ReplaceDictionaryConfig, + ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + ResourceVisibility, + RiskAnalysisJobConfig, + Schedule, + StatisticalTable, + StorageMetadataLabel, + StoredInfoType, + StoredInfoTypeConfig, + StoredInfoTypeState, + StoredInfoTypeStats, + StoredInfoTypeVersion, + Table, + TableDataProfile, + TableLocation, + TimePartConfig, + TransformationConfig, + TransformationContainerType, + TransformationDescription, + TransformationDetails, + TransformationDetailsStorageConfig, + 
TransformationErrorHandling, + TransformationLocation, + TransformationOverview, + TransformationResultStatus, + TransformationResultStatusType, + TransformationSummary, + TransformationType, + TransientCryptoKey, + UnwrappedCryptoKey, + UpdateDeidentifyTemplateRequest, + UpdateInspectTemplateRequest, + UpdateJobTriggerRequest, + UpdateStoredInfoTypeRequest, + Value, + ValueFrequency, + VersionDescription, +) +from google.cloud.dlp_v2.types.storage import ( + BigQueryField, + BigQueryKey, + BigQueryOptions, + BigQueryTable, + CloudStorageFileSet, + CloudStorageOptions, + CloudStoragePath, + CloudStorageRegexFileSet, + CustomInfoType, + DatastoreKey, + DatastoreOptions, + EntityId, + FieldId, + FileType, + HybridOptions, + InfoType, + Key, + KindExpression, + Likelihood, + PartitionId, + RecordKey, + SensitivityScore, + StorageConfig, + StoredType, + TableOptions, +) + +__all__ = ( + "DlpServiceClient", + "DlpServiceAsyncClient", + "Action", + "ActivateJobTriggerRequest", + "AnalyzeDataSourceRiskDetails", + "BoundingBox", + "BucketingConfig", + "ByteContentItem", + "CancelDlpJobRequest", + "CharacterMaskConfig", + "CharsToIgnore", + "Color", + "Container", + "ContentItem", + "ContentLocation", + "CreateDeidentifyTemplateRequest", + "CreateDlpJobRequest", + "CreateInspectTemplateRequest", + "CreateJobTriggerRequest", + "CreateStoredInfoTypeRequest", + "CryptoDeterministicConfig", + "CryptoHashConfig", + "CryptoKey", + "CryptoReplaceFfxFpeConfig", + "DataProfileAction", + "DataProfileConfigSnapshot", + "DataProfileJobConfig", + "DataProfileLocation", + "DataProfilePubSubCondition", + "DataProfilePubSubMessage", + "DataRiskLevel", + "DateShiftConfig", + "DateTime", + "DeidentifyConfig", + "DeidentifyContentRequest", + "DeidentifyContentResponse", + "DeidentifyTemplate", + "DeleteDeidentifyTemplateRequest", + "DeleteDlpJobRequest", + "DeleteInspectTemplateRequest", + "DeleteJobTriggerRequest", + "DeleteStoredInfoTypeRequest", + "DlpJob", + "DocumentLocation", + "Error", + "ExcludeByHotword", + "ExcludeInfoTypes", + "ExclusionRule", + "FieldTransformation", + "Finding", + "FinishDlpJobRequest", + "FixedSizeBucketingConfig", + "GetDeidentifyTemplateRequest", + "GetDlpJobRequest", + "GetInspectTemplateRequest", + "GetJobTriggerRequest", + "GetStoredInfoTypeRequest", + "HybridContentItem", + "HybridFindingDetails", + "HybridInspectDlpJobRequest", + "HybridInspectJobTriggerRequest", + "HybridInspectResponse", + "HybridInspectStatistics", + "ImageLocation", + "ImageTransformations", + "InfoTypeCategory", + "InfoTypeDescription", + "InfoTypeStats", + "InfoTypeSummary", + "InfoTypeTransformations", + "InspectConfig", + "InspectContentRequest", + "InspectContentResponse", + "InspectDataSourceDetails", + "InspectionRule", + "InspectionRuleSet", + "InspectJobConfig", + "InspectResult", + "InspectTemplate", + "JobTrigger", + "KmsWrappedCryptoKey", + "LargeCustomDictionaryConfig", + "LargeCustomDictionaryStats", + "ListDeidentifyTemplatesRequest", + "ListDeidentifyTemplatesResponse", + "ListDlpJobsRequest", + "ListDlpJobsResponse", + "ListInfoTypesRequest", + "ListInfoTypesResponse", + "ListInspectTemplatesRequest", + "ListInspectTemplatesResponse", + "ListJobTriggersRequest", + "ListJobTriggersResponse", + "ListStoredInfoTypesRequest", + "ListStoredInfoTypesResponse", + "Location", + "Manual", + "MetadataLocation", + "OtherInfoTypeSummary", + "OutputStorageConfig", + "PrimitiveTransformation", + "PrivacyMetric", + "ProfileStatus", + "QuasiId", + "QuoteInfo", + "Range", + "RecordCondition", + 
"RecordLocation", + "RecordSuppression", + "RecordTransformation", + "RecordTransformations", + "RedactConfig", + "RedactImageRequest", + "RedactImageResponse", + "ReidentifyContentRequest", + "ReidentifyContentResponse", + "ReplaceDictionaryConfig", + "ReplaceValueConfig", + "ReplaceWithInfoTypeConfig", + "RiskAnalysisJobConfig", + "Schedule", + "StatisticalTable", + "StorageMetadataLabel", + "StoredInfoType", + "StoredInfoTypeConfig", + "StoredInfoTypeStats", + "StoredInfoTypeVersion", + "Table", + "TableDataProfile", + "TableLocation", + "TimePartConfig", + "TransformationConfig", + "TransformationDescription", + "TransformationDetails", + "TransformationDetailsStorageConfig", + "TransformationErrorHandling", + "TransformationLocation", + "TransformationOverview", + "TransformationResultStatus", + "TransformationSummary", + "TransientCryptoKey", + "UnwrappedCryptoKey", + "UpdateDeidentifyTemplateRequest", + "UpdateInspectTemplateRequest", + "UpdateJobTriggerRequest", + "UpdateStoredInfoTypeRequest", + "Value", + "ValueFrequency", + "VersionDescription", + "ContentOption", + "DlpJobType", + "EncryptionStatus", + "InfoTypeSupportedBy", + "MatchingType", + "MetadataType", + "RelationalOperator", + "ResourceVisibility", + "StoredInfoTypeState", + "TransformationContainerType", + "TransformationResultStatusType", + "TransformationType", + "BigQueryField", + "BigQueryKey", + "BigQueryOptions", + "BigQueryTable", + "CloudStorageFileSet", + "CloudStorageOptions", + "CloudStoragePath", + "CloudStorageRegexFileSet", + "CustomInfoType", + "DatastoreKey", + "DatastoreOptions", + "EntityId", + "FieldId", + "HybridOptions", + "InfoType", + "Key", + "KindExpression", + "PartitionId", + "RecordKey", + "SensitivityScore", + "StorageConfig", + "StoredType", + "TableOptions", + "FileType", + "Likelihood", +) diff --git a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py new file mode 100644 index 000000000000..02f3a1ee6c01 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "3.12.3" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp/py.typed b/packages/google-cloud-dlp/google/cloud/dlp/py.typed new file mode 100644 index 000000000000..23d89ef3ac5c --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. 
diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py new file mode 100644 index 000000000000..60e783cef175 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/__init__.py @@ -0,0 +1,398 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dlp_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.dlp_service import DlpServiceAsyncClient, DlpServiceClient +from .types.dlp import ( + Action, + ActivateJobTriggerRequest, + AnalyzeDataSourceRiskDetails, + BoundingBox, + BucketingConfig, + ByteContentItem, + CancelDlpJobRequest, + CharacterMaskConfig, + CharsToIgnore, + Color, + Container, + ContentItem, + ContentLocation, + ContentOption, + CreateDeidentifyTemplateRequest, + CreateDlpJobRequest, + CreateInspectTemplateRequest, + CreateJobTriggerRequest, + CreateStoredInfoTypeRequest, + CryptoDeterministicConfig, + CryptoHashConfig, + CryptoKey, + CryptoReplaceFfxFpeConfig, + DataProfileAction, + DataProfileConfigSnapshot, + DataProfileJobConfig, + DataProfileLocation, + DataProfilePubSubCondition, + DataProfilePubSubMessage, + DataRiskLevel, + DateShiftConfig, + DateTime, + DeidentifyConfig, + DeidentifyContentRequest, + DeidentifyContentResponse, + DeidentifyTemplate, + DeleteDeidentifyTemplateRequest, + DeleteDlpJobRequest, + DeleteInspectTemplateRequest, + DeleteJobTriggerRequest, + DeleteStoredInfoTypeRequest, + DlpJob, + DlpJobType, + DocumentLocation, + EncryptionStatus, + Error, + ExcludeByHotword, + ExcludeInfoTypes, + ExclusionRule, + FieldTransformation, + Finding, + FinishDlpJobRequest, + FixedSizeBucketingConfig, + GetDeidentifyTemplateRequest, + GetDlpJobRequest, + GetInspectTemplateRequest, + GetJobTriggerRequest, + GetStoredInfoTypeRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectDlpJobRequest, + HybridInspectJobTriggerRequest, + HybridInspectResponse, + HybridInspectStatistics, + ImageLocation, + ImageTransformations, + InfoTypeCategory, + InfoTypeDescription, + InfoTypeStats, + InfoTypeSummary, + InfoTypeSupportedBy, + InfoTypeTransformations, + InspectConfig, + InspectContentRequest, + InspectContentResponse, + InspectDataSourceDetails, + InspectionRule, + InspectionRuleSet, + InspectJobConfig, + InspectResult, + InspectTemplate, + JobTrigger, + KmsWrappedCryptoKey, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + ListDlpJobsRequest, + ListDlpJobsResponse, + ListInfoTypesRequest, + ListInfoTypesResponse, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + ListJobTriggersRequest, + ListJobTriggersResponse, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + Location, + Manual, + MatchingType, + MetadataLocation, + MetadataType, + OtherInfoTypeSummary, + OutputStorageConfig, + PrimitiveTransformation, + PrivacyMetric, + ProfileStatus, + 
QuasiId, + QuoteInfo, + Range, + RecordCondition, + RecordLocation, + RecordSuppression, + RecordTransformation, + RecordTransformations, + RedactConfig, + RedactImageRequest, + RedactImageResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + RelationalOperator, + ReplaceDictionaryConfig, + ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + ResourceVisibility, + RiskAnalysisJobConfig, + Schedule, + StatisticalTable, + StorageMetadataLabel, + StoredInfoType, + StoredInfoTypeConfig, + StoredInfoTypeState, + StoredInfoTypeStats, + StoredInfoTypeVersion, + Table, + TableDataProfile, + TableLocation, + TimePartConfig, + TransformationConfig, + TransformationContainerType, + TransformationDescription, + TransformationDetails, + TransformationDetailsStorageConfig, + TransformationErrorHandling, + TransformationLocation, + TransformationOverview, + TransformationResultStatus, + TransformationResultStatusType, + TransformationSummary, + TransformationType, + TransientCryptoKey, + UnwrappedCryptoKey, + UpdateDeidentifyTemplateRequest, + UpdateInspectTemplateRequest, + UpdateJobTriggerRequest, + UpdateStoredInfoTypeRequest, + Value, + ValueFrequency, + VersionDescription, +) +from .types.storage import ( + BigQueryField, + BigQueryKey, + BigQueryOptions, + BigQueryTable, + CloudStorageFileSet, + CloudStorageOptions, + CloudStoragePath, + CloudStorageRegexFileSet, + CustomInfoType, + DatastoreKey, + DatastoreOptions, + EntityId, + FieldId, + FileType, + HybridOptions, + InfoType, + Key, + KindExpression, + Likelihood, + PartitionId, + RecordKey, + SensitivityScore, + StorageConfig, + StoredType, + TableOptions, +) + +__all__ = ( + "DlpServiceAsyncClient", + "Action", + "ActivateJobTriggerRequest", + "AnalyzeDataSourceRiskDetails", + "BigQueryField", + "BigQueryKey", + "BigQueryOptions", + "BigQueryTable", + "BoundingBox", + "BucketingConfig", + "ByteContentItem", + "CancelDlpJobRequest", + "CharacterMaskConfig", + "CharsToIgnore", + "CloudStorageFileSet", + "CloudStorageOptions", + "CloudStoragePath", + "CloudStorageRegexFileSet", + "Color", + "Container", + "ContentItem", + "ContentLocation", + "ContentOption", + "CreateDeidentifyTemplateRequest", + "CreateDlpJobRequest", + "CreateInspectTemplateRequest", + "CreateJobTriggerRequest", + "CreateStoredInfoTypeRequest", + "CryptoDeterministicConfig", + "CryptoHashConfig", + "CryptoKey", + "CryptoReplaceFfxFpeConfig", + "CustomInfoType", + "DataProfileAction", + "DataProfileConfigSnapshot", + "DataProfileJobConfig", + "DataProfileLocation", + "DataProfilePubSubCondition", + "DataProfilePubSubMessage", + "DataRiskLevel", + "DatastoreKey", + "DatastoreOptions", + "DateShiftConfig", + "DateTime", + "DeidentifyConfig", + "DeidentifyContentRequest", + "DeidentifyContentResponse", + "DeidentifyTemplate", + "DeleteDeidentifyTemplateRequest", + "DeleteDlpJobRequest", + "DeleteInspectTemplateRequest", + "DeleteJobTriggerRequest", + "DeleteStoredInfoTypeRequest", + "DlpJob", + "DlpJobType", + "DlpServiceClient", + "DocumentLocation", + "EncryptionStatus", + "EntityId", + "Error", + "ExcludeByHotword", + "ExcludeInfoTypes", + "ExclusionRule", + "FieldId", + "FieldTransformation", + "FileType", + "Finding", + "FinishDlpJobRequest", + "FixedSizeBucketingConfig", + "GetDeidentifyTemplateRequest", + "GetDlpJobRequest", + "GetInspectTemplateRequest", + "GetJobTriggerRequest", + "GetStoredInfoTypeRequest", + "HybridContentItem", + "HybridFindingDetails", + "HybridInspectDlpJobRequest", + "HybridInspectJobTriggerRequest", + "HybridInspectResponse", + 
"HybridInspectStatistics", + "HybridOptions", + "ImageLocation", + "ImageTransformations", + "InfoType", + "InfoTypeCategory", + "InfoTypeDescription", + "InfoTypeStats", + "InfoTypeSummary", + "InfoTypeSupportedBy", + "InfoTypeTransformations", + "InspectConfig", + "InspectContentRequest", + "InspectContentResponse", + "InspectDataSourceDetails", + "InspectJobConfig", + "InspectResult", + "InspectTemplate", + "InspectionRule", + "InspectionRuleSet", + "JobTrigger", + "Key", + "KindExpression", + "KmsWrappedCryptoKey", + "LargeCustomDictionaryConfig", + "LargeCustomDictionaryStats", + "Likelihood", + "ListDeidentifyTemplatesRequest", + "ListDeidentifyTemplatesResponse", + "ListDlpJobsRequest", + "ListDlpJobsResponse", + "ListInfoTypesRequest", + "ListInfoTypesResponse", + "ListInspectTemplatesRequest", + "ListInspectTemplatesResponse", + "ListJobTriggersRequest", + "ListJobTriggersResponse", + "ListStoredInfoTypesRequest", + "ListStoredInfoTypesResponse", + "Location", + "Manual", + "MatchingType", + "MetadataLocation", + "MetadataType", + "OtherInfoTypeSummary", + "OutputStorageConfig", + "PartitionId", + "PrimitiveTransformation", + "PrivacyMetric", + "ProfileStatus", + "QuasiId", + "QuoteInfo", + "Range", + "RecordCondition", + "RecordKey", + "RecordLocation", + "RecordSuppression", + "RecordTransformation", + "RecordTransformations", + "RedactConfig", + "RedactImageRequest", + "RedactImageResponse", + "ReidentifyContentRequest", + "ReidentifyContentResponse", + "RelationalOperator", + "ReplaceDictionaryConfig", + "ReplaceValueConfig", + "ReplaceWithInfoTypeConfig", + "ResourceVisibility", + "RiskAnalysisJobConfig", + "Schedule", + "SensitivityScore", + "StatisticalTable", + "StorageConfig", + "StorageMetadataLabel", + "StoredInfoType", + "StoredInfoTypeConfig", + "StoredInfoTypeState", + "StoredInfoTypeStats", + "StoredInfoTypeVersion", + "StoredType", + "Table", + "TableDataProfile", + "TableLocation", + "TableOptions", + "TimePartConfig", + "TransformationConfig", + "TransformationContainerType", + "TransformationDescription", + "TransformationDetails", + "TransformationDetailsStorageConfig", + "TransformationErrorHandling", + "TransformationLocation", + "TransformationOverview", + "TransformationResultStatus", + "TransformationResultStatusType", + "TransformationSummary", + "TransformationType", + "TransientCryptoKey", + "UnwrappedCryptoKey", + "UpdateDeidentifyTemplateRequest", + "UpdateInspectTemplateRequest", + "UpdateJobTriggerRequest", + "UpdateStoredInfoTypeRequest", + "Value", + "ValueFrequency", + "VersionDescription", +) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_metadata.json b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_metadata.json new file mode 100644 index 000000000000..634002d47ccc --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_metadata.json @@ -0,0 +1,538 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dlp_v2", + "protoPackage": "google.privacy.dlp.v2", + "schema": "1.0", + "services": { + "DlpService": { + "clients": { + "grpc": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + 
"methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DlpServiceAsyncClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": 
[ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "rest": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + 
"update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py new file mode 100644 index 000000000000..02f3a1ee6c01 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "3.12.3" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/py.typed b/packages/google-cloud-dlp/google/cloud/dlp_v2/py.typed new file mode 100644 index 000000000000..23d89ef3ac5c --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/__init__.py new file mode 100644 index 000000000000..44b233d30456 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import DlpServiceAsyncClient +from .client import DlpServiceClient + +__all__ = ( + "DlpServiceClient", + "DlpServiceAsyncClient", +) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py new file mode 100644 index 000000000000..d9e39302b13e --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -0,0 +1,4258 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dlp_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp + +from .client import DlpServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DlpServiceTransport +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport + + +class DlpServiceAsyncClient: + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. 
+ """ + + _client: DlpServiceClient + + DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + + deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + parse_deidentify_template_path = staticmethod( + DlpServiceClient.parse_deidentify_template_path + ) + dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) + parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) + dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) + parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) + finding_path = staticmethod(DlpServiceClient.finding_path) + parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) + inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + parse_inspect_template_path = staticmethod( + DlpServiceClient.parse_inspect_template_path + ) + job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) + parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) + stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) + parse_stored_info_type_path = staticmethod( + DlpServiceClient.parse_stored_info_type_path + ) + common_billing_account_path = staticmethod( + DlpServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DlpServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DlpServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DlpServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + DlpServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DlpServiceClient.common_project_path) + parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) + common_location_path = staticmethod(DlpServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DlpServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DlpServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(DlpServiceClient).get_transport_class, type(DlpServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DlpServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used.
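As a sketch of the ``client_options`` behaviour described above, restating the default endpoint for illustration (a custom or regional endpoint, where available, would be passed the same way):

.. code-block:: python

    from google.api_core.client_options import ClientOptions

    from google.cloud import dlp_v2

    # The endpoint value here is illustrative; it overrides whatever
    # endpoint the client would otherwise choose.
    options = ClientOptions(api_endpoint="dlp.googleapis.com")
    client = dlp_v2.DlpServiceAsyncClient(client_options=options)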
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DlpServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def inspect_content( + self, + request: Optional[Union[dlp.InspectContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + For how-to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = await client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.inspect_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
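The generated snippet in the docstring above sends an empty request; a sketch with a populated request might look like this (the project id and infotype are placeholders):

.. code-block:: python

    from google.cloud import dlp_v2

    async def find_emails(text: str) -> None:
        client = dlp_v2.DlpServiceAsyncClient()
        response = await client.inspect_content(
            request=dlp_v2.InspectContentRequest(
                parent="projects/my-project",  # placeholder project id
                inspect_config=dlp_v2.InspectConfig(
                    info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
                ),
                item=dlp_v2.ContentItem(value=text),
            )
        )
        # Each finding carries the matched infotype and a likelihood.
        for finding in response.result.findings:
            print(finding.info_type.name, finding.likelihood)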
+ return response + + async def redact_image( + self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = await client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]]): + The request object. Request to search for potentially + sensitive info in an image and redact it + by covering it with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.redact_image, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def deidentify_content( + self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. 
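For ``redact_image`` above, a populated request carries the image bytes in a ByteContentItem; a sketch under the assumption of PNG input (the project id is a placeholder):

.. code-block:: python

    from google.cloud import dlp_v2

    async def redact_png(png_bytes: bytes) -> bytes:
        client = dlp_v2.DlpServiceAsyncClient()
        response = await client.redact_image(
            request=dlp_v2.RedactImageRequest(
                parent="projects/my-project",  # placeholder project id
                byte_item=dlp_v2.ByteContentItem(
                    type_=dlp_v2.ByteContentItem.BytesType.IMAGE_PNG,
                    data=png_bytes,
                ),
            )
        )
        # The redacted image comes back as raw bytes.
        return response.redacted_image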
+ + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = await client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]]): + The request object. Request to de-identify a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.deidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reidentify_content( + self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_info_types( + self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = await client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]]): + The request object. Request for the list of infoTypes. + parent (:class:`str`): + The parent resource name. 
+ + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_inspect_template( + self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]]): + The request object. Request message for + CreateInspectTemplate. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location <https://cloud.google.com/dlp/docs/specifying-location>`__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
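Using the flattened ``parent`` and ``inspect_template`` arguments above, creating a reusable template might look like this sketch (placeholder project, illustrative single-infotype config):

.. code-block:: python

    from google.cloud import dlp_v2

    async def make_template() -> None:
        client = dlp_v2.DlpServiceAsyncClient()
        template = await client.create_inspect_template(
            parent="projects/my-project/locations/global",  # placeholder
            inspect_template=dlp_v2.InspectTemplate(
                display_name="emails-only",
                inspect_config=dlp_v2.InspectConfig(
                    info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
                ),
            ),
        )
        # The server assigns the template's resource name.
        print(template.name)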
+ return response + + async def update_inspect_template( + self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]]): + The request object. Request message for + UpdateInspectTemplate. + name (:class:`str`): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.UpdateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_inspect_template( + self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]]): + The request object. Request message for + GetInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = dlp.GetInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_inspect_templates( + self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesAsyncPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]]): + The request object. Request message for + ListInspectTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location <https://cloud.google.com/dlp/docs/specifying-location>`__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListInspectTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_inspect_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInspectTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_inspect_template( + self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_inspect_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]]): + The request object. Request message for + DeleteInspectTemplate. 
+ name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_deidentify_template( + self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]]): + The request object. 
Request message for + CreateDeidentifyTemplate. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
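+        # The returned DeidentifyTemplate includes the server-assigned
+        # `name`, which later get/update/delete calls reference.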
+ return response + + async def update_deidentify_template( + self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.UpdateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
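+        # `update_mask` is a google.protobuf.FieldMask; when it is set,
+        # the server updates only the template fields that it names.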
+ if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deidentify_template( + self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]]): + The request object. Request message for + GetDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = dlp.GetDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_deidentify_templates( + self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesAsyncPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]]): + The request object. Request message for + ListDeidentifyTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListDeidentifyTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deidentify_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeidentifyTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deidentify_template( + self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_deidentify_template(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]]): + The request object. 
Request message for + DeleteDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_trigger( + self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = await client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]]): + The request object. Request message for CreateJobTrigger. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
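+        # The returned JobTrigger carries the server-assigned resource
+        # name (for example `projects/PROJECT_ID/jobTriggers/TRIGGER_ID`),
+        # which callers typically persist for later calls.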
+ return response + + async def update_job_trigger( + self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]]): + The request object. Request message for UpdateJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.UpdateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
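+        # Note that `request` and the flattened arguments are mutually
+        # exclusive; the ValueError check above guards against mixing them.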
+ if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def hybrid_inspect_job_trigger( + self, + request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = dlp.HybridInspectJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_trigger( + self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]]): + The request object. Request message for GetJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.GetJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
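+        # The wrapper below attaches the generated default retry policy:
+        # exponential backoff starting at 0.1s (capped at 60s, multiplier
+        # 1.3) on DeadlineExceeded / ServiceUnavailable, with an overall
+        # 300s deadline.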
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_job_triggers( + self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersAsyncPager: + r"""Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]]): + The request object. Request message for ListJobTriggers. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager: + Response message for ListJobTriggers. + + Iterating over this object will yield + results and resolve additional pages + automatically. 
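+                Additional pages are fetched lazily, by issuing
+                further ListJobTriggers calls as iteration proceeds.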
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListJobTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_triggers, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTriggersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job_trigger( + self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_trigger(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]]): + The request object. Request message for DeleteJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def activate_job_trigger( + self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_activate_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ActivateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.activate_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]]): + The request object. Request message for + ActivateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.activate_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_dlp_job( + self, + request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_job: Optional[dlp.InspectJobConfig] = None, + risk_job: Optional[dlp.RiskAnalysisJobConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_dlp_jobs( + self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsAsyncPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]]): + The request object. The request message for listing DLP + jobs. + parent (:class:`str`): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListDlpJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dlp_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDlpJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dlp_job( + self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]]): + The request object. The request message for [DlpJobs.GetDlpJob][]. + name (:class:`str`): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.GetDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dlp_job( + self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]]): + The request object. The request message for deleting a + DLP job. + name (:class:`str`): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_dlp_job( + self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.cancel_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_stored_info_type( + self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]]): + The request object. Request message for + CreateStoredInfoType. + parent (:class:`str`): + Required. Parent resource name. 
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`):
+                Required. Configuration of the
+                storedInfoType to create.
+
+                This corresponds to the ``config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.StoredInfoType:
+                StoredInfoType resource message that
+                contains information about the current
+                version and any pending updates.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, config])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = dlp.CreateStoredInfoTypeRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if config is not None:
+            request.config = config
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_stored_info_type,
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_stored_info_type(
+        self,
+        request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        config: Optional[dlp.StoredInfoTypeConfig] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> dlp.StoredInfoType:
+        r"""Updates the stored infoType by creating a new
+        version. The existing version will continue to be used
+        until the new version is ready.
See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]]): + The request object. Request message for + UpdateStoredInfoType. + name (:class:`str`): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.UpdateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
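+        # A rough sketch of what the routing header carries: for a request
+        # whose name is, say, "organizations/433245324/storedInfoTypes/432452342"
+        # (the example from the docstring above), to_grpc_metadata() emits an
+        # ``x-goog-request-params`` entry whose value is the URL-encoded
+        # ``name=organizations%2F433245324%2FstoredInfoTypes%2F432452342``.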
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_stored_info_type( + self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = await client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]]): + The request object. Request message for + GetStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.GetStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
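+        # With the defaults below, DEADLINE_EXCEEDED and UNAVAILABLE errors
+        # are retried with exponential backoff: delays start around 0.1s and
+        # grow by a factor of 1.3 (0.1s, 0.13s, 0.169s, ...), capped at 60s
+        # per attempt, until the overall 300s deadline is exhausted.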
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_stored_info_type,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=300.0,
+            ),
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_stored_info_types(
+        self,
+        request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListStoredInfoTypesAsyncPager:
+        r"""Lists stored infoTypes.
+        See
+        https://cloud.google.com/dlp/docs/creating-stored-infotypes
+        to learn more.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dlp_v2
+
+            async def sample_list_stored_info_types():
+                # Create a client
+                client = dlp_v2.DlpServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = dlp_v2.ListStoredInfoTypesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_stored_info_types(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]]):
+                The request object. Request message for
+                ListStoredInfoTypes.
+            parent (:class:`str`):
+                Required. Parent resource name.
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager:
+                Response message for
+                ListStoredInfoTypes.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
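+
+                If manual pagination is ever needed, the pager also
+                exposes a ``pages`` attribute for walking one response
+                page (and its ``next_page_token``) at a time.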
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListStoredInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_stored_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListStoredInfoTypesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_stored_info_type( + self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + await client.delete_stored_info_type(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]]): + The request object. Request message for + DeleteStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def hybrid_inspect_dlp_job( + self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = await client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.HybridInspectDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def finish_dlp_job( + self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + async def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceAsyncClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + await client.finish_dlp_job(request=request) + + Args: + request (Optional[Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.finish_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
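+        # FinishDlpJob maps to google.protobuf.Empty on the wire, so there is
+        # no payload to unpack here; a failed call surfaces as an exception
+        # from google.api_core.exceptions instead.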
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "DlpServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DlpServiceAsyncClient",) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py new file mode 100644 index 000000000000..80b10cb26009 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py @@ -0,0 +1,4410 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dlp_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp + +from .transports.base import DEFAULT_CLIENT_INFO, DlpServiceTransport +from .transports.grpc import DlpServiceGrpcTransport +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .transports.rest import DlpServiceRestTransport + + +class DlpServiceClientMeta(type): + """Metaclass for the DlpService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] + _transport_registry["grpc"] = DlpServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DlpServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DlpServiceTransport]: + """Returns an appropriate transport class. 
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is
+                used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DlpServiceClient(metaclass=DlpServiceClientMeta):
+    """The Cloud Data Loss Prevention (DLP) API is a service that
+    allows clients to detect the presence of Personally Identifiable
+    Information (PII) and other privacy-sensitive data in
+    user-supplied, unstructured data streams, like text blocks or
+    images.
+    The service also includes methods for sensitive data redaction
+    and scheduling of data scans on Google Cloud Platform based data
+    sets.
+
+    To learn more about concepts and find how-to guides see
+    https://cloud.google.com/dlp/docs/.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "dlp.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DlpServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DlpServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
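+    # Illustrative usage of the constructors above (the key filename is
+    # hypothetical):
+    #
+    #     client = DlpServiceClient.from_service_account_file(
+    #         "service-account.json"
+    #     )
+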
+ """ + return self._transport + + @staticmethod + def deidentify_template_path( + organization: str, + deidentify_template: str, + ) -> str: + """Returns a fully-qualified deidentify_template string.""" + return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format( + organization=organization, + deidentify_template=deidentify_template, + ) + + @staticmethod + def parse_deidentify_template_path(path: str) -> Dict[str, str]: + """Parses a deidentify_template path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def dlp_content_path( + project: str, + ) -> str: + """Returns a fully-qualified dlp_content string.""" + return "projects/{project}/dlpContent".format( + project=project, + ) + + @staticmethod + def parse_dlp_content_path(path: str) -> Dict[str, str]: + """Parses a dlp_content path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpContent$", path) + return m.groupdict() if m else {} + + @staticmethod + def dlp_job_path( + project: str, + dlp_job: str, + ) -> str: + """Returns a fully-qualified dlp_job string.""" + return "projects/{project}/dlpJobs/{dlp_job}".format( + project=project, + dlp_job=dlp_job, + ) + + @staticmethod + def parse_dlp_job_path(path: str) -> Dict[str, str]: + """Parses a dlp_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def finding_path( + project: str, + location: str, + finding: str, + ) -> str: + """Returns a fully-qualified finding string.""" + return "projects/{project}/locations/{location}/findings/{finding}".format( + project=project, + location=location, + finding=finding, + ) + + @staticmethod + def parse_finding_path(path: str) -> Dict[str, str]: + """Parses a finding path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def inspect_template_path( + organization: str, + inspect_template: str, + ) -> str: + """Returns a fully-qualified inspect_template string.""" + return ( + "organizations/{organization}/inspectTemplates/{inspect_template}".format( + organization=organization, + inspect_template=inspect_template, + ) + ) + + @staticmethod + def parse_inspect_template_path(path: str) -> Dict[str, str]: + """Parses a inspect_template path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def job_trigger_path( + project: str, + job_trigger: str, + ) -> str: + """Returns a fully-qualified job_trigger string.""" + return "projects/{project}/jobTriggers/{job_trigger}".format( + project=project, + job_trigger=job_trigger, + ) + + @staticmethod + def parse_job_trigger_path(path: str) -> Dict[str, str]: + """Parses a job_trigger path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def stored_info_type_path( + organization: str, + stored_info_type: str, + ) -> str: + """Returns a fully-qualified stored_info_type string.""" + return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format( + organization=organization, + stored_info_type=stored_info_type, + ) + + @staticmethod + def parse_stored_info_type_path(path: 
+    @staticmethod
+    def stored_info_type_path(
+        organization: str,
+        stored_info_type: str,
+    ) -> str:
+        """Returns a fully-qualified stored_info_type string."""
+        return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(
+            organization=organization,
+            stored_info_type=stored_info_type,
+        )
+
+    @staticmethod
+    def parse_stored_info_type_path(path: str) -> Dict[str, str]:
+        """Parses a stored_info_type path into its component segments."""
+        m = re.match(
+            r"^organizations/(?P<organization>.+?)/storedInfoTypes/(?P<stored_info_type>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
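+
+        For example (an illustrative walk-through of the rules above): with
+        ``GOOGLE_API_USE_CLIENT_CERTIFICATE=true``, a ``client_cert_source``
+        set on ``client_options``, and ``GOOGLE_API_USE_MTLS_ENDPOINT=auto``,
+        this method returns ``(DEFAULT_MTLS_ENDPOINT, client_cert_source)``.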
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DlpServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = client_options_lib.from_dict(client_options)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        client_options = cast(client_options_lib.ClientOptions, client_options)
+
+        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
+            client_options
+        )
+
+        api_key_value = getattr(client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError(
+                "client_options.api_key and credentials are mutually exclusive"
+            )
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        if isinstance(transport, DlpServiceTransport):
+            # transport is a DlpServiceTransport instance.
+            if credentials or client_options.credentials_file or api_key_value:
+                raise ValueError(
+                    "When providing a transport instance, "
+                    "provide its credentials directly."
+                )
+            if client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = transport
+        else:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(
+                google.auth._default, "get_api_key_credentials"
+            ):
+                credentials = google.auth._default.get_api_key_credentials(
+                    api_key_value
+                )
+
+            Transport = type(self).get_transport_class(transport)
+            self._transport = Transport(
+                credentials=credentials,
+                credentials_file=client_options.credentials_file,
+                host=api_endpoint,
+                scopes=client_options.scopes,
+                client_cert_source_for_mtls=client_cert_source_func,
+                quota_project_id=client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=True,
+                api_audience=client_options.api_audience,
+            )
+
+    def inspect_content(
+        self,
+        request: Optional[Union[dlp.InspectContentRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> dlp.InspectContentResponse:
+        r"""Finds potentially sensitive info in content.
+        This method has limits on input size, processing time,
+        and output size.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        For how-to guides, see
+        https://cloud.google.com/dlp/docs/inspecting-images and
+        https://cloud.google.com/dlp/docs/inspecting-text.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_inspect_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.InspectContentRequest( + ) + + # Make the request + response = client.inspect_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.InspectContentRequest, dict]): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.InspectContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.InspectContentRequest): + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.inspect_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def redact_image( + self, + request: Optional[Union[dlp.RedactImageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive-data-images + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_redact_image(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.RedactImageRequest( + ) + + # Make the request + response = client.redact_image(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.RedactImageRequest, dict]): + The request object. 
Request to search for potentially + sensitive info in an image and redact it + by covering it with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.RedactImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.RedactImageRequest): + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.redact_image] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def deidentify_content( + self, + request: Optional[Union[dlp.DeidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive-data + to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_deidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeidentifyContentRequest( + ) + + # Make the request + response = client.deidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.DeidentifyContentRequest, dict]): + The request object. Request to de-identify a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeidentifyContentRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeidentifyContentRequest): + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def reidentify_content( + self, + request: Optional[Union[dlp.ReidentifyContentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_reidentify_content(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ReidentifyContentRequest( + parent="parent_value", + ) + + # Make the request + response = client.reidentify_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ReidentifyContentRequest, dict]): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ReidentifyContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ReidentifyContentRequest): + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
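+        # Editorial note: the snippet below is an illustrative caller-side
+        # sketch, not generated code. The project, location, item value, and
+        # template resource name are assumptions for illustration; the
+        # ``ReidentifyContentRequest`` fields shown are real.
+        #
+        #   from google.cloud import dlp_v2
+        #
+        #   client = dlp_v2.DlpServiceClient()
+        #   request = dlp_v2.ReidentifyContentRequest(
+        #       parent="projects/example-project/locations/global",
+        #       item=dlp_v2.ContentItem(value="text with de-identified tokens"),
+        #       reidentify_template_name="projects/example-project/deidentifyTemplates/123",
+        #   )
+        #   print(client.reidentify_content(request=request).item.value)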
+ return response + + def list_info_types( + self, + request: Optional[Union[dlp.ListInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInfoTypesRequest( + ) + + # Make the request + response = client.list_info_types(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInfoTypesRequest, dict]): + The request object. Request for the list of infoTypes. + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListInfoTypesRequest): + request = dlp.ListInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_info_types] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
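+        # Editorial note: an illustrative caller-side sketch, not generated
+        # code. ``language_code`` is a real request field; the parent and
+        # language values given here are assumptions for illustration.
+        #
+        #   from google.cloud import dlp_v2
+        #
+        #   client = dlp_v2.DlpServiceClient()
+        #   response = client.list_info_types(
+        #       request=dlp_v2.ListInfoTypesRequest(
+        #           parent="locations/europe-west2",
+        #           language_code="en-US",
+        #       )
+        #   )
+        #   for info_type in response.info_types:
+        #       print(info_type.name, "-", info_type.display_name)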
+ return response + + def create_inspect_template( + self, + request: Optional[Union[dlp.CreateInspectTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateInspectTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateInspectTemplateRequest, dict]): + The request object. Request message for + CreateInspectTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
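+        # Editorial note: in other words, ``request`` and the flattened
+        # ``parent``/``inspect_template`` keyword arguments are mutually
+        # exclusive. A sketch of the flattened calling style (the parent
+        # string and info type value are illustrative assumptions):
+        #
+        #   template = dlp_v2.InspectTemplate(
+        #       inspect_config=dlp_v2.InspectConfig(
+        #           info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
+        #       ),
+        #   )
+        #   created = client.create_inspect_template(
+        #       parent="projects/example-project/locations/global",
+        #       inspect_template=template,
+        #   )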
+ has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateInspectTemplateRequest): + request = dlp.CreateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_inspect_template( + self, + request: Optional[Union[dlp.UpdateInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + inspect_template: Optional[dlp.InspectTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateInspectTemplateRequest, dict]): + The request object. Request message for + UpdateInspectTemplate. + name (str): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateInspectTemplateRequest): + request = dlp.UpdateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_inspect_template( + self, + request: Optional[Union[dlp.GetInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetInspectTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_inspect_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetInspectTemplateRequest, dict]): + The request object. Request message for + GetInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetInspectTemplateRequest): + request = dlp.GetInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_inspect_templates( + self, + request: Optional[Union[dlp.ListInspectTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_inspect_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListInspectTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inspect_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListInspectTemplatesRequest, dict]): + The request object. Request message for + ListInspectTemplates. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInspectTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListInspectTemplatesRequest): + request = dlp.ListInspectTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
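+        # Editorial note: ``_wrapped_methods`` maps each transport method to a
+        # callable that already carries the service's default retry and timeout
+        # policy, so the ``retry``/``timeout`` arguments only override those
+        # defaults. A hedged caller-side sketch of such an override (the parent
+        # value and deadline are illustrative assumptions):
+        #
+        #   from google.api_core import retry as retries
+        #
+        #   pager = client.list_inspect_templates(
+        #       parent="projects/example-project/locations/global",
+        #       retry=retries.Retry(deadline=60),
+        #   )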
+ rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInspectTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_inspect_template( + self, + request: Optional[Union[dlp.DeleteInspectTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_inspect_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteInspectTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_inspect_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteInspectTemplateRequest, dict]): + The request object. Request message for + DeleteInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteInspectTemplateRequest): + request = dlp.DeleteInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
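+        # Editorial note: only one of ``request`` and ``name`` can reach this
+        # point (see the mutual-exclusion check above). A caller-side sketch of
+        # the flattened form, with an illustrative template resource name:
+        #
+        #   client.delete_inspect_template(
+        #       name="projects/example-project/inspectTemplates/432452342"
+        #   )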
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_deidentify_template( + self, + request: Optional[Union[dlp.CreateDeidentifyTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDeidentifyTemplateRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest, dict]): + The request object. Request message for + CreateDeidentifyTemplate. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): + request = dlp.CreateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_deidentify_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_deidentify_template( + self, + request: Optional[Union[dlp.UpdateDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + deidentify_template: Optional[dlp.DeidentifyTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.update_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest, dict]): + The request object. Request message for + UpdateDeidentifyTemplate. + name (str): + Required. 
Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): + request = dlp.UpdateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_deidentify_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deidentify_template( + self, + request: Optional[Union[dlp.GetDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_deidentify_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest, dict]): + The request object. Request message for + GetDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDeidentifyTemplateRequest): + request = dlp.GetDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_deidentify_templates( + self, + request: Optional[Union[dlp.ListDeidentifyTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesPager: + r"""Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_deidentify_templates(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDeidentifyTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deidentify_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest, dict]): + The request object. Request message for + ListDeidentifyTemplates. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDeidentifyTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): + request = dlp.ListDeidentifyTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
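+        # Editorial note: the pager returned below yields
+        # ``DeidentifyTemplate`` messages and resolves additional pages
+        # transparently during iteration. A caller-side sketch (the parent
+        # value is an illustrative assumption):
+        #
+        #   for template in client.list_deidentify_templates(
+        #       parent="projects/example-project/locations/global"
+        #   ):
+        #       print(template.name)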
+ rpc = self._transport._wrapped_methods[ + self._transport.list_deidentify_templates + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeidentifyTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_deidentify_template( + self, + request: Optional[Union[dlp.DeleteDeidentifyTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_deidentify_template(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDeidentifyTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_deidentify_template(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest, dict]): + The request object. Request message for + DeleteDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): + request = dlp.DeleteDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
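+        # Editorial note: besides populating the request, ``name`` is echoed
+        # into the ``x-goog-request-params`` routing header built further
+        # below, which is how the backend routes regional resources, e.g.
+        # (illustrative value only):
+        #
+        #   x-goog-request-params: name=projects/example-project/deidentifyTemplates/123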
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_deidentify_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_job_trigger( + self, + request: Optional[Union[dlp.CreateJobTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + job_trigger = dlp_v2.JobTrigger() + job_trigger.status = "CANCELLED" + + request = dlp_v2.CreateJobTriggerRequest( + parent="parent_value", + job_trigger=job_trigger, + ) + + # Make the request + response = client.create_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateJobTriggerRequest, dict]): + The request object. Request message for CreateJobTrigger. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateJobTriggerRequest): + request = dlp.CreateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_job_trigger( + self, + request: Optional[Union[dlp.UpdateJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + job_trigger: Optional[dlp.JobTrigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.update_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateJobTriggerRequest, dict]): + The request object. Request message for UpdateJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. 
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.JobTrigger:
+                Contains a configuration to make dlp
+                api calls on a repeating basis. See
+                https://cloud.google.com/dlp/docs/concepts-job-triggers
+                to learn more.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, job_trigger, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.UpdateJobTriggerRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.UpdateJobTriggerRequest):
+            request = dlp.UpdateJobTriggerRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+        if job_trigger is not None:
+            request.job_trigger = job_trigger
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_job_trigger]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def hybrid_inspect_job_trigger(
+        self,
+        request: Optional[Union[dlp.HybridInspectJobTriggerRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> dlp.HybridInspectResponse:
+        r"""Inspect hybrid content and store findings to a
+        trigger. The inspection will be processed
+        asynchronously. To review the findings, monitor the jobs
+        within the trigger.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectJobTriggerRequest): + request = dlp.HybridInspectJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.hybrid_inspect_job_trigger + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job_trigger( + self, + request: Optional[Union[dlp.GetJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetJobTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetJobTriggerRequest, dict]): + The request object. Request message for GetJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetJobTriggerRequest): + request = dlp.GetJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_job_triggers( + self, + request: Optional[Union[dlp.ListJobTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersPager: + r"""Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_job_triggers(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListJobTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListJobTriggersRequest, dict]): + The request object. Request message for ListJobTriggers. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: + Response message for ListJobTriggers. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListJobTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListJobTriggersRequest): + request = dlp.ListJobTriggersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTriggersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
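+        # (Illustrative note: the pager is lazy. Iterating it, e.g.
+        # ``for trigger in client.list_job_triggers(parent=parent)``, re-invokes
+        # this wrapped ``rpc`` with an updated page token only when iteration
+        # crosses a page boundary.)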
+ return response + + def delete_job_trigger( + self, + request: Optional[Union[dlp.DeleteJobTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_job_trigger(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteJobTriggerRequest( + name="name_value", + ) + + # Make the request + client.delete_job_trigger(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteJobTriggerRequest, dict]): + The request object. Request message for DeleteJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteJobTriggerRequest): + request = dlp.DeleteJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def activate_job_trigger( + self, + request: Optional[Union[dlp.ActivateJobTriggerRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. 
Causes the immediate execution
+        of a trigger instead of waiting for the trigger event to
+        occur.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dlp_v2
+
+            def sample_activate_job_trigger():
+                # Create a client
+                client = dlp_v2.DlpServiceClient()
+
+                # Initialize request argument(s)
+                request = dlp_v2.ActivateJobTriggerRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.activate_job_trigger(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dlp_v2.types.ActivateJobTriggerRequest, dict]):
+                The request object. Request message for
+                ActivateJobTrigger.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.DlpJob:
+                Combines all of the information about
+                a DLP job.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.ActivateJobTriggerRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.ActivateJobTriggerRequest):
+            request = dlp.ActivateJobTriggerRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_dlp_job(
+        self,
+        request: Optional[Union[dlp.CreateDlpJobRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        inspect_job: Optional[dlp.InspectJobConfig] = None,
+        risk_job: Optional[dlp.RiskAnalysisJobConfig] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> dlp.DlpJob:
+        r"""Creates a new job to inspect storage or calculate
+        risk metrics. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        inspect jobs, the system will automatically choose what
+        detectors to run. By default, this may be all types, but
+        may change over time as detectors are updated.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateDlpJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateDlpJobRequest, dict]): + The request object. Request message for + CreateDlpJobRequest. Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage + repository for InfoTypes. + + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a + BigQuery table. + + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDlpJobRequest): + request = dlp.CreateDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
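+        # (Illustrative note: the wrapped method carries the default retry and
+        # timeout policy configured when the transport was built; an explicit
+        # ``retry=`` or ``timeout=`` argument to this call overrides those
+        # defaults.)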
+ rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_dlp_jobs( + self, + request: Optional[Union[dlp.ListDlpJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_dlp_jobs(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListDlpJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dlp_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListDlpJobsRequest, dict]): + The request object. The request message for listing DLP + jobs. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDlpJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDlpJobsRequest): + request = dlp.ListDlpJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDlpJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dlp_job( + self, + request: Optional[Union[dlp.GetDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetDlpJobRequest, dict]): + The request object. The request message for [DlpJobs.GetDlpJob][]. + name (str): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDlpJobRequest): + request = dlp.GetDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dlp_job( + self, + request: Optional[Union[dlp.DeleteDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteDlpJobRequest( + name="name_value", + ) + + # Make the request + client.delete_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteDlpJobRequest, dict]): + The request object. The request message for deleting a + DLP job. + name (str): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
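+        # (Illustrative note: a plain ``dict`` is accepted here as well; the
+        # proto-plus constructor below coerces it field by field, and unknown
+        # keys are rejected.)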
+ if not isinstance(request, dlp.DeleteDlpJobRequest): + request = dlp.DeleteDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_dlp_job( + self, + request: Optional[Union[dlp.CancelDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_cancel_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CancelDlpJobRequest( + name="name_value", + ) + + # Make the request + client.cancel_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.CancelDlpJobRequest, dict]): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CancelDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CancelDlpJobRequest): + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
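+        # (Illustrative note: CancelDlpJob returns ``google.protobuf.Empty``,
+        # so nothing is returned to the caller; cancellation is asynchronous
+        # and best effort, as described in the docstring above.)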
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_stored_info_type( + self, + request: Optional[Union[dlp.CreateStoredInfoTypeRequest, dict]] = None, + *, + parent: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_create_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.CreateStoredInfoTypeRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest, dict]): + The request object. Request message for + CreateStoredInfoType. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the + storedInfoType to create. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
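+        # (Illustrative example of the guard below: calling
+        # ``client.create_stored_info_type(request=req, parent="...")`` with
+        # both a request object and a flattened field raises ValueError.)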
+ has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateStoredInfoTypeRequest): + request = dlp.CreateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_stored_info_type( + self, + request: Optional[Union[dlp.UpdateStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + config: Optional[dlp.StoredInfoTypeConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_update_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.UpdateStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.update_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest, dict]): + The request object. Request message for + UpdateStoredInfoType. + name (str): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. 
+ + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): + request = dlp.UpdateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_stored_info_type( + self, + request: Optional[Union[dlp.GetStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_get_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.GetStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + response = client.get_stored_info_type(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.GetStoredInfoTypeRequest, dict]): + The request object. Request message for + GetStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetStoredInfoTypeRequest): + request = dlp.GetStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_stored_info_types( + self, + request: Optional[Union[dlp.ListStoredInfoTypesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListStoredInfoTypesPager: + r"""Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_list_stored_info_types(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.ListStoredInfoTypesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_stored_info_types(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.ListStoredInfoTypesRequest, dict]): + The request object. Request message for + ListStoredInfoTypes. + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListStoredInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListStoredInfoTypesRequest): + request = dlp.ListStoredInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
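+        # (Illustrative note: besides ``__iter__``, the pager constructed
+        # below also exposes a ``pages`` property for page-at-a-time
+        # iteration, e.g. ``for page in response.pages: ...``.)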
+ response = pagers.ListStoredInfoTypesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_stored_info_type( + self, + request: Optional[Union[dlp.DeleteStoredInfoTypeRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_delete_stored_info_type(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.DeleteStoredInfoTypeRequest( + name="name_value", + ) + + # Make the request + client.delete_stored_info_type(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest, dict]): + The request object. Request message for + DeleteStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): + request = dlp.DeleteStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def hybrid_inspect_dlp_job( + self, + request: Optional[Union[dlp.HybridInspectDlpJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_hybrid_inspect_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.HybridInspectDlpJobRequest( + name="name_value", + ) + + # Make the request + response = client.hybrid_inspect_dlp_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dlp_v2.types.HybridInspectDlpJobRequest, dict]): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectDlpJobRequest): + request = dlp.HybridInspectDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
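The `has_flattened_params` guard that opens each method enforces mutual exclusion between the `request` object and the flattened field arguments. A sketch of the two calling styles; the resource name is illustrative:

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    name = "projects/example-project/storedInfoTypes/432452342"

    # One call, two equivalent spellings; pick exactly one:
    client.delete_stored_info_type(name=name)  # flattened argument
    # client.delete_stored_info_type(
    #     request=dlp_v2.DeleteStoredInfoTypeRequest(name=name)
    # )  # full request object

    # Passing both `request` and `name` raises ValueError before any RPC is sent.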
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def finish_dlp_job( + self, + request: Optional[Union[dlp.FinishDlpJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dlp_v2 + + def sample_finish_dlp_job(): + # Create a client + client = dlp_v2.DlpServiceClient() + + # Initialize request argument(s) + request = dlp_v2.FinishDlpJobRequest( + name="name_value", + ) + + # Make the request + client.finish_dlp_job(request=request) + + Args: + request (Union[google.cloud.dlp_v2.types.FinishDlpJobRequest, dict]): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.FinishDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.FinishDlpJobRequest): + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "DlpServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DlpServiceClient",) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/pagers.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/pagers.py new file mode 100644 index 000000000000..e10a300d0073 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/pagers.py @@ -0,0 +1,667 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.dlp_v2.types import dlp + + +class ListInspectTemplatesPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``inspect_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListInspectTemplatesResponse], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.InspectTemplate]: + for page in self.pages: + yield from page.inspect_templates + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInspectTemplatesAsyncPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``inspect_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
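Besides plain iteration, the `pages` property yields each raw response, and `__getattr__` delegates attribute lookups to the most recent response. A short sketch against the sync pager; the parent value is illustrative:

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    pager = client.list_inspect_templates(parent="projects/example-project")

    for page in pager.pages:
        # Each `page` is a full ListInspectTemplatesResponse.
        for template in page.inspect_templates:
            print(template.name)

    # Attribute access falls through to the most recent response;
    # this is the empty string once the last page has been fetched.
    print(pager.next_page_token)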
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[dlp.InspectTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.inspect_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListDeidentifyTemplatesResponse], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
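The async pager is driven with `async for`. This sketch assumes the generated async companion client, `DlpServiceAsyncClient`; the parent value is illustrative:

    import asyncio

    from google.cloud import dlp_v2


    async def main() -> None:
        client = dlp_v2.DlpServiceAsyncClient()
        pager = await client.list_inspect_templates(parent="projects/example-project")
        # __aiter__ issues further ListInspectTemplates requests as pages run out.
        async for template in pager:
            print(template.name)


    asyncio.run(main())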
+ """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DeidentifyTemplate]: + for page in self.pages: + yield from page.deidentify_templates + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesAsyncPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[dlp.DeidentifyTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.deidentify_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobTriggersPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_triggers`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListJobTriggersResponse], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.JobTrigger]: + for page in self.pages: + yield from page.job_triggers + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobTriggersAsyncPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[dlp.JobTrigger]: + async def async_generator(): + async for page in self.pages: + for response in page.job_triggers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDlpJobsPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListDlpJobsResponse], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.DlpJob]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDlpJobsAsyncPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[dlp.DlpJob]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListStoredInfoTypesResponse], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
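Because a pager re-sends the stored request with only `page_token` updated, options set on the initial request apply to every page. A sketch with ListDlpJobs; the parent and filter values are illustrative:

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    request = dlp_v2.ListDlpJobsRequest(
        parent="projects/example-project",
        filter="state=DONE",  # server-side filter, applied to all pages
        page_size=50,
    )
    for job in client.list_dlp_jobs(request=request):
        print(job.name, job.state)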
+ """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[dlp.StoredInfoType]: + for page in self.pages: + yield from page.stored_info_types + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesAsyncPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[dlp.StoredInfoType]: + async def async_generator(): + async for page in self.pages: + for response in page.stored_info_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py new file mode 100644 index 000000000000..2b918558f70c --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DlpServiceTransport +from .grpc import DlpServiceGrpcTransport +from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .rest import DlpServiceRestInterceptor, DlpServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] +_transport_registry["grpc"] = DlpServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DlpServiceRestTransport + +__all__ = ( + "DlpServiceTransport", + "DlpServiceGrpcTransport", + "DlpServiceGrpcAsyncIOTransport", + "DlpServiceRestTransport", + "DlpServiceRestInterceptor", +) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/base.py new file mode 100644 index 000000000000..81349ff61312 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -0,0 +1,814 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+from google.cloud.dlp_v2 import gapic_version as package_version
+from google.cloud.dlp_v2.types import dlp
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+class DlpServiceTransport(abc.ABC):
+    """Abstract transport class for DlpService."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    DEFAULT_HOST: str = "dlp.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if the credentials file was passed by the user.
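The constructor resolves credentials in a fixed order: explicit `credentials`, then `credentials_file` (passing both raises `DuplicateCredentialArgs`), then Application Default Credentials. A sketch of supplying ADC explicitly, using the scope from `AUTH_SCOPES` above:

    import google.auth
    from google.cloud import dlp_v2

    # Same fallback the transport performs internally when no credentials
    # are given; scope value taken from DlpServiceTransport.AUTH_SCOPES.
    credentials, _ = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )
    client = dlp_v2.DlpServiceClient(credentials=credentials)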
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.inspect_content: gapic_v1.method.wrap_method( + self.inspect_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.redact_image: gapic_v1.method.wrap_method( + self.redact_image, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.deidentify_content: gapic_v1.method.wrap_method( + self.deidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.reidentify_content: gapic_v1.method.wrap_method( + self.reidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_info_types: gapic_v1.method.wrap_method( + self.list_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_inspect_template: gapic_v1.method.wrap_method( + self.create_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_inspect_template: gapic_v1.method.wrap_method( + self.update_inspect_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_inspect_template: gapic_v1.method.wrap_method( + self.get_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_inspect_templates: gapic_v1.method.wrap_method( + self.list_inspect_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + 
default_timeout=300.0, + client_info=client_info, + ), + self.delete_inspect_template: gapic_v1.method.wrap_method( + self.delete_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_deidentify_template: gapic_v1.method.wrap_method( + self.create_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.update_deidentify_template: gapic_v1.method.wrap_method( + self.update_deidentify_template, + default_timeout=300.0, + client_info=client_info, + ), + self.get_deidentify_template: gapic_v1.method.wrap_method( + self.get_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_deidentify_templates: gapic_v1.method.wrap_method( + self.list_deidentify_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_deidentify_template: gapic_v1.method.wrap_method( + self.delete_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.create_job_trigger: gapic_v1.method.wrap_method( + self.create_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.update_job_trigger: gapic_v1.method.wrap_method( + self.update_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( + self.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.get_job_trigger: gapic_v1.method.wrap_method( + self.get_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_job_triggers: gapic_v1.method.wrap_method( + self.list_job_triggers, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_job_trigger: gapic_v1.method.wrap_method( + self.delete_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.activate_job_trigger: gapic_v1.method.wrap_method( + self.activate_job_trigger, + default_timeout=300.0, + client_info=client_info, + ), + self.create_dlp_job: 
gapic_v1.method.wrap_method( + self.create_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.list_dlp_jobs: gapic_v1.method.wrap_method( + self.list_dlp_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.get_dlp_job: gapic_v1.method.wrap_method( + self.get_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_dlp_job: gapic_v1.method.wrap_method( + self.delete_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.cancel_dlp_job: gapic_v1.method.wrap_method( + self.cancel_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.create_stored_info_type: gapic_v1.method.wrap_method( + self.create_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.update_stored_info_type: gapic_v1.method.wrap_method( + self.update_stored_info_type, + default_timeout=300.0, + client_info=client_info, + ), + self.get_stored_info_type: gapic_v1.method.wrap_method( + self.get_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.list_stored_info_types: gapic_v1.method.wrap_method( + self.list_stored_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.delete_stored_info_type: gapic_v1.method.wrap_method( + self.delete_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=client_info, + ), + self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( + self.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.finish_dlp_job: gapic_v1.method.wrap_method( + self.finish_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
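The retry and timeout defaults wired in above apply per method, and any `retry` or `timeout` passed at call time overrides them. A sketch using the same retryable error types; the deadline and timeout values are illustrative:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    # Same retryable codes as the defaults above, but a tighter deadline.
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=120.0,
    )
    response = client.list_info_types(retry=custom_retry, timeout=120.0)
    print(len(response.info_types))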
+ """ + raise NotImplementedError() + + @property + def inspect_content( + self, + ) -> Callable[ + [dlp.InspectContentRequest], + Union[dlp.InspectContentResponse, Awaitable[dlp.InspectContentResponse]], + ]: + raise NotImplementedError() + + @property + def redact_image( + self, + ) -> Callable[ + [dlp.RedactImageRequest], + Union[dlp.RedactImageResponse, Awaitable[dlp.RedactImageResponse]], + ]: + raise NotImplementedError() + + @property + def deidentify_content( + self, + ) -> Callable[ + [dlp.DeidentifyContentRequest], + Union[dlp.DeidentifyContentResponse, Awaitable[dlp.DeidentifyContentResponse]], + ]: + raise NotImplementedError() + + @property + def reidentify_content( + self, + ) -> Callable[ + [dlp.ReidentifyContentRequest], + Union[dlp.ReidentifyContentResponse, Awaitable[dlp.ReidentifyContentResponse]], + ]: + raise NotImplementedError() + + @property + def list_info_types( + self, + ) -> Callable[ + [dlp.ListInfoTypesRequest], + Union[dlp.ListInfoTypesResponse, Awaitable[dlp.ListInfoTypesResponse]], + ]: + raise NotImplementedError() + + @property + def create_inspect_template( + self, + ) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Union[dlp.InspectTemplate, Awaitable[dlp.InspectTemplate]], + ]: + raise NotImplementedError() + + @property + def update_inspect_template( + self, + ) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Union[dlp.InspectTemplate, Awaitable[dlp.InspectTemplate]], + ]: + raise NotImplementedError() + + @property + def get_inspect_template( + self, + ) -> Callable[ + [dlp.GetInspectTemplateRequest], + Union[dlp.InspectTemplate, Awaitable[dlp.InspectTemplate]], + ]: + raise NotImplementedError() + + @property + def list_inspect_templates( + self, + ) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Union[ + dlp.ListInspectTemplatesResponse, + Awaitable[dlp.ListInspectTemplatesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_inspect_template( + self, + ) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def create_deidentify_template( + self, + ) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Union[dlp.DeidentifyTemplate, Awaitable[dlp.DeidentifyTemplate]], + ]: + raise NotImplementedError() + + @property + def update_deidentify_template( + self, + ) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Union[dlp.DeidentifyTemplate, Awaitable[dlp.DeidentifyTemplate]], + ]: + raise NotImplementedError() + + @property + def get_deidentify_template( + self, + ) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Union[dlp.DeidentifyTemplate, Awaitable[dlp.DeidentifyTemplate]], + ]: + raise NotImplementedError() + + @property + def list_deidentify_templates( + self, + ) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Union[ + dlp.ListDeidentifyTemplatesResponse, + Awaitable[dlp.ListDeidentifyTemplatesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_deidentify_template( + self, + ) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def create_job_trigger( + self, + ) -> Callable[ + [dlp.CreateJobTriggerRequest], Union[dlp.JobTrigger, Awaitable[dlp.JobTrigger]] + ]: + raise NotImplementedError() + + @property + def update_job_trigger( + self, + ) -> Callable[ + [dlp.UpdateJobTriggerRequest], Union[dlp.JobTrigger, Awaitable[dlp.JobTrigger]] + ]: + raise 
NotImplementedError() + + @property + def hybrid_inspect_job_trigger( + self, + ) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Union[dlp.HybridInspectResponse, Awaitable[dlp.HybridInspectResponse]], + ]: + raise NotImplementedError() + + @property + def get_job_trigger( + self, + ) -> Callable[ + [dlp.GetJobTriggerRequest], Union[dlp.JobTrigger, Awaitable[dlp.JobTrigger]] + ]: + raise NotImplementedError() + + @property + def list_job_triggers( + self, + ) -> Callable[ + [dlp.ListJobTriggersRequest], + Union[dlp.ListJobTriggersResponse, Awaitable[dlp.ListJobTriggersResponse]], + ]: + raise NotImplementedError() + + @property + def delete_job_trigger( + self, + ) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def activate_job_trigger( + self, + ) -> Callable[ + [dlp.ActivateJobTriggerRequest], Union[dlp.DlpJob, Awaitable[dlp.DlpJob]] + ]: + raise NotImplementedError() + + @property + def create_dlp_job( + self, + ) -> Callable[[dlp.CreateDlpJobRequest], Union[dlp.DlpJob, Awaitable[dlp.DlpJob]]]: + raise NotImplementedError() + + @property + def list_dlp_jobs( + self, + ) -> Callable[ + [dlp.ListDlpJobsRequest], + Union[dlp.ListDlpJobsResponse, Awaitable[dlp.ListDlpJobsResponse]], + ]: + raise NotImplementedError() + + @property + def get_dlp_job( + self, + ) -> Callable[[dlp.GetDlpJobRequest], Union[dlp.DlpJob, Awaitable[dlp.DlpJob]]]: + raise NotImplementedError() + + @property + def delete_dlp_job( + self, + ) -> Callable[ + [dlp.DeleteDlpJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def cancel_dlp_job( + self, + ) -> Callable[ + [dlp.CancelDlpJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def create_stored_info_type( + self, + ) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Union[dlp.StoredInfoType, Awaitable[dlp.StoredInfoType]], + ]: + raise NotImplementedError() + + @property + def update_stored_info_type( + self, + ) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Union[dlp.StoredInfoType, Awaitable[dlp.StoredInfoType]], + ]: + raise NotImplementedError() + + @property + def get_stored_info_type( + self, + ) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Union[dlp.StoredInfoType, Awaitable[dlp.StoredInfoType]], + ]: + raise NotImplementedError() + + @property + def list_stored_info_types( + self, + ) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Union[ + dlp.ListStoredInfoTypesResponse, Awaitable[dlp.ListStoredInfoTypesResponse] + ], + ]: + raise NotImplementedError() + + @property + def delete_stored_info_type( + self, + ) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def hybrid_inspect_dlp_job( + self, + ) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Union[dlp.HybridInspectResponse, Awaitable[dlp.HybridInspectResponse]], + ]: + raise NotImplementedError() + + @property + def finish_dlp_job( + self, + ) -> Callable[ + [dlp.FinishDlpJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DlpServiceTransport",) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py 
b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py new file mode 100644 index 000000000000..ad86d1a36cca --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -0,0 +1,1257 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.dlp_v2.types import dlp + +from .base import DEFAULT_CLIENT_INFO, DlpServiceTransport + + +class DlpServiceGrpcTransport(DlpServiceTransport): + """gRPC backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in + user-supplied, unstructured data streams, like text blocks or + images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dlp.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
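The transport classes defined here are selected by name through the registry in `transports/__init__.py` shown earlier, and the client constructor accepts the same strings. A sketch:

    from google.cloud import dlp_v2

    # "grpc" is the default; "grpc_asyncio" backs the async client, and
    # "rest" selects the HTTP/JSON transport.
    grpc_client = dlp_v2.DlpServiceClient(transport="grpc")
    rest_client = dlp_v2.DlpServiceClient(transport="rest")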
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
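As the constructor notes, a pre-built `channel` causes the credential arguments to be ignored. A sketch of supplying one explicitly; the host shown is the default endpoint:

    from google.cloud import dlp_v2
    from google.cloud.dlp_v2.services.dlp_service.transports import (
        DlpServiceGrpcTransport,
    )

    # Build a channel up front (uses Application Default Credentials here),
    # then hand it to the transport and the transport to the client.
    channel = DlpServiceGrpcTransport.create_channel("dlp.googleapis.com:443")
    transport = DlpServiceGrpcTransport(channel=channel)
    client = dlp_v2.DlpServiceClient(transport=transport)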
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dlp.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def inspect_content(
+        self,
+    ) -> Callable[[dlp.InspectContentRequest], dlp.InspectContentResponse]:
+        r"""Return a callable for the inspect content method over gRPC.
+
+        Finds potentially sensitive info in content.
+        This method has limits on input size, processing time,
+        and output size.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        For how-to guides, see
+        https://cloud.google.com/dlp/docs/inspecting-images and
+        https://cloud.google.com/dlp/docs/inspecting-text.
+
+        Returns:
+            Callable[[~.InspectContentRequest],
+                    ~.InspectContentResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "inspect_content" not in self._stubs:
+            self._stubs["inspect_content"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/InspectContent",
+                request_serializer=dlp.InspectContentRequest.serialize,
+                response_deserializer=dlp.InspectContentResponse.deserialize,
+            )
+        return self._stubs["inspect_content"]
+
+    @property
+    def redact_image(
+        self,
+    ) -> Callable[[dlp.RedactImageRequest], dlp.RedactImageResponse]:
+        r"""Return a callable for the redact image method over gRPC.
+
+        Redacts potentially sensitive info from an image.
+        This method has limits on input size, processing time,
+        and output size. See
+        https://cloud.google.com/dlp/docs/redacting-sensitive-data-images
+        to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.RedactImageRequest],
+                    ~.RedactImageResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "redact_image" not in self._stubs:
+            self._stubs["redact_image"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/RedactImage",
+                request_serializer=dlp.RedactImageRequest.serialize,
+                response_deserializer=dlp.RedactImageResponse.deserialize,
+            )
+        return self._stubs["redact_image"]
+
+    @property
+    def deidentify_content(
+        self,
+    ) -> Callable[[dlp.DeidentifyContentRequest], dlp.DeidentifyContentResponse]:
+        r"""Return a callable for the deidentify content method over gRPC.
+
+        De-identifies potentially sensitive info from a
+        ContentItem. This method has limits on input size and
+        output size. See
+        https://cloud.google.com/dlp/docs/deidentify-sensitive-data
+        to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.DeidentifyContentRequest],
+                    ~.DeidentifyContentResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
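+        # Editor's note -- an illustrative sketch, not part of the generated
+        # file: each property on this transport returns the raw gRPC
+        # multicallable, so it can be invoked directly once the transport
+        # exists (names and values below are hypothetical):
+        #
+        #   transport = DlpServiceGrpcTransport()
+        #   response = transport.deidentify_content(
+        #       dlp.DeidentifyContentRequest(parent="projects/my-project")
+        #   )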
+ if "deidentify_content" not in self._stubs: + self._stubs["deidentify_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeidentifyContent", + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs["deidentify_content"] + + @property + def reidentify_content( + self, + ) -> Callable[[dlp.ReidentifyContentRequest], dlp.ReidentifyContentResponse]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + ~.ReidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reidentify_content" not in self._stubs: + self._stubs["reidentify_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ReidentifyContent", + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs["reidentify_content"] + + @property + def list_info_types( + self, + ) -> Callable[[dlp.ListInfoTypesRequest], dlp.ListInfoTypesResponse]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + ~.ListInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_info_types" not in self._stubs: + self._stubs["list_info_types"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListInfoTypes", + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs["list_info_types"] + + @property + def create_inspect_template( + self, + ) -> Callable[[dlp.CreateInspectTemplateRequest], dlp.InspectTemplate]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
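+        # Editor's note: every stub in this file is registered under the fully
+        # qualified proto path "/google.privacy.dlp.v2.DlpService/<Method>",
+        # pairing the request type's ``serialize`` hook with the response
+        # type's ``deserialize`` hook so proto-plus messages cross the wire.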
+ if "create_inspect_template" not in self._stubs: + self._stubs["create_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateInspectTemplate", + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["create_inspect_template"] + + @property + def update_inspect_template( + self, + ) -> Callable[[dlp.UpdateInspectTemplateRequest], dlp.InspectTemplate]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_inspect_template" not in self._stubs: + self._stubs["update_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate", + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["update_inspect_template"] + + @property + def get_inspect_template( + self, + ) -> Callable[[dlp.GetInspectTemplateRequest], dlp.InspectTemplate]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_inspect_template" not in self._stubs: + self._stubs["get_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetInspectTemplate", + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["get_inspect_template"] + + @property + def list_inspect_templates( + self, + ) -> Callable[[dlp.ListInspectTemplatesRequest], dlp.ListInspectTemplatesResponse]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + ~.ListInspectTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
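+        # Editor's note -- a hedged sketch: this stub returns one raw
+        # ListInspectTemplatesResponse per call; the GAPIC client layers pager
+        # objects on top, but pages can also be walked by hand via
+        # ``next_page_token`` (parent value hypothetical):
+        #
+        #   request = dlp.ListInspectTemplatesRequest(parent="projects/my-project")
+        #   while True:
+        #       page = transport.list_inspect_templates(request)
+        #       # ... consume page.inspect_templates ...
+        #       if not page.next_page_token:
+        #           break
+        #       request.page_token = page.next_page_token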
+ if "list_inspect_templates" not in self._stubs: + self._stubs["list_inspect_templates"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListInspectTemplates", + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs["list_inspect_templates"] + + @property + def delete_inspect_template( + self, + ) -> Callable[[dlp.DeleteInspectTemplateRequest], empty_pb2.Empty]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_inspect_template" not in self._stubs: + self._stubs["delete_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_inspect_template"] + + @property + def create_deidentify_template( + self, + ) -> Callable[[dlp.CreateDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deidentify_template" not in self._stubs: + self._stubs["create_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate", + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["create_deidentify_template"] + + @property + def update_deidentify_template( + self, + ) -> Callable[[dlp.UpdateDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_deidentify_template" not in self._stubs: + self._stubs["update_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate", + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["update_deidentify_template"] + + @property + def get_deidentify_template( + self, + ) -> Callable[[dlp.GetDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deidentify_template" not in self._stubs: + self._stubs["get_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate", + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["get_deidentify_template"] + + @property + def list_deidentify_templates( + self, + ) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], dlp.ListDeidentifyTemplatesResponse + ]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + ~.ListDeidentifyTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deidentify_templates" not in self._stubs: + self._stubs["list_deidentify_templates"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates", + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs["list_deidentify_templates"] + + @property + def delete_deidentify_template( + self, + ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], empty_pb2.Empty]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
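+        # Editor's note: delete-style RPCs respond with the plain protobuf
+        # ``google.protobuf.Empty``, so the deserializer below is
+        # ``empty_pb2.Empty.FromString`` rather than the ``.deserialize``
+        # hook used by the proto-plus response types above.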
+        if "delete_deidentify_template" not in self._stubs:
+            self._stubs["delete_deidentify_template"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate",
+                request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_deidentify_template"]
+
+    @property
+    def create_job_trigger(
+        self,
+    ) -> Callable[[dlp.CreateJobTriggerRequest], dlp.JobTrigger]:
+        r"""Return a callable for the create job trigger method over gRPC.
+
+        Creates a job trigger to run DLP actions such as
+        scanning storage for sensitive information on a set
+        schedule. See
+        https://cloud.google.com/dlp/docs/creating-job-triggers
+        to learn more.
+
+        Returns:
+            Callable[[~.CreateJobTriggerRequest],
+                    ~.JobTrigger]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_job_trigger" not in self._stubs:
+            self._stubs["create_job_trigger"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/CreateJobTrigger",
+                request_serializer=dlp.CreateJobTriggerRequest.serialize,
+                response_deserializer=dlp.JobTrigger.deserialize,
+            )
+        return self._stubs["create_job_trigger"]
+
+    @property
+    def update_job_trigger(
+        self,
+    ) -> Callable[[dlp.UpdateJobTriggerRequest], dlp.JobTrigger]:
+        r"""Return a callable for the update job trigger method over gRPC.
+
+        Updates a job trigger.
+        See
+        https://cloud.google.com/dlp/docs/creating-job-triggers
+        to learn more.
+
+        Returns:
+            Callable[[~.UpdateJobTriggerRequest],
+                    ~.JobTrigger]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_job_trigger" not in self._stubs:
+            self._stubs["update_job_trigger"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/UpdateJobTrigger",
+                request_serializer=dlp.UpdateJobTriggerRequest.serialize,
+                response_deserializer=dlp.JobTrigger.deserialize,
+            )
+        return self._stubs["update_job_trigger"]
+
+    @property
+    def hybrid_inspect_job_trigger(
+        self,
+    ) -> Callable[[dlp.HybridInspectJobTriggerRequest], dlp.HybridInspectResponse]:
+        r"""Return a callable for the hybrid inspect job trigger method over gRPC.
+
+        Inspect hybrid content and store findings to a
+        trigger. The inspection will be processed
+        asynchronously. To review the findings, monitor the jobs
+        within the trigger.
+
+        Returns:
+            Callable[[~.HybridInspectJobTriggerRequest],
+                    ~.HybridInspectResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "hybrid_inspect_job_trigger" not in self._stubs: + self._stubs["hybrid_inspect_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger", + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs["hybrid_inspect_job_trigger"] + + @property + def get_job_trigger(self) -> Callable[[dlp.GetJobTriggerRequest], dlp.JobTrigger]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job_trigger" not in self._stubs: + self._stubs["get_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetJobTrigger", + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["get_job_trigger"] + + @property + def list_job_triggers( + self, + ) -> Callable[[dlp.ListJobTriggersRequest], dlp.ListJobTriggersResponse]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + ~.ListJobTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_job_triggers" not in self._stubs: + self._stubs["list_job_triggers"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListJobTriggers", + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs["list_job_triggers"] + + @property + def delete_job_trigger( + self, + ) -> Callable[[dlp.DeleteJobTriggerRequest], empty_pb2.Empty]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job_trigger" not in self._stubs: + self._stubs["delete_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_job_trigger"] + + @property + def activate_job_trigger( + self, + ) -> Callable[[dlp.ActivateJobTriggerRequest], dlp.DlpJob]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. 
Causes the immediate execution
+        of a trigger instead of waiting on the trigger event to occur.
+
+        Returns:
+            Callable[[~.ActivateJobTriggerRequest],
+                    ~.DlpJob]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "activate_job_trigger" not in self._stubs:
+            self._stubs["activate_job_trigger"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/ActivateJobTrigger",
+                request_serializer=dlp.ActivateJobTriggerRequest.serialize,
+                response_deserializer=dlp.DlpJob.deserialize,
+            )
+        return self._stubs["activate_job_trigger"]
+
+    @property
+    def create_dlp_job(self) -> Callable[[dlp.CreateDlpJobRequest], dlp.DlpJob]:
+        r"""Return a callable for the create dlp job method over gRPC.
+
+        Creates a new job to inspect storage or calculate
+        risk metrics. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        inspect jobs, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.CreateDlpJobRequest],
+                    ~.DlpJob]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_dlp_job" not in self._stubs:
+            self._stubs["create_dlp_job"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/CreateDlpJob",
+                request_serializer=dlp.CreateDlpJobRequest.serialize,
+                response_deserializer=dlp.DlpJob.deserialize,
+            )
+        return self._stubs["create_dlp_job"]
+
+    @property
+    def list_dlp_jobs(
+        self,
+    ) -> Callable[[dlp.ListDlpJobsRequest], dlp.ListDlpJobsResponse]:
+        r"""Return a callable for the list dlp jobs method over gRPC.
+
+        Lists DlpJobs that match the specified filter in the
+        request. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+
+        Returns:
+            Callable[[~.ListDlpJobsRequest],
+                    ~.ListDlpJobsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_dlp_jobs" not in self._stubs:
+            self._stubs["list_dlp_jobs"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/ListDlpJobs",
+                request_serializer=dlp.ListDlpJobsRequest.serialize,
+                response_deserializer=dlp.ListDlpJobsResponse.deserialize,
+            )
+        return self._stubs["list_dlp_jobs"]
+
+    @property
+    def get_dlp_job(self) -> Callable[[dlp.GetDlpJobRequest], dlp.DlpJob]:
+        r"""Return a callable for the get dlp job method over gRPC.
+
+        Gets the latest state of a long-running DlpJob.
+        See https://cloud.google.com/dlp/docs/inspecting-storage
+        and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+
+        Returns:
+            Callable[[~.GetDlpJobRequest],
+                    ~.DlpJob]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dlp_job" not in self._stubs: + self._stubs["get_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetDlpJob", + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs["get_dlp_job"] + + @property + def delete_dlp_job(self) -> Callable[[dlp.DeleteDlpJobRequest], empty_pb2.Empty]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dlp_job" not in self._stubs: + self._stubs["delete_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dlp_job"] + + @property + def cancel_dlp_job(self) -> Callable[[dlp.CancelDlpJobRequest], empty_pb2.Empty]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_dlp_job" not in self._stubs: + self._stubs["cancel_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CancelDlpJob", + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_dlp_job"] + + @property + def create_stored_info_type( + self, + ) -> Callable[[dlp.CreateStoredInfoTypeRequest], dlp.StoredInfoType]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_stored_info_type" not in self._stubs: + self._stubs["create_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateStoredInfoType", + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["create_stored_info_type"] + + @property + def update_stored_info_type( + self, + ) -> Callable[[dlp.UpdateStoredInfoTypeRequest], dlp.StoredInfoType]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_stored_info_type" not in self._stubs: + self._stubs["update_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType", + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["update_stored_info_type"] + + @property + def get_stored_info_type( + self, + ) -> Callable[[dlp.GetStoredInfoTypeRequest], dlp.StoredInfoType]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_stored_info_type" not in self._stubs: + self._stubs["get_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetStoredInfoType", + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["get_stored_info_type"] + + @property + def list_stored_info_types( + self, + ) -> Callable[[dlp.ListStoredInfoTypesRequest], dlp.ListStoredInfoTypesResponse]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + ~.ListStoredInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_stored_info_types" not in self._stubs: + self._stubs["list_stored_info_types"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes", + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs["list_stored_info_types"] + + @property + def delete_stored_info_type( + self, + ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], empty_pb2.Empty]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_stored_info_type" not in self._stubs: + self._stubs["delete_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_stored_info_type"] + + @property + def hybrid_inspect_dlp_job( + self, + ) -> Callable[[dlp.HybridInspectDlpJobRequest], dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "hybrid_inspect_dlp_job" not in self._stubs: + self._stubs["hybrid_inspect_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob", + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs["hybrid_inspect_dlp_job"] + + @property + def finish_dlp_job(self) -> Callable[[dlp.FinishDlpJobRequest], empty_pb2.Empty]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
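+        # Editor's note -- an illustrative sketch, assuming a hybrid-enabled
+        # job config in the create request; values are hypothetical. A hybrid
+        # job is typically driven through three of these stubs in sequence:
+        #
+        #   job = transport.create_dlp_job(
+        #       dlp.CreateDlpJobRequest(parent="projects/my-project")
+        #   )
+        #   transport.hybrid_inspect_dlp_job(
+        #       dlp.HybridInspectDlpJobRequest(name=job.name)
+        #   )
+        #   transport.finish_dlp_job(dlp.FinishDlpJobRequest(name=job.name))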
+        if "finish_dlp_job" not in self._stubs:
+            self._stubs["finish_dlp_job"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/FinishDlpJob",
+                request_serializer=dlp.FinishDlpJobRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["finish_dlp_job"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("DlpServiceGrpcTransport",)
diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..3bc31823976f
--- /dev/null
+++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py
@@ -0,0 +1,1285 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dlp_v2.types import dlp
+
+from .base import DEFAULT_CLIENT_INFO, DlpServiceTransport
+from .grpc import DlpServiceGrpcTransport
+
+
+class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport):
+    """gRPC AsyncIO backend transport for DlpService.
+
+    The Cloud Data Loss Prevention (DLP) API is a service that
+    allows clients to detect the presence of Personally Identifiable
+    Information (PII) and other privacy-sensitive data in
+    user-supplied, unstructured data streams, like text blocks or
+    images.
+    The service also includes methods for sensitive data redaction
+    and scheduling of data scans on Google Cloud Platform-based data
+    sets.
+
+    To learn more about concepts and find how-to guides, see
+    https://cloud.google.com/dlp/docs/.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dlp.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "dlp.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
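+        # Editor's note: the channel is built once in ``__init__`` (either the
+        # caller-supplied ``channel`` or one made by ``create_channel``), so
+        # this property only ever returns that cached instance.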
+        return self._grpc_channel
+
+    @property
+    def inspect_content(
+        self,
+    ) -> Callable[[dlp.InspectContentRequest], Awaitable[dlp.InspectContentResponse]]:
+        r"""Return a callable for the inspect content method over gRPC.
+
+        Finds potentially sensitive info in content.
+        This method has limits on input size, processing time,
+        and output size.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        For how-to guides, see
+        https://cloud.google.com/dlp/docs/inspecting-images and
+        https://cloud.google.com/dlp/docs/inspecting-text.
+
+        Returns:
+            Callable[[~.InspectContentRequest],
+                    Awaitable[~.InspectContentResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "inspect_content" not in self._stubs:
+            self._stubs["inspect_content"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/InspectContent",
+                request_serializer=dlp.InspectContentRequest.serialize,
+                response_deserializer=dlp.InspectContentResponse.deserialize,
+            )
+        return self._stubs["inspect_content"]
+
+    @property
+    def redact_image(
+        self,
+    ) -> Callable[[dlp.RedactImageRequest], Awaitable[dlp.RedactImageResponse]]:
+        r"""Return a callable for the redact image method over gRPC.
+
+        Redacts potentially sensitive info from an image.
+        This method has limits on input size, processing time,
+        and output size. See
+        https://cloud.google.com/dlp/docs/redacting-sensitive-data-images
+        to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.RedactImageRequest],
+                    Awaitable[~.RedactImageResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "redact_image" not in self._stubs:
+            self._stubs["redact_image"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/RedactImage",
+                request_serializer=dlp.RedactImageRequest.serialize,
+                response_deserializer=dlp.RedactImageResponse.deserialize,
+            )
+        return self._stubs["redact_image"]
+
+    @property
+    def deidentify_content(
+        self,
+    ) -> Callable[
+        [dlp.DeidentifyContentRequest], Awaitable[dlp.DeidentifyContentResponse]
+    ]:
+        r"""Return a callable for the deidentify content method over gRPC.
+
+        De-identifies potentially sensitive info from a
+        ContentItem. This method has limits on input size and
+        output size. See
+        https://cloud.google.com/dlp/docs/deidentify-sensitive-data
+        to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.DeidentifyContentRequest],
+                    Awaitable[~.DeidentifyContentResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "deidentify_content" not in self._stubs: + self._stubs["deidentify_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeidentifyContent", + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs["deidentify_content"] + + @property + def reidentify_content( + self, + ) -> Callable[ + [dlp.ReidentifyContentRequest], Awaitable[dlp.ReidentifyContentResponse] + ]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + Awaitable[~.ReidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reidentify_content" not in self._stubs: + self._stubs["reidentify_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ReidentifyContent", + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs["reidentify_content"] + + @property + def list_info_types( + self, + ) -> Callable[[dlp.ListInfoTypesRequest], Awaitable[dlp.ListInfoTypesResponse]]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + Awaitable[~.ListInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_info_types" not in self._stubs: + self._stubs["list_info_types"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListInfoTypes", + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs["list_info_types"] + + @property + def create_inspect_template( + self, + ) -> Callable[[dlp.CreateInspectTemplateRequest], Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for reusing frequently + used configuration for inspecting content, images, and + storage. See + https://cloud.google.com/dlp/docs/creating-templates to + learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
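+        # Editor's note -- a sketch with hypothetical values: on this AsyncIO
+        # transport each multicallable returns an awaitable call object, e.g.:
+        #
+        #   transport = DlpServiceGrpcAsyncIOTransport()
+        #   template = await transport.create_inspect_template(
+        #       dlp.CreateInspectTemplateRequest(parent="projects/my-project")
+        #   )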
+ if "create_inspect_template" not in self._stubs: + self._stubs["create_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateInspectTemplate", + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["create_inspect_template"] + + @property + def update_inspect_template( + self, + ) -> Callable[[dlp.UpdateInspectTemplateRequest], Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_inspect_template" not in self._stubs: + self._stubs["update_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate", + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["update_inspect_template"] + + @property + def get_inspect_template( + self, + ) -> Callable[[dlp.GetInspectTemplateRequest], Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_inspect_template" not in self._stubs: + self._stubs["get_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetInspectTemplate", + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["get_inspect_template"] + + @property + def list_inspect_templates( + self, + ) -> Callable[ + [dlp.ListInspectTemplatesRequest], Awaitable[dlp.ListInspectTemplatesResponse] + ]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + Awaitable[~.ListInspectTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_inspect_templates" not in self._stubs: + self._stubs["list_inspect_templates"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListInspectTemplates", + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs["list_inspect_templates"] + + @property + def delete_inspect_template( + self, + ) -> Callable[[dlp.DeleteInspectTemplateRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_inspect_template" not in self._stubs: + self._stubs["delete_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_inspect_template"] + + @property + def create_deidentify_template( + self, + ) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], Awaitable[dlp.DeidentifyTemplate] + ]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for reusing frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deidentify_template" not in self._stubs: + self._stubs["create_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate", + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["create_deidentify_template"] + + @property + def update_deidentify_template( + self, + ) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], Awaitable[dlp.DeidentifyTemplate] + ]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_deidentify_template" not in self._stubs: + self._stubs["update_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate", + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["update_deidentify_template"] + + @property + def get_deidentify_template( + self, + ) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], Awaitable[dlp.DeidentifyTemplate] + ]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deidentify_template" not in self._stubs: + self._stubs["get_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate", + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["get_deidentify_template"] + + @property + def list_deidentify_templates( + self, + ) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Awaitable[dlp.ListDeidentifyTemplatesResponse], + ]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + Awaitable[~.ListDeidentifyTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deidentify_templates" not in self._stubs: + self._stubs["list_deidentify_templates"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates", + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs["list_deidentify_templates"] + + @property + def delete_deidentify_template( + self, + ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See + https://cloud.google.com/dlp/docs/creating-templates-deid + to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_deidentify_template" not in self._stubs: + self._stubs["delete_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate", + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_deidentify_template"] + + @property + def create_job_trigger( + self, + ) -> Callable[[dlp.CreateJobTriggerRequest], Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job_trigger" not in self._stubs: + self._stubs["create_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateJobTrigger", + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["create_job_trigger"] + + @property + def update_job_trigger( + self, + ) -> Callable[[dlp.UpdateJobTriggerRequest], Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job_trigger" not in self._stubs: + self._stubs["update_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateJobTrigger", + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["update_job_trigger"] + + @property + def hybrid_inspect_job_trigger( + self, + ) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], Awaitable[dlp.HybridInspectResponse] + ]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "hybrid_inspect_job_trigger" not in self._stubs: + self._stubs["hybrid_inspect_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger", + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs["hybrid_inspect_job_trigger"] + + @property + def get_job_trigger( + self, + ) -> Callable[[dlp.GetJobTriggerRequest], Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job_trigger" not in self._stubs: + self._stubs["get_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetJobTrigger", + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["get_job_trigger"] + + @property + def list_job_triggers( + self, + ) -> Callable[[dlp.ListJobTriggersRequest], Awaitable[dlp.ListJobTriggersResponse]]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + Awaitable[~.ListJobTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_job_triggers" not in self._stubs: + self._stubs["list_job_triggers"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListJobTriggers", + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs["list_job_triggers"] + + @property + def delete_job_trigger( + self, + ) -> Callable[[dlp.DeleteJobTriggerRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job_trigger" not in self._stubs: + self._stubs["delete_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_job_trigger"] + + @property + def activate_job_trigger( + self, + ) -> Callable[[dlp.ActivateJobTriggerRequest], Awaitable[dlp.DlpJob]]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. 
Causes the immediate execution
+        of a trigger instead of waiting on the trigger event to
+        occur.
+
+        Returns:
+            Callable[[~.ActivateJobTriggerRequest],
+                    Awaitable[~.DlpJob]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "activate_job_trigger" not in self._stubs:
+            self._stubs["activate_job_trigger"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/ActivateJobTrigger",
+                request_serializer=dlp.ActivateJobTriggerRequest.serialize,
+                response_deserializer=dlp.DlpJob.deserialize,
+            )
+        return self._stubs["activate_job_trigger"]
+
+    @property
+    def create_dlp_job(
+        self,
+    ) -> Callable[[dlp.CreateDlpJobRequest], Awaitable[dlp.DlpJob]]:
+        r"""Return a callable for the create dlp job method over gRPC.
+
+        Creates a new job to inspect storage or calculate
+        risk metrics. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        inspect jobs, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.CreateDlpJobRequest],
+                    Awaitable[~.DlpJob]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_dlp_job" not in self._stubs:
+            self._stubs["create_dlp_job"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/CreateDlpJob",
+                request_serializer=dlp.CreateDlpJobRequest.serialize,
+                response_deserializer=dlp.DlpJob.deserialize,
+            )
+        return self._stubs["create_dlp_job"]
+
+    @property
+    def list_dlp_jobs(
+        self,
+    ) -> Callable[[dlp.ListDlpJobsRequest], Awaitable[dlp.ListDlpJobsResponse]]:
+        r"""Return a callable for the list dlp jobs method over gRPC.
+
+        Lists DlpJobs that match the specified filter in the
+        request. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+
+        Returns:
+            Callable[[~.ListDlpJobsRequest],
+                    Awaitable[~.ListDlpJobsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_dlp_jobs" not in self._stubs:
+            self._stubs["list_dlp_jobs"] = self.grpc_channel.unary_unary(
+                "/google.privacy.dlp.v2.DlpService/ListDlpJobs",
+                request_serializer=dlp.ListDlpJobsRequest.serialize,
+                response_deserializer=dlp.ListDlpJobsResponse.deserialize,
+            )
+        return self._stubs["list_dlp_jobs"]
+
+    @property
+    def get_dlp_job(self) -> Callable[[dlp.GetDlpJobRequest], Awaitable[dlp.DlpJob]]:
+        r"""Return a callable for the get dlp job method over gRPC.
+
+        Gets the latest state of a long-running DlpJob.
+        See https://cloud.google.com/dlp/docs/inspecting-storage
+        and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+ + Returns: + Callable[[~.GetDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dlp_job" not in self._stubs: + self._stubs["get_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetDlpJob", + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs["get_dlp_job"] + + @property + def delete_dlp_job( + self, + ) -> Callable[[dlp.DeleteDlpJobRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be canceled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dlp_job" not in self._stubs: + self._stubs["delete_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dlp_job"] + + @property + def cancel_dlp_job( + self, + ) -> Callable[[dlp.CancelDlpJobRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_dlp_job" not in self._stubs: + self._stubs["cancel_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CancelDlpJob", + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["cancel_dlp_job"] + + @property + def create_stored_info_type( + self, + ) -> Callable[[dlp.CreateStoredInfoTypeRequest], Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_stored_info_type" not in self._stubs: + self._stubs["create_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateStoredInfoType", + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["create_stored_info_type"] + + @property + def update_stored_info_type( + self, + ) -> Callable[[dlp.UpdateStoredInfoTypeRequest], Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_stored_info_type" not in self._stubs: + self._stubs["update_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType", + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["update_stored_info_type"] + + @property + def get_stored_info_type( + self, + ) -> Callable[[dlp.GetStoredInfoTypeRequest], Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_stored_info_type" not in self._stubs: + self._stubs["get_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetStoredInfoType", + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["get_stored_info_type"] + + @property + def list_stored_info_types( + self, + ) -> Callable[ + [dlp.ListStoredInfoTypesRequest], Awaitable[dlp.ListStoredInfoTypesResponse] + ]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + Awaitable[~.ListStoredInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
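+        # A hedged paging sketch (names assumed): this transport returns one
+        # raw page per call; automatic pagers live in the client layer, so a
+        # direct caller loops on `next_page_token` manually.
+        #
+        #   request = dlp.ListStoredInfoTypesRequest(parent="projects/my-project")
+        #   resp = await transport.list_stored_info_types(request)
+        #   while resp.next_page_token:
+        #       request.page_token = resp.next_page_token
+        #       resp = await transport.list_stored_info_types(request)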
+ if "list_stored_info_types" not in self._stubs: + self._stubs["list_stored_info_types"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes", + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs["list_stored_info_types"] + + @property + def delete_stored_info_type( + self, + ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See + https://cloud.google.com/dlp/docs/creating-stored-infotypes + to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_stored_info_type" not in self._stubs: + self._stubs["delete_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_stored_info_type"] + + @property + def hybrid_inspect_dlp_job( + self, + ) -> Callable[ + [dlp.HybridInspectDlpJobRequest], Awaitable[dlp.HybridInspectResponse] + ]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings, inspect the job. Inspection will + occur asynchronously. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "hybrid_inspect_dlp_job" not in self._stubs: + self._stubs["hybrid_inspect_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob", + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs["hybrid_inspect_dlp_job"] + + @property + def finish_dlp_job( + self, + ) -> Callable[[dlp.FinishDlpJobRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. + + Returns: + Callable[[~.FinishDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "finish_dlp_job" not in self._stubs: + self._stubs["finish_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/FinishDlpJob", + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["finish_dlp_job"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("DlpServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py new file mode 100644 index 000000000000..17c18c5d02f4 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -0,0 +1,4770 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dlp_v2.types import dlp + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DlpServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DlpServiceRestInterceptor: + """Interceptor for DlpService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DlpServiceRestTransport. + + .. 
code-block:: python + class MyCustomDlpServiceInterceptor(DlpServiceRestInterceptor): + def pre_activate_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_activate_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_create_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_deidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_finish_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_job_trigger(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_hybrid_inspect_dlp_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_dlp_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_hybrid_inspect_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_hybrid_inspect_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_inspect_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_inspect_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deidentify_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deidentify_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dlp_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dlp_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_inspect_templates(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_inspect_templates(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_job_triggers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_triggers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_stored_info_types(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_stored_info_types(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_redact_image(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_redact_image(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_reidentify_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reidentify_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_deidentify_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deidentify_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_inspect_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_update_inspect_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_job_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_stored_info_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_stored_info_type(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DlpServiceRestTransport(interceptor=MyCustomDlpServiceInterceptor()) + client = DlpServiceClient(transport=transport) + + + """ + + def pre_activate_job_trigger( + self, + request: dlp.ActivateJobTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.ActivateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for activate_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_cancel_dlp_job( + self, request: dlp.CancelDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.CancelDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_create_deidentify_template( + self, + request: dlp.CreateDeidentifyTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.CreateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_deidentify_template( + self, response: dlp.DeidentifyTemplate + ) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for create_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_create_dlp_job( + self, request: dlp.CreateDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.CreateDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for create_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + + def pre_create_inspect_template( + self, + request: dlp.CreateInspectTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.CreateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_inspect_template( + self, response: dlp.InspectTemplate + ) -> dlp.InspectTemplate: + """Post-rpc interceptor for create_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_create_job_trigger( + self, request: dlp.CreateJobTriggerRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.CreateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for create_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_create_stored_info_type( + self, + request: dlp.CreateStoredInfoTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.CreateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_create_stored_info_type( + self, response: dlp.StoredInfoType + ) -> dlp.StoredInfoType: + """Post-rpc interceptor for create_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_deidentify_content( + self, request: dlp.DeidentifyContentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.DeidentifyContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_deidentify_content( + self, response: dlp.DeidentifyContentResponse + ) -> dlp.DeidentifyContentResponse: + """Post-rpc interceptor for deidentify_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_delete_deidentify_template( + self, + request: dlp.DeleteDeidentifyTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.DeleteDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def pre_delete_dlp_job( + self, request: dlp.DeleteDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.DeleteDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_inspect_template( + self, + request: dlp.DeleteInspectTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.DeleteInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_job_trigger( + self, request: dlp.DeleteJobTriggerRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.DeleteJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_delete_stored_info_type( + self, + request: dlp.DeleteStoredInfoTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.DeleteStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_finish_dlp_job( + self, request: dlp.FinishDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.FinishDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for finish_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def pre_get_deidentify_template( + self, + request: dlp.GetDeidentifyTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.GetDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_deidentify_template( + self, response: dlp.DeidentifyTemplate + ) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for get_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_get_dlp_job( + self, request: dlp.GetDlpJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.GetDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: + """Post-rpc interceptor for get_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_inspect_template( + self, + request: dlp.GetInspectTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.GetInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_inspect_template( + self, response: dlp.InspectTemplate + ) -> dlp.InspectTemplate: + """Post-rpc interceptor for get_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_get_job_trigger( + self, request: dlp.GetJobTriggerRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.GetJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for get_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_get_stored_info_type( + self, request: dlp.GetStoredInfoTypeRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.GetStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_get_stored_info_type( + self, response: dlp.StoredInfoType + ) -> dlp.StoredInfoType: + """Post-rpc interceptor for get_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_hybrid_inspect_dlp_job( + self, + request: dlp.HybridInspectDlpJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.HybridInspectDlpJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_hybrid_inspect_dlp_job( + self, response: dlp.HybridInspectResponse + ) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_hybrid_inspect_job_trigger( + self, + request: dlp.HybridInspectJobTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.HybridInspectJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_hybrid_inspect_job_trigger( + self, response: dlp.HybridInspectResponse + ) -> dlp.HybridInspectResponse: + """Post-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_inspect_content( + self, request: dlp.InspectContentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.InspectContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for inspect_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_inspect_content( + self, response: dlp.InspectContentResponse + ) -> dlp.InspectContentResponse: + """Post-rpc interceptor for inspect_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_list_deidentify_templates( + self, + request: dlp.ListDeidentifyTemplatesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.ListDeidentifyTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_deidentify_templates( + self, response: dlp.ListDeidentifyTemplatesResponse + ) -> dlp.ListDeidentifyTemplatesResponse: + """Post-rpc interceptor for list_deidentify_templates + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_list_dlp_jobs( + self, request: dlp.ListDlpJobsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.ListDlpJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_dlp_jobs( + self, response: dlp.ListDlpJobsResponse + ) -> dlp.ListDlpJobsResponse: + """Post-rpc interceptor for list_dlp_jobs + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_list_info_types( + self, request: dlp.ListInfoTypesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.ListInfoTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_info_types( + self, response: dlp.ListInfoTypesResponse + ) -> dlp.ListInfoTypesResponse: + """Post-rpc interceptor for list_info_types + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_inspect_templates( + self, + request: dlp.ListInspectTemplatesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.ListInspectTemplatesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_inspect_templates( + self, response: dlp.ListInspectTemplatesResponse + ) -> dlp.ListInspectTemplatesResponse: + """Post-rpc interceptor for list_inspect_templates + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_list_job_triggers( + self, request: dlp.ListJobTriggersRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.ListJobTriggersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_job_triggers( + self, response: dlp.ListJobTriggersResponse + ) -> dlp.ListJobTriggersResponse: + """Post-rpc interceptor for list_job_triggers + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_list_stored_info_types( + self, + request: dlp.ListStoredInfoTypesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.ListStoredInfoTypesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_list_stored_info_types( + self, response: dlp.ListStoredInfoTypesResponse + ) -> dlp.ListStoredInfoTypesResponse: + """Post-rpc interceptor for list_stored_info_types + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_redact_image( + self, request: dlp.RedactImageRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.RedactImageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for redact_image + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_redact_image( + self, response: dlp.RedactImageResponse + ) -> dlp.RedactImageResponse: + """Post-rpc interceptor for redact_image + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_reidentify_content( + self, request: dlp.ReidentifyContentRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.ReidentifyContentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. 
+ """ + return request, metadata + + def post_reidentify_content( + self, response: dlp.ReidentifyContentResponse + ) -> dlp.ReidentifyContentResponse: + """Post-rpc interceptor for reidentify_content + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_update_deidentify_template( + self, + request: dlp.UpdateDeidentifyTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.UpdateDeidentifyTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_deidentify_template( + self, response: dlp.DeidentifyTemplate + ) -> dlp.DeidentifyTemplate: + """Post-rpc interceptor for update_deidentify_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_update_inspect_template( + self, + request: dlp.UpdateInspectTemplateRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.UpdateInspectTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_inspect_template( + self, response: dlp.InspectTemplate + ) -> dlp.InspectTemplate: + """Post-rpc interceptor for update_inspect_template + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_update_job_trigger( + self, request: dlp.UpdateJobTriggerRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[dlp.UpdateJobTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: + """Post-rpc interceptor for update_job_trigger + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + def pre_update_stored_info_type( + self, + request: dlp.UpdateStoredInfoTypeRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[dlp.UpdateStoredInfoTypeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the DlpService server. + """ + return request, metadata + + def post_update_stored_info_type( + self, response: dlp.StoredInfoType + ) -> dlp.StoredInfoType: + """Post-rpc interceptor for update_stored_info_type + + Override in a subclass to manipulate the response + after it is returned by the DlpService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DlpServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DlpServiceRestInterceptor + + +class DlpServiceRestTransport(DlpServiceTransport): + """REST backend transport for DlpService. 
+
+    The Cloud Data Loss Prevention (DLP) API is a service that
+    allows clients to detect the presence of Personally Identifiable
+    Information (PII) and other privacy-sensitive data in
+    user-supplied, unstructured data streams, like text blocks or
+    images.
+    The service also includes methods for sensitive data redaction
+    and scheduling of data scans on Google Cloud Platform based data
+    sets.
+
+    To learn more about concepts and find how-to guides see
+    https://cloud.google.com/dlp/docs/.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "dlp.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[DlpServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DlpServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _ActivateJobTrigger(DlpServiceRestStub):
+        def __hash__(self):
+            return hash("ActivateJobTrigger")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: dlp.ActivateJobTriggerRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> dlp.DlpJob:
+            r"""Call the activate job trigger method over HTTP.
+
+            Args:
+                request (~.dlp.ActivateJobTriggerRequest):
+                    The request object. Request message for
+                    ActivateJobTrigger.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.dlp.DlpJob:
+                    Combines all of the information about
+                    a DLP job.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v2/{name=projects/*/jobTriggers/*}:activate",
+                    "body": "*",
+                },
+                {
+                    "method": "post",
+                    "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}:activate",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_activate_job_trigger(
+                request, metadata
+            )
+            pb_request = dlp.ActivateJobTriggerRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
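+            # (For example, an HTTP 404 surfaces as
+            # google.api_core.exceptions.NotFound.)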
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_activate_job_trigger(resp) + return resp + + class _CancelDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("CancelDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CancelDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the cancel dlp job method over HTTP. + + Args: + request (~.dlp.CancelDlpJobRequest): + The request object. The request message for canceling a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/dlpJobs/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_dlp_job(request, metadata) + pb_request = dlp.CancelDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("CreateDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateDeidentifyTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Call the create deidentify + template method over HTTP. + + Args: + request (~.dlp.CreateDeidentifyTemplateRequest): + The request object. 
Request message for + CreateDeidentifyTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=organizations/*}/deidentifyTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/deidentifyTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*}/deidentifyTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/deidentifyTemplates", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_deidentify_template( + request, metadata + ) + pb_request = dlp.CreateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deidentify_template(resp) + return resp + + class _CreateDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("CreateDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Call the create dlp job method over HTTP. + + Args: + request (~.dlp.CreateDlpJobRequest): + The request object. Request message for + CreateDlpJobRequest. Used to initiate + long running jobs such as calculating + risk metrics or inspecting Google Cloud + Storage. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/dlpJobs", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/dlpJobs", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_dlp_job(request, metadata) + pb_request = dlp.CreateDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_dlp_job(resp) + return resp + + class _CreateInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("CreateInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateInspectTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Call the create inspect template method over HTTP. + + Args: + request (~.dlp.CreateInspectTemplateRequest): + The request object. Request message for + CreateInspectTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
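+
+                For illustration, a minimal template naming one
+                built-in infoType might be constructed as
+                ``InspectTemplate(inspect_config=InspectConfig(info_types=[InfoType(name="EMAIL_ADDRESS")]))``
+                (a sketch using placeholder values).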
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=organizations/*}/inspectTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/inspectTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*}/inspectTemplates", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/inspectTemplates", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_inspect_template( + request, metadata + ) + pb_request = dlp.CreateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_inspect_template(resp) + return resp + + class _CreateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("CreateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Call the create job trigger method over HTTP. + + Args: + request (~.dlp.CreateJobTriggerRequest): + The request object. Request message for CreateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/jobTriggers", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/jobTriggers", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/jobTriggers", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_job_trigger( + request, metadata + ) + pb_request = dlp.CreateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job_trigger(resp) + return resp + + class _CreateStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("CreateStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.CreateStoredInfoTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Call the create stored info type method over HTTP. + + Args: + request (~.dlp.CreateStoredInfoTypeRequest): + The request object. Request message for + CreateStoredInfoType. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=organizations/*}/storedInfoTypes", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=organizations/*/locations/*}/storedInfoTypes", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*}/storedInfoTypes", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/storedInfoTypes", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_stored_info_type( + request, metadata + ) + pb_request = dlp.CreateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_stored_info_type(resp) + return resp + + class _DeidentifyContent(DlpServiceRestStub): + def __hash__(self): + return hash("DeidentifyContent") + + def __call__( + self, + request: dlp.DeidentifyContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""Call the deidentify content method over HTTP. + + Args: + request (~.dlp.DeidentifyContentRequest): + The request object. Request to de-identify a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/content:deidentify", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/content:deidentify", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_deidentify_content( + request, metadata + ) + pb_request = dlp.DeidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyContentResponse() + pb_resp = dlp.DeidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_deidentify_content(resp) + return resp + + class _DeleteDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteDeidentifyTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete deidentify + template method over HTTP. + + Args: + request (~.dlp.DeleteDeidentifyTemplateRequest): + The request object. Request message for + DeleteDeidentifyTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=organizations/*/deidentifyTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/deidentifyTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_deidentify_template( + request, metadata + ) + pb_request = dlp.DeleteDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete dlp job method over HTTP. + + Args: + request (~.dlp.DeleteDlpJobRequest): + The request object. The request message for deleting a + DLP job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/dlpJobs/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_dlp_job(request, metadata) + pb_request = dlp.DeleteDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteInspectTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete inspect template method over HTTP. + + Args: + request (~.dlp.DeleteInspectTemplateRequest): + The request object. Request message for + DeleteInspectTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=organizations/*/inspectTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/inspectTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/inspectTemplates/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/inspectTemplates/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_inspect_template( + request, metadata + ) + pb_request = dlp.DeleteInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete job trigger method over HTTP. + + Args: + request (~.dlp.DeleteJobTriggerRequest): + The request object. Request message for DeleteJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/jobTriggers/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/jobTriggers/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_job_trigger( + request, metadata + ) + pb_request = dlp.DeleteJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("DeleteStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.DeleteStoredInfoTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete stored info type method over HTTP. + + Args: + request (~.dlp.DeleteStoredInfoTypeRequest): + The request object. Request message for + DeleteStoredInfoType. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=organizations/*/storedInfoTypes/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/storedInfoTypes/*}", + }, + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/storedInfoTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_stored_info_type( + request, metadata + ) + pb_request = dlp.DeleteStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _FinishDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("FinishDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.FinishDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the finish dlp job method over HTTP. + + Args: + request (~.dlp.FinishDlpJobRequest): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}:finish", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_finish_dlp_job(request, metadata) + pb_request = dlp.FinishDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("GetDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetDeidentifyTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Call the get deidentify template method over HTTP. + + Args: + request (~.dlp.GetDeidentifyTemplateRequest): + The request object. Request message for + GetDeidentifyTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=organizations/*/deidentifyTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/deidentifyTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deidentify_template( + request, metadata + ) + pb_request = dlp.GetDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deidentify_template(resp) + return resp + + class _GetDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("GetDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Call the get dlp job method over HTTP. + + Args: + request (~.dlp.GetDlpJobRequest): + The request object. The request message for [DlpJobs.GetDlpJob][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/dlpJobs/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_dlp_job(request, metadata) + pb_request = dlp.GetDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DlpJob() + pb_resp = dlp.DlpJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_dlp_job(resp) + return resp + + class _GetInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("GetInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetInspectTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Call the get inspect template method over HTTP. + + Args: + request (~.dlp.GetInspectTemplateRequest): + The request object. Request message for + GetInspectTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=organizations/*/inspectTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/inspectTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/inspectTemplates/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/inspectTemplates/*}", + }, + ] + request, metadata = self._interceptor.pre_get_inspect_template( + request, metadata + ) + pb_request = dlp.GetInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_inspect_template(resp) + return resp + + class _GetJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("GetJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Call the get job trigger method over HTTP. + + Args: + request (~.dlp.GetJobTriggerRequest): + The request object. Request message for GetJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/jobTriggers/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/jobTriggers/*}", + }, + ] + request, metadata = self._interceptor.pre_get_job_trigger(request, metadata) + pb_request = dlp.GetJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_trigger(resp) + return resp + + class _GetStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("GetStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.GetStoredInfoTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Call the get stored info type method over HTTP. + + Args: + request (~.dlp.GetStoredInfoTypeRequest): + The request object. Request message for + GetStoredInfoType. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=organizations/*/storedInfoTypes/*}", + }, + { + "method": "get", + "uri": "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/storedInfoTypes/*}", + }, + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/storedInfoTypes/*}", + }, + ] + request, metadata = self._interceptor.pre_get_stored_info_type( + request, metadata + ) + pb_request = dlp.GetStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stored_info_type(resp) + return resp + + class _HybridInspectDlpJob(DlpServiceRestStub): + def __hash__(self): + return hash("HybridInspectDlpJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.HybridInspectDlpJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect dlp job method over HTTP. + + Args: + request (~.dlp.HybridInspectDlpJobRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_hybrid_inspect_dlp_job( + request, metadata + ) + pb_request = dlp.HybridInspectDlpJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) + return resp + + class _HybridInspectJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("HybridInspectJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.HybridInspectJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Call the hybrid inspect job + trigger method over HTTP. + + Args: + request (~.dlp.HybridInspectJobTriggerRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_hybrid_inspect_job_trigger( + request, metadata + ) + pb_request = dlp.HybridInspectJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.HybridInspectResponse() + pb_resp = dlp.HybridInspectResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) + return resp + + class _InspectContent(DlpServiceRestStub): + def __hash__(self): + return hash("InspectContent") + + def __call__( + self, + request: dlp.InspectContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Call the inspect content method over HTTP. + + Args: + request (~.dlp.InspectContentRequest): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectContentResponse: + Results of inspecting an item. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/content:inspect", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/content:inspect", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_inspect_content(request, metadata) + pb_request = dlp.InspectContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectContentResponse() + pb_resp = dlp.InspectContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_inspect_content(resp) + return resp + + class _ListDeidentifyTemplates(DlpServiceRestStub): + def __hash__(self): + return hash("ListDeidentifyTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListDeidentifyTemplatesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListDeidentifyTemplatesResponse: + r"""Call the list deidentify templates method over HTTP. + + Args: + request (~.dlp.ListDeidentifyTemplatesRequest): + The request object. Request message for + ListDeidentifyTemplates. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListDeidentifyTemplatesResponse: + Response message for + ListDeidentifyTemplates. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=organizations/*}/deidentifyTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/deidentifyTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*}/deidentifyTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/deidentifyTemplates", + }, + ] + request, metadata = self._interceptor.pre_list_deidentify_templates( + request, metadata + ) + pb_request = dlp.ListDeidentifyTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDeidentifyTemplatesResponse() + pb_resp = dlp.ListDeidentifyTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deidentify_templates(resp) + return resp + + class _ListDlpJobs(DlpServiceRestStub): + def __hash__(self): + return hash("ListDlpJobs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListDlpJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListDlpJobsResponse: + r"""Call the list dlp jobs method over HTTP. + + Args: + request (~.dlp.ListDlpJobsRequest): + The request object. The request message for listing DLP + jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListDlpJobsResponse: + The response message for listing DLP + jobs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*}/dlpJobs", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/dlpJobs", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/dlpJobs", + }, + ] + request, metadata = self._interceptor.pre_list_dlp_jobs(request, metadata) + pb_request = dlp.ListDlpJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListDlpJobsResponse() + pb_resp = dlp.ListDlpJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_dlp_jobs(resp) + return resp + + class _ListInfoTypes(DlpServiceRestStub): + def __hash__(self): + return hash("ListInfoTypes") + + def __call__( + self, + request: dlp.ListInfoTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Call the list info types method over HTTP. + + Args: + request (~.dlp.ListInfoTypesRequest): + The request object. Request for the list of infoTypes. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInfoTypesResponse: + Response to the ListInfoTypes + request. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/infoTypes", + }, + { + "method": "get", + "uri": "/v2/{parent=locations/*}/infoTypes", + }, + ] + request, metadata = self._interceptor.pre_list_info_types(request, metadata) + pb_request = dlp.ListInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInfoTypesResponse() + pb_resp = dlp.ListInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_info_types(resp) + return resp + + class _ListInspectTemplates(DlpServiceRestStub): + def __hash__(self): + return hash("ListInspectTemplates") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListInspectTemplatesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInspectTemplatesResponse: + r"""Call the list inspect templates method over HTTP. + + Args: + request (~.dlp.ListInspectTemplatesRequest): + The request object. Request message for + ListInspectTemplates. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInspectTemplatesResponse: + Response message for + ListInspectTemplates. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=organizations/*}/inspectTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/inspectTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*}/inspectTemplates", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/inspectTemplates", + }, + ] + request, metadata = self._interceptor.pre_list_inspect_templates( + request, metadata + ) + pb_request = dlp.ListInspectTemplatesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListInspectTemplatesResponse() + pb_resp = dlp.ListInspectTemplatesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_inspect_templates(resp) + return resp + + class _ListJobTriggers(DlpServiceRestStub): + def __hash__(self): + return hash("ListJobTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListJobTriggersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListJobTriggersResponse: + r"""Call the list job triggers method over HTTP. + + Args: + request (~.dlp.ListJobTriggersRequest): + The request object. Request message for ListJobTriggers. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListJobTriggersResponse: + Response message for ListJobTriggers. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*}/jobTriggers", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/jobTriggers", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/jobTriggers", + }, + ] + request, metadata = self._interceptor.pre_list_job_triggers( + request, metadata + ) + pb_request = dlp.ListJobTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListJobTriggersResponse() + pb_resp = dlp.ListJobTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_job_triggers(resp) + return resp + + class _ListStoredInfoTypes(DlpServiceRestStub): + def __hash__(self): + return hash("ListStoredInfoTypes") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ListStoredInfoTypesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListStoredInfoTypesResponse: + r"""Call the list stored info types method over HTTP. + + Args: + request (~.dlp.ListStoredInfoTypesRequest): + The request object. Request message for + ListStoredInfoTypes. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListStoredInfoTypesResponse: + Response message for + ListStoredInfoTypes. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=organizations/*}/storedInfoTypes", + }, + { + "method": "get", + "uri": "/v2/{parent=organizations/*/locations/*}/storedInfoTypes", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*}/storedInfoTypes", + }, + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/storedInfoTypes", + }, + ] + request, metadata = self._interceptor.pre_list_stored_info_types( + request, metadata + ) + pb_request = dlp.ListStoredInfoTypesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ListStoredInfoTypesResponse() + pb_resp = dlp.ListStoredInfoTypesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_stored_info_types(resp) + return resp + + class _RedactImage(DlpServiceRestStub): + def __hash__(self): + return hash("RedactImage") + + def __call__( + self, + request: dlp.RedactImageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Call the redact image method over HTTP. + + Args: + request (~.dlp.RedactImageRequest): + The request object. Request to search for potentially + sensitive info in an image and redact it + by covering it with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.RedactImageResponse: + Results of redacting an image. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/image:redact", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/image:redact", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_redact_image(request, metadata) + pb_request = dlp.RedactImageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.RedactImageResponse() + pb_resp = dlp.RedactImageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_redact_image(resp) + return resp + + class _ReidentifyContent(DlpServiceRestStub): + def __hash__(self): + return hash("ReidentifyContent") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.ReidentifyContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Call the reidentify content method over HTTP. + + Args: + request (~.dlp.ReidentifyContentRequest): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ReidentifyContentResponse: + Results of re-identifying an item. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*}/content:reidentify", + "body": "*", + }, + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/content:reidentify", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_reidentify_content( + request, metadata + ) + pb_request = dlp.ReidentifyContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.ReidentifyContentResponse() + pb_resp = dlp.ReidentifyContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reidentify_content(resp) + return resp + + class _UpdateDeidentifyTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateDeidentifyTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.UpdateDeidentifyTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Call the update deidentify + template method over HTTP. + + Args: + request (~.dlp.UpdateDeidentifyTemplateRequest): + The request object. Request message for + UpdateDeidentifyTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{name=organizations/*/deidentifyTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=organizations/*/locations/*/deidentifyTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/deidentifyTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/locations/*/deidentifyTemplates/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_deidentify_template( + request, metadata + ) + pb_request = dlp.UpdateDeidentifyTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.DeidentifyTemplate() + pb_resp = dlp.DeidentifyTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_deidentify_template(resp) + return resp + + class _UpdateInspectTemplate(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateInspectTemplate") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.UpdateInspectTemplateRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Call the update inspect template method over HTTP. + + Args: + request (~.dlp.UpdateInspectTemplateRequest): + The request object. Request message for + UpdateInspectTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{name=organizations/*/inspectTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=organizations/*/locations/*/inspectTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/inspectTemplates/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/locations/*/inspectTemplates/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_inspect_template( + request, metadata + ) + pb_request = dlp.UpdateInspectTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.InspectTemplate() + pb_resp = dlp.InspectTemplate.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_inspect_template(resp) + return resp + + class _UpdateJobTrigger(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateJobTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.UpdateJobTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Call the update job trigger method over HTTP. + + Args: + request (~.dlp.UpdateJobTriggerRequest): + The request object. Request message for UpdateJobTrigger. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers + to learn more. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{name=projects/*/jobTriggers/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/locations/*/jobTriggers/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=organizations/*/locations/*/jobTriggers/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_job_trigger( + request, metadata + ) + pb_request = dlp.UpdateJobTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.JobTrigger() + pb_resp = dlp.JobTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job_trigger(resp) + return resp + + class _UpdateStoredInfoType(DlpServiceRestStub): + def __hash__(self): + return hash("UpdateStoredInfoType") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: dlp.UpdateStoredInfoTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Call the update stored info type method over HTTP. + + Args: + request (~.dlp.UpdateStoredInfoTypeRequest): + The request object. Request message for + UpdateStoredInfoType. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{name=organizations/*/storedInfoTypes/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=organizations/*/locations/*/storedInfoTypes/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/storedInfoTypes/*}", + "body": "*", + }, + { + "method": "patch", + "uri": "/v2/{name=projects/*/locations/*/storedInfoTypes/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_stored_info_type( + request, metadata + ) + pb_request = dlp.UpdateStoredInfoTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dlp.StoredInfoType() + pb_resp = dlp.StoredInfoType.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_stored_info_type(resp) + return resp + + @property + def activate_job_trigger( + self, + ) -> Callable[[dlp.ActivateJobTriggerRequest], dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ActivateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_dlp_job(self) -> Callable[[dlp.CancelDlpJobRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_deidentify_template( + self, + ) -> Callable[[dlp.CreateDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_dlp_job(self) -> Callable[[dlp.CreateDlpJobRequest], dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_inspect_template( + self, + ) -> Callable[[dlp.CreateInspectTemplateRequest], dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_job_trigger( + self, + ) -> Callable[[dlp.CreateJobTriggerRequest], dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_stored_info_type( + self, + ) -> Callable[[dlp.CreateStoredInfoTypeRequest], dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def deidentify_content( + self, + ) -> Callable[[dlp.DeidentifyContentRequest], dlp.DeidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_deidentify_template( + self, + ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dlp_job(self) -> Callable[[dlp.DeleteDlpJobRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_inspect_template( + self, + ) -> Callable[[dlp.DeleteInspectTemplateRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job_trigger( + self, + ) -> Callable[[dlp.DeleteJobTriggerRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_stored_info_type( + self, + ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def finish_dlp_job(self) -> Callable[[dlp.FinishDlpJobRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FinishDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deidentify_template( + self, + ) -> Callable[[dlp.GetDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dlp_job(self) -> Callable[[dlp.GetDlpJobRequest], dlp.DlpJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_inspect_template( + self, + ) -> Callable[[dlp.GetInspectTemplateRequest], dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_trigger(self) -> Callable[[dlp.GetJobTriggerRequest], dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stored_info_type( + self, + ) -> Callable[[dlp.GetStoredInfoTypeRequest], dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_dlp_job( + self, + ) -> Callable[[dlp.HybridInspectDlpJobRequest], dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._HybridInspectDlpJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def hybrid_inspect_job_trigger( + self, + ) -> Callable[[dlp.HybridInspectJobTriggerRequest], dlp.HybridInspectResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._HybridInspectJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def inspect_content( + self, + ) -> Callable[[dlp.InspectContentRequest], dlp.InspectContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._InspectContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deidentify_templates( + self, + ) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], dlp.ListDeidentifyTemplatesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeidentifyTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dlp_jobs( + self, + ) -> Callable[[dlp.ListDlpJobsRequest], dlp.ListDlpJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDlpJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_info_types( + self, + ) -> Callable[[dlp.ListInfoTypesRequest], dlp.ListInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_inspect_templates( + self, + ) -> Callable[[dlp.ListInspectTemplatesRequest], dlp.ListInspectTemplatesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInspectTemplates(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_job_triggers( + self, + ) -> Callable[[dlp.ListJobTriggersRequest], dlp.ListJobTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_stored_info_types( + self, + ) -> Callable[[dlp.ListStoredInfoTypesRequest], dlp.ListStoredInfoTypesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListStoredInfoTypes(self._session, self._host, self._interceptor) # type: ignore + + @property + def redact_image( + self, + ) -> Callable[[dlp.RedactImageRequest], dlp.RedactImageResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RedactImage(self._session, self._host, self._interceptor) # type: ignore + + @property + def reidentify_content( + self, + ) -> Callable[[dlp.ReidentifyContentRequest], dlp.ReidentifyContentResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReidentifyContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_deidentify_template( + self, + ) -> Callable[[dlp.UpdateDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDeidentifyTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_inspect_template( + self, + ) -> Callable[[dlp.UpdateInspectTemplateRequest], dlp.InspectTemplate]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInspectTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job_trigger( + self, + ) -> Callable[[dlp.UpdateJobTriggerRequest], dlp.JobTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJobTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_stored_info_type( + self, + ) -> Callable[[dlp.UpdateStoredInfoTypeRequest], dlp.StoredInfoType]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateStoredInfoType(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DlpServiceRestTransport",) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py new file mode 100644 index 000000000000..f0f672966aa8 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/__init__.py @@ -0,0 +1,390 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
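+#
+# Usage sketch (illustrative): the re-exports below let callers build any
+# DLP message from a single namespace, e.g.::
+#
+#     from google.cloud.dlp_v2 import types
+#
+#     config = types.InspectConfig(
+#         info_types=[types.InfoType(name="PHONE_NUMBER")],
+#         min_likelihood=types.Likelihood.POSSIBLE,
+#     )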
+# +from .dlp import ( + Action, + ActivateJobTriggerRequest, + AnalyzeDataSourceRiskDetails, + BoundingBox, + BucketingConfig, + ByteContentItem, + CancelDlpJobRequest, + CharacterMaskConfig, + CharsToIgnore, + Color, + Container, + ContentItem, + ContentLocation, + ContentOption, + CreateDeidentifyTemplateRequest, + CreateDlpJobRequest, + CreateInspectTemplateRequest, + CreateJobTriggerRequest, + CreateStoredInfoTypeRequest, + CryptoDeterministicConfig, + CryptoHashConfig, + CryptoKey, + CryptoReplaceFfxFpeConfig, + DataProfileAction, + DataProfileConfigSnapshot, + DataProfileJobConfig, + DataProfileLocation, + DataProfilePubSubCondition, + DataProfilePubSubMessage, + DataRiskLevel, + DateShiftConfig, + DateTime, + DeidentifyConfig, + DeidentifyContentRequest, + DeidentifyContentResponse, + DeidentifyTemplate, + DeleteDeidentifyTemplateRequest, + DeleteDlpJobRequest, + DeleteInspectTemplateRequest, + DeleteJobTriggerRequest, + DeleteStoredInfoTypeRequest, + DlpJob, + DlpJobType, + DocumentLocation, + EncryptionStatus, + Error, + ExcludeByHotword, + ExcludeInfoTypes, + ExclusionRule, + FieldTransformation, + Finding, + FinishDlpJobRequest, + FixedSizeBucketingConfig, + GetDeidentifyTemplateRequest, + GetDlpJobRequest, + GetInspectTemplateRequest, + GetJobTriggerRequest, + GetStoredInfoTypeRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectDlpJobRequest, + HybridInspectJobTriggerRequest, + HybridInspectResponse, + HybridInspectStatistics, + ImageLocation, + ImageTransformations, + InfoTypeCategory, + InfoTypeDescription, + InfoTypeStats, + InfoTypeSummary, + InfoTypeSupportedBy, + InfoTypeTransformations, + InspectConfig, + InspectContentRequest, + InspectContentResponse, + InspectDataSourceDetails, + InspectionRule, + InspectionRuleSet, + InspectJobConfig, + InspectResult, + InspectTemplate, + JobTrigger, + KmsWrappedCryptoKey, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + ListDlpJobsRequest, + ListDlpJobsResponse, + ListInfoTypesRequest, + ListInfoTypesResponse, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + ListJobTriggersRequest, + ListJobTriggersResponse, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + Location, + Manual, + MatchingType, + MetadataLocation, + MetadataType, + OtherInfoTypeSummary, + OutputStorageConfig, + PrimitiveTransformation, + PrivacyMetric, + ProfileStatus, + QuasiId, + QuoteInfo, + Range, + RecordCondition, + RecordLocation, + RecordSuppression, + RecordTransformation, + RecordTransformations, + RedactConfig, + RedactImageRequest, + RedactImageResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + RelationalOperator, + ReplaceDictionaryConfig, + ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + ResourceVisibility, + RiskAnalysisJobConfig, + Schedule, + StatisticalTable, + StorageMetadataLabel, + StoredInfoType, + StoredInfoTypeConfig, + StoredInfoTypeState, + StoredInfoTypeStats, + StoredInfoTypeVersion, + Table, + TableDataProfile, + TableLocation, + TimePartConfig, + TransformationConfig, + TransformationContainerType, + TransformationDescription, + TransformationDetails, + TransformationDetailsStorageConfig, + TransformationErrorHandling, + TransformationLocation, + TransformationOverview, + TransformationResultStatus, + TransformationResultStatusType, + TransformationSummary, + TransformationType, + TransientCryptoKey, + UnwrappedCryptoKey, + UpdateDeidentifyTemplateRequest, + UpdateInspectTemplateRequest, + 
UpdateJobTriggerRequest, + UpdateStoredInfoTypeRequest, + Value, + ValueFrequency, + VersionDescription, +) +from .storage import ( + BigQueryField, + BigQueryKey, + BigQueryOptions, + BigQueryTable, + CloudStorageFileSet, + CloudStorageOptions, + CloudStoragePath, + CloudStorageRegexFileSet, + CustomInfoType, + DatastoreKey, + DatastoreOptions, + EntityId, + FieldId, + FileType, + HybridOptions, + InfoType, + Key, + KindExpression, + Likelihood, + PartitionId, + RecordKey, + SensitivityScore, + StorageConfig, + StoredType, + TableOptions, +) + +__all__ = ( + "Action", + "ActivateJobTriggerRequest", + "AnalyzeDataSourceRiskDetails", + "BoundingBox", + "BucketingConfig", + "ByteContentItem", + "CancelDlpJobRequest", + "CharacterMaskConfig", + "CharsToIgnore", + "Color", + "Container", + "ContentItem", + "ContentLocation", + "CreateDeidentifyTemplateRequest", + "CreateDlpJobRequest", + "CreateInspectTemplateRequest", + "CreateJobTriggerRequest", + "CreateStoredInfoTypeRequest", + "CryptoDeterministicConfig", + "CryptoHashConfig", + "CryptoKey", + "CryptoReplaceFfxFpeConfig", + "DataProfileAction", + "DataProfileConfigSnapshot", + "DataProfileJobConfig", + "DataProfileLocation", + "DataProfilePubSubCondition", + "DataProfilePubSubMessage", + "DataRiskLevel", + "DateShiftConfig", + "DateTime", + "DeidentifyConfig", + "DeidentifyContentRequest", + "DeidentifyContentResponse", + "DeidentifyTemplate", + "DeleteDeidentifyTemplateRequest", + "DeleteDlpJobRequest", + "DeleteInspectTemplateRequest", + "DeleteJobTriggerRequest", + "DeleteStoredInfoTypeRequest", + "DlpJob", + "DocumentLocation", + "Error", + "ExcludeByHotword", + "ExcludeInfoTypes", + "ExclusionRule", + "FieldTransformation", + "Finding", + "FinishDlpJobRequest", + "FixedSizeBucketingConfig", + "GetDeidentifyTemplateRequest", + "GetDlpJobRequest", + "GetInspectTemplateRequest", + "GetJobTriggerRequest", + "GetStoredInfoTypeRequest", + "HybridContentItem", + "HybridFindingDetails", + "HybridInspectDlpJobRequest", + "HybridInspectJobTriggerRequest", + "HybridInspectResponse", + "HybridInspectStatistics", + "ImageLocation", + "ImageTransformations", + "InfoTypeCategory", + "InfoTypeDescription", + "InfoTypeStats", + "InfoTypeSummary", + "InfoTypeTransformations", + "InspectConfig", + "InspectContentRequest", + "InspectContentResponse", + "InspectDataSourceDetails", + "InspectionRule", + "InspectionRuleSet", + "InspectJobConfig", + "InspectResult", + "InspectTemplate", + "JobTrigger", + "KmsWrappedCryptoKey", + "LargeCustomDictionaryConfig", + "LargeCustomDictionaryStats", + "ListDeidentifyTemplatesRequest", + "ListDeidentifyTemplatesResponse", + "ListDlpJobsRequest", + "ListDlpJobsResponse", + "ListInfoTypesRequest", + "ListInfoTypesResponse", + "ListInspectTemplatesRequest", + "ListInspectTemplatesResponse", + "ListJobTriggersRequest", + "ListJobTriggersResponse", + "ListStoredInfoTypesRequest", + "ListStoredInfoTypesResponse", + "Location", + "Manual", + "MetadataLocation", + "OtherInfoTypeSummary", + "OutputStorageConfig", + "PrimitiveTransformation", + "PrivacyMetric", + "ProfileStatus", + "QuasiId", + "QuoteInfo", + "Range", + "RecordCondition", + "RecordLocation", + "RecordSuppression", + "RecordTransformation", + "RecordTransformations", + "RedactConfig", + "RedactImageRequest", + "RedactImageResponse", + "ReidentifyContentRequest", + "ReidentifyContentResponse", + "ReplaceDictionaryConfig", + "ReplaceValueConfig", + "ReplaceWithInfoTypeConfig", + "RiskAnalysisJobConfig", + "Schedule", + "StatisticalTable", + 
"StorageMetadataLabel", + "StoredInfoType", + "StoredInfoTypeConfig", + "StoredInfoTypeStats", + "StoredInfoTypeVersion", + "Table", + "TableDataProfile", + "TableLocation", + "TimePartConfig", + "TransformationConfig", + "TransformationDescription", + "TransformationDetails", + "TransformationDetailsStorageConfig", + "TransformationErrorHandling", + "TransformationLocation", + "TransformationOverview", + "TransformationResultStatus", + "TransformationSummary", + "TransientCryptoKey", + "UnwrappedCryptoKey", + "UpdateDeidentifyTemplateRequest", + "UpdateInspectTemplateRequest", + "UpdateJobTriggerRequest", + "UpdateStoredInfoTypeRequest", + "Value", + "ValueFrequency", + "VersionDescription", + "ContentOption", + "DlpJobType", + "EncryptionStatus", + "InfoTypeSupportedBy", + "MatchingType", + "MetadataType", + "RelationalOperator", + "ResourceVisibility", + "StoredInfoTypeState", + "TransformationContainerType", + "TransformationResultStatusType", + "TransformationType", + "BigQueryField", + "BigQueryKey", + "BigQueryOptions", + "BigQueryTable", + "CloudStorageFileSet", + "CloudStorageOptions", + "CloudStoragePath", + "CloudStorageRegexFileSet", + "CustomInfoType", + "DatastoreKey", + "DatastoreOptions", + "EntityId", + "FieldId", + "HybridOptions", + "InfoType", + "Key", + "KindExpression", + "PartitionId", + "RecordKey", + "SensitivityScore", + "StorageConfig", + "StoredType", + "TableOptions", + "FileType", + "Likelihood", +) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py new file mode 100644 index 000000000000..c5a374526e60 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/dlp.py @@ -0,0 +1,8916 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.dlp_v2.types import storage + +__protobuf__ = proto.module( + package="google.privacy.dlp.v2", + manifest={ + "TransformationResultStatusType", + "TransformationContainerType", + "TransformationType", + "RelationalOperator", + "MatchingType", + "ContentOption", + "MetadataType", + "InfoTypeSupportedBy", + "DlpJobType", + "StoredInfoTypeState", + "ResourceVisibility", + "EncryptionStatus", + "ExcludeInfoTypes", + "ExcludeByHotword", + "ExclusionRule", + "InspectionRule", + "InspectionRuleSet", + "InspectConfig", + "ByteContentItem", + "ContentItem", + "Table", + "InspectResult", + "Finding", + "Location", + "ContentLocation", + "MetadataLocation", + "StorageMetadataLabel", + "DocumentLocation", + "RecordLocation", + "TableLocation", + "Container", + "Range", + "ImageLocation", + "BoundingBox", + "RedactImageRequest", + "Color", + "RedactImageResponse", + "DeidentifyContentRequest", + "DeidentifyContentResponse", + "ReidentifyContentRequest", + "ReidentifyContentResponse", + "InspectContentRequest", + "InspectContentResponse", + "OutputStorageConfig", + "InfoTypeStats", + "InspectDataSourceDetails", + "HybridInspectStatistics", + "InfoTypeDescription", + "InfoTypeCategory", + "VersionDescription", + "ListInfoTypesRequest", + "ListInfoTypesResponse", + "RiskAnalysisJobConfig", + "QuasiId", + "StatisticalTable", + "PrivacyMetric", + "AnalyzeDataSourceRiskDetails", + "ValueFrequency", + "Value", + "QuoteInfo", + "DateTime", + "DeidentifyConfig", + "ImageTransformations", + "TransformationErrorHandling", + "PrimitiveTransformation", + "TimePartConfig", + "CryptoHashConfig", + "CryptoDeterministicConfig", + "ReplaceValueConfig", + "ReplaceDictionaryConfig", + "ReplaceWithInfoTypeConfig", + "RedactConfig", + "CharsToIgnore", + "CharacterMaskConfig", + "FixedSizeBucketingConfig", + "BucketingConfig", + "CryptoReplaceFfxFpeConfig", + "CryptoKey", + "TransientCryptoKey", + "UnwrappedCryptoKey", + "KmsWrappedCryptoKey", + "DateShiftConfig", + "InfoTypeTransformations", + "FieldTransformation", + "RecordTransformations", + "RecordSuppression", + "RecordCondition", + "TransformationOverview", + "TransformationSummary", + "TransformationDescription", + "TransformationDetails", + "TransformationLocation", + "RecordTransformation", + "TransformationResultStatus", + "TransformationDetailsStorageConfig", + "Schedule", + "Manual", + "InspectTemplate", + "DeidentifyTemplate", + "Error", + "JobTrigger", + "Action", + "TransformationConfig", + "CreateInspectTemplateRequest", + "UpdateInspectTemplateRequest", + "GetInspectTemplateRequest", + "ListInspectTemplatesRequest", + "ListInspectTemplatesResponse", + "DeleteInspectTemplateRequest", + "CreateJobTriggerRequest", + "ActivateJobTriggerRequest", + "UpdateJobTriggerRequest", + "GetJobTriggerRequest", + "CreateDlpJobRequest", + "ListJobTriggersRequest", + "ListJobTriggersResponse", + "DeleteJobTriggerRequest", + "InspectJobConfig", + "DataProfileAction", + "DataProfileJobConfig", + 
"DataProfileLocation", + "DlpJob", + "GetDlpJobRequest", + "ListDlpJobsRequest", + "ListDlpJobsResponse", + "CancelDlpJobRequest", + "FinishDlpJobRequest", + "DeleteDlpJobRequest", + "CreateDeidentifyTemplateRequest", + "UpdateDeidentifyTemplateRequest", + "GetDeidentifyTemplateRequest", + "ListDeidentifyTemplatesRequest", + "ListDeidentifyTemplatesResponse", + "DeleteDeidentifyTemplateRequest", + "LargeCustomDictionaryConfig", + "LargeCustomDictionaryStats", + "StoredInfoTypeConfig", + "StoredInfoTypeStats", + "StoredInfoTypeVersion", + "StoredInfoType", + "CreateStoredInfoTypeRequest", + "UpdateStoredInfoTypeRequest", + "GetStoredInfoTypeRequest", + "ListStoredInfoTypesRequest", + "ListStoredInfoTypesResponse", + "DeleteStoredInfoTypeRequest", + "HybridInspectJobTriggerRequest", + "HybridInspectDlpJobRequest", + "HybridContentItem", + "HybridFindingDetails", + "HybridInspectResponse", + "DataRiskLevel", + "DataProfileConfigSnapshot", + "TableDataProfile", + "ProfileStatus", + "InfoTypeSummary", + "OtherInfoTypeSummary", + "DataProfilePubSubCondition", + "DataProfilePubSubMessage", + }, +) + + +class TransformationResultStatusType(proto.Enum): + r"""Enum of possible outcomes of transformations. SUCCESS if + transformation and storing of transformation was successful, + otherwise, reason for not transforming. + + Values: + STATE_TYPE_UNSPECIFIED (0): + No description available. + INVALID_TRANSFORM (1): + This will be set when a finding could not be + transformed (i.e. outside user set bucket + range). + BIGQUERY_MAX_ROW_SIZE_EXCEEDED (2): + This will be set when a BigQuery + transformation was successful but could not be + stored back in BigQuery because the transformed + row exceeds BigQuery's max row size. + METADATA_UNRETRIEVABLE (3): + This will be set when there is a finding in + the custom metadata of a file, but at the write + time of the transformed file, this key / value + pair is unretrievable. + SUCCESS (4): + This will be set when the transformation and + storing of it is successful. + """ + STATE_TYPE_UNSPECIFIED = 0 + INVALID_TRANSFORM = 1 + BIGQUERY_MAX_ROW_SIZE_EXCEEDED = 2 + METADATA_UNRETRIEVABLE = 3 + SUCCESS = 4 + + +class TransformationContainerType(proto.Enum): + r"""Describes functionality of a given container in its original + format. + + Values: + TRANSFORM_UNKNOWN_CONTAINER (0): + No description available. + TRANSFORM_BODY (1): + No description available. + TRANSFORM_METADATA (2): + No description available. + TRANSFORM_TABLE (3): + No description available. + """ + TRANSFORM_UNKNOWN_CONTAINER = 0 + TRANSFORM_BODY = 1 + TRANSFORM_METADATA = 2 + TRANSFORM_TABLE = 3 + + +class TransformationType(proto.Enum): + r"""An enum of rules that can be used to transform a value. Can be a + record suppression, or one of the transformation rules specified + under ``PrimitiveTransformation``. + + Values: + TRANSFORMATION_TYPE_UNSPECIFIED (0): + Unused + RECORD_SUPPRESSION (1): + Record suppression + REPLACE_VALUE (2): + Replace value + REPLACE_DICTIONARY (15): + Replace value using a dictionary. 
+ REDACT (3): + Redact + CHARACTER_MASK (4): + Character mask + CRYPTO_REPLACE_FFX_FPE (5): + FFX-FPE + FIXED_SIZE_BUCKETING (6): + Fixed size bucketing + BUCKETING (7): + Bucketing + REPLACE_WITH_INFO_TYPE (8): + Replace with info type + TIME_PART (9): + Time part + CRYPTO_HASH (10): + Crypto hash + DATE_SHIFT (12): + Date shift + CRYPTO_DETERMINISTIC_CONFIG (13): + Deterministic crypto + REDACT_IMAGE (14): + Redact image + """ + TRANSFORMATION_TYPE_UNSPECIFIED = 0 + RECORD_SUPPRESSION = 1 + REPLACE_VALUE = 2 + REPLACE_DICTIONARY = 15 + REDACT = 3 + CHARACTER_MASK = 4 + CRYPTO_REPLACE_FFX_FPE = 5 + FIXED_SIZE_BUCKETING = 6 + BUCKETING = 7 + REPLACE_WITH_INFO_TYPE = 8 + TIME_PART = 9 + CRYPTO_HASH = 10 + DATE_SHIFT = 12 + CRYPTO_DETERMINISTIC_CONFIG = 13 + REDACT_IMAGE = 14 + + +class RelationalOperator(proto.Enum): + r"""Operators available for comparing the value of fields. + + Values: + RELATIONAL_OPERATOR_UNSPECIFIED (0): + Unused + EQUAL_TO (1): + Equal. Attempts to match even with + incompatible types. + NOT_EQUAL_TO (2): + Not equal to. Attempts to match even with + incompatible types. + GREATER_THAN (3): + Greater than. + LESS_THAN (4): + Less than. + GREATER_THAN_OR_EQUALS (5): + Greater than or equals. + LESS_THAN_OR_EQUALS (6): + Less than or equals. + EXISTS (7): + Exists + """ + RELATIONAL_OPERATOR_UNSPECIFIED = 0 + EQUAL_TO = 1 + NOT_EQUAL_TO = 2 + GREATER_THAN = 3 + LESS_THAN = 4 + GREATER_THAN_OR_EQUALS = 5 + LESS_THAN_OR_EQUALS = 6 + EXISTS = 7 + + +class MatchingType(proto.Enum): + r"""Type of the match which can be applied to different ways of + matching, like Dictionary, regular expression and intersecting + with findings of another info type. + + Values: + MATCHING_TYPE_UNSPECIFIED (0): + Invalid. + MATCHING_TYPE_FULL_MATCH (1): + Full match. + + - Dictionary: join of Dictionary results matched + complete finding quote + - Regex: all regex matches fill a finding quote + start to end + - Exclude info type: completely inside affecting + info types findings + MATCHING_TYPE_PARTIAL_MATCH (2): + Partial match. + + - Dictionary: at least one of the tokens in the + finding matches + - Regex: substring of the finding matches + - Exclude info type: intersects with affecting + info types findings + MATCHING_TYPE_INVERSE_MATCH (3): + Inverse match. + + - Dictionary: no tokens in the finding match the + dictionary + - Regex: finding doesn't match the regex + - Exclude info type: no intersection with + affecting info types findings + """ + MATCHING_TYPE_UNSPECIFIED = 0 + MATCHING_TYPE_FULL_MATCH = 1 + MATCHING_TYPE_PARTIAL_MATCH = 2 + MATCHING_TYPE_INVERSE_MATCH = 3 + + +class ContentOption(proto.Enum): + r"""Deprecated and unused. + + Values: + CONTENT_UNSPECIFIED (0): + Includes entire content of a file or a data + stream. + CONTENT_TEXT (1): + Text content within the data, excluding any + metadata. + CONTENT_IMAGE (2): + Images found in the data. + """ + CONTENT_UNSPECIFIED = 0 + CONTENT_TEXT = 1 + CONTENT_IMAGE = 2 + + +class MetadataType(proto.Enum): + r"""Type of metadata containing the finding. + + Values: + METADATATYPE_UNSPECIFIED (0): + Unused + STORAGE_METADATA (2): + General file metadata provided by Cloud + Storage. + """ + METADATATYPE_UNSPECIFIED = 0 + STORAGE_METADATA = 2 + + +class InfoTypeSupportedBy(proto.Enum): + r"""Parts of the APIs which use certain infoTypes. + + Values: + ENUM_TYPE_UNSPECIFIED (0): + Unused. + INSPECT (1): + Supported by the inspect operations. + RISK_ANALYSIS (2): + Supported by the risk analysis operations. 
+ """ + ENUM_TYPE_UNSPECIFIED = 0 + INSPECT = 1 + RISK_ANALYSIS = 2 + + +class DlpJobType(proto.Enum): + r"""An enum to represent the various types of DLP jobs. + + Values: + DLP_JOB_TYPE_UNSPECIFIED (0): + Defaults to INSPECT_JOB. + INSPECT_JOB (1): + The job inspected Google Cloud for sensitive + data. + RISK_ANALYSIS_JOB (2): + The job executed a Risk Analysis computation. + """ + DLP_JOB_TYPE_UNSPECIFIED = 0 + INSPECT_JOB = 1 + RISK_ANALYSIS_JOB = 2 + + +class StoredInfoTypeState(proto.Enum): + r"""State of a StoredInfoType version. + + Values: + STORED_INFO_TYPE_STATE_UNSPECIFIED (0): + Unused + PENDING (1): + StoredInfoType version is being created. + READY (2): + StoredInfoType version is ready for use. + FAILED (3): + StoredInfoType creation failed. All relevant error messages + are returned in the ``StoredInfoTypeVersion`` message. + INVALID (4): + StoredInfoType is no longer valid because artifacts stored + in user-controlled storage were modified. To fix an invalid + StoredInfoType, use the ``UpdateStoredInfoType`` method to + create a new version. + """ + STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 + PENDING = 1 + READY = 2 + FAILED = 3 + INVALID = 4 + + +class ResourceVisibility(proto.Enum): + r"""How broadly a resource has been shared. New items may be + added over time. A higher number means more restricted. + + Values: + RESOURCE_VISIBILITY_UNSPECIFIED (0): + Unused. + RESOURCE_VISIBILITY_PUBLIC (10): + Visible to any user. + RESOURCE_VISIBILITY_RESTRICTED (20): + Visible only to specific users. + """ + RESOURCE_VISIBILITY_UNSPECIFIED = 0 + RESOURCE_VISIBILITY_PUBLIC = 10 + RESOURCE_VISIBILITY_RESTRICTED = 20 + + +class EncryptionStatus(proto.Enum): + r"""How a resource is encrypted. + + Values: + ENCRYPTION_STATUS_UNSPECIFIED (0): + Unused. + ENCRYPTION_GOOGLE_MANAGED (1): + Google manages server-side encryption keys on + your behalf. + ENCRYPTION_CUSTOMER_MANAGED (2): + Customer provides the key. + """ + ENCRYPTION_STATUS_UNSPECIFIED = 0 + ENCRYPTION_GOOGLE_MANAGED = 1 + ENCRYPTION_CUSTOMER_MANAGED = 2 + + +class ExcludeInfoTypes(proto.Message): + r"""List of excluded infoTypes. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoType list in ExclusionRule rule drops a finding when it + overlaps or contained within with a finding of an infoType + from this list. For example, for + ``InspectionRuleSet.info_types`` containing + "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` + with "EMAIL_ADDRESS" the phone number findings are dropped + if they overlap with EMAIL_ADDRESS finding. That leads to + "555-222-2222@example.org" to generate only a single + finding, namely email address. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + + +class ExcludeByHotword(proto.Message): + r"""The rule to exclude findings based on a hotword. For record + inspection of tables, column names are considered hotwords. An + example of this is to exclude a finding if a BigQuery column + matches a specific pattern. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Range of characters within which the entire + hotword must reside. The total length of the + window cannot exceed 1000 characters. 
The + windowBefore property in proximity should be set + to 1 if the hotword needs to be included in a + column header. + """ + + hotword_regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CustomInfoType.Regex, + ) + proximity: storage.CustomInfoType.DetectionRule.Proximity = proto.Field( + proto.MESSAGE, + number=2, + message=storage.CustomInfoType.DetectionRule.Proximity, + ) + + +class ExclusionRule(proto.Message): + r"""The rule that specifies conditions when findings of infoTypes + specified in ``InspectionRuleSet`` are removed from results. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Dictionary which defines the rule. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression which defines the rule. + + This field is a member of `oneof`_ ``type``. + exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes): + Set of infoTypes for which findings would + affect this rule. + + This field is a member of `oneof`_ ``type``. + exclude_by_hotword (google.cloud.dlp_v2.types.ExcludeByHotword): + Drop if the hotword rule is contained in the + proximate context. For tabular data, the context + includes the column name. + + This field is a member of `oneof`_ ``type``. + matching_type (google.cloud.dlp_v2.types.MatchingType): + How the rule is applied, see MatchingType + documentation for details. + """ + + dictionary: storage.CustomInfoType.Dictionary = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=storage.CustomInfoType.Dictionary, + ) + regex: storage.CustomInfoType.Regex = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message=storage.CustomInfoType.Regex, + ) + exclude_info_types: "ExcludeInfoTypes" = proto.Field( + proto.MESSAGE, + number=3, + oneof="type", + message="ExcludeInfoTypes", + ) + exclude_by_hotword: "ExcludeByHotword" = proto.Field( + proto.MESSAGE, + number=5, + oneof="type", + message="ExcludeByHotword", + ) + matching_type: "MatchingType" = proto.Field( + proto.ENUM, + number=4, + enum="MatchingType", + ) + + +class InspectionRule(proto.Message): + r"""A single inspection rule to be applied to infoTypes, specified in + ``InspectionRuleSet``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + + This field is a member of `oneof`_ ``type``. + exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): + Exclusion rule. + + This field is a member of `oneof`_ ``type``. 
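A short sketch of how the rule types above compose, assuming the published google-cloud-dlp client; the dictionary words are placeholders. The InspectionRule wraps an ExclusionRule that drops PERSON_NAME findings fully matching a small test-name dictionary:

from google.cloud import dlp_v2

exclusion_rule = dlp_v2.ExclusionRule(
    dictionary=dlp_v2.CustomInfoType.Dictionary(
        word_list=dlp_v2.CustomInfoType.Dictionary.WordList(
            words=["Jane Doe", "John Doe"],  # placeholder test names
        )
    ),
    matching_type=dlp_v2.MatchingType.MATCHING_TYPE_FULL_MATCH,
)
rule_set = dlp_v2.InspectionRuleSet(
    info_types=[dlp_v2.InfoType(name="PERSON_NAME")],
    rules=[dlp_v2.InspectionRule(exclusion_rule=exclusion_rule)],
)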
+ """ + + hotword_rule: storage.CustomInfoType.DetectionRule.HotwordRule = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=storage.CustomInfoType.DetectionRule.HotwordRule, + ) + exclusion_rule: "ExclusionRule" = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message="ExclusionRule", + ) + + +class InspectionRuleSet(proto.Message): + r"""Rule set for modifying a set of infoTypes to alter behavior + under certain circumstances, depending on the specific details + of the rules within the set. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + List of infoTypes this rule set is applied + to. + rules (MutableSequence[google.cloud.dlp_v2.types.InspectionRule]): + Set of rules to be applied to infoTypes. The + rules are applied in order. + """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + rules: MutableSequence["InspectionRule"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="InspectionRule", + ) + + +class InspectConfig(proto.Message): + r"""Configuration description of the scanning process. When used with + redactContent only info_types and min_likelihood are currently used. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + Restricts what info_types to look for. The values must + correspond to InfoType values returned by ListInfoTypes or + listed at + https://cloud.google.com/dlp/docs/infotypes-reference. + + When no InfoTypes or CustomInfoTypes are specified in a + request, the system may automatically choose what detectors + to run. By default this may be all types, but may change + over time as detectors are updated. + + If you need precise control and predictability as to what + detectors are run you should specify specific InfoTypes + listed in the reference, otherwise a default list will be + used, which may change over time. + min_likelihood (google.cloud.dlp_v2.types.Likelihood): + Only returns findings equal or above this + threshold. The default is POSSIBLE. + See https://cloud.google.com/dlp/docs/likelihood + to learn more. + limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): + Configuration to control the number of findings returned. + This is not used for data profiling. + + When redacting sensitive data from images, finding limits + don't apply. They can cause unexpected or inconsistent + results, where only some data is redacted. Don't include + finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + include_quote (bool): + When true, a contextual quote from the data that triggered a + finding is included in the response; see + [Finding.quote][google.privacy.dlp.v2.Finding.quote]. This + is not used for data profiling. + exclude_info_types (bool): + When true, excludes type information of the + findings. This is not used for data profiling. + custom_info_types (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType]): + CustomInfoTypes provided by the user. See + https://cloud.google.com/dlp/docs/creating-custom-infotypes + to learn more. + content_options (MutableSequence[google.cloud.dlp_v2.types.ContentOption]): + Deprecated and unused. + rule_set (MutableSequence[google.cloud.dlp_v2.types.InspectionRuleSet]): + Set of rules to apply to the findings for + this InspectConfig. 
Exclusion rules contained + in the set are executed last; other rules + are executed in the order they are specified for + each info type. + """ + + class FindingLimits(proto.Message): + r"""Configuration to control the number of findings returned for + inspection. This is not used for de-identification or data + profiling. + + When redacting sensitive data from images, finding limits don't + apply. They can cause unexpected or inconsistent results, where only + some data is redacted. Don't include finding limits in + [RedactImage][google.privacy.dlp.v2.DlpService.RedactImage] + requests. Otherwise, Cloud DLP returns an error. + + Attributes: + max_findings_per_item (int): + Max number of findings that will be returned for each item + scanned. When set within ``InspectJobConfig``, the maximum + returned is 2000 regardless of whether this is set higher. + When set within ``InspectContentRequest``, this field is ignored. + max_findings_per_request (int): + Max number of findings that will be returned per + request/job. When set within ``InspectContentRequest``, the + maximum returned is 2000 regardless of whether this is set higher. + max_findings_per_info_type (MutableSequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): + Configuration of findings limit given for + specified infoTypes. + """ + + class InfoTypeLimit(proto.Message): + r"""Max findings configuration per infoType, per content item or + long running DlpJob. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Type of information the findings limit applies to. Only one + limit per info_type should be provided. If InfoTypeLimit + does not have an info_type, the DLP API applies the limit + against all info_types that are found but not specified in + another InfoTypeLimit. + max_findings (int): + Max findings limit for the given infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + max_findings: int = proto.Field( + proto.INT32, + number=2, + ) + + max_findings_per_item: int = proto.Field( + proto.INT32, + number=1, + ) + max_findings_per_request: int = proto.Field( + proto.INT32, + number=2, + ) + max_findings_per_info_type: MutableSequence[ + "InspectConfig.FindingLimits.InfoTypeLimit" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="InspectConfig.FindingLimits.InfoTypeLimit", + ) + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + min_likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=2, + enum=storage.Likelihood, + ) + limits: FindingLimits = proto.Field( + proto.MESSAGE, + number=3, + message=FindingLimits, + ) + include_quote: bool = proto.Field( + proto.BOOL, + number=4, + ) + exclude_info_types: bool = proto.Field( + proto.BOOL, + number=5, + ) + custom_info_types: MutableSequence[storage.CustomInfoType] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=storage.CustomInfoType, + ) + content_options: MutableSequence["ContentOption"] = proto.RepeatedField( + proto.ENUM, + number=8, + enum="ContentOption", + ) + rule_set: MutableSequence["InspectionRuleSet"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="InspectionRuleSet", + ) + + +class ByteContentItem(proto.Message): + r"""Container for bytes to inspect or redact. + + Attributes: + type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): + The type of data stored in the bytes string. The default is + TEXT_UTF8.
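A sketch pulling together the InspectConfig fields defined above (infoTypes, likelihood threshold, quoting, and finding limits); the values are illustrative, not recommendations:

from google.cloud import dlp_v2

inspect_config = dlp_v2.InspectConfig(
    info_types=[
        dlp_v2.InfoType(name="EMAIL_ADDRESS"),
        dlp_v2.InfoType(name="PHONE_NUMBER"),
    ],
    min_likelihood=dlp_v2.Likelihood.LIKELY,  # drop low-confidence findings
    include_quote=True,
    limits=dlp_v2.InspectConfig.FindingLimits(max_findings_per_request=100),
)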
+ data (bytes): + Content data to inspect or redact. + """ + + class BytesType(proto.Enum): + r"""The type of data being sent for inspection. To learn more, see + `Supported file + types `__. + + Values: + BYTES_TYPE_UNSPECIFIED (0): + Unused + IMAGE (6): + Any image type. + IMAGE_JPEG (1): + jpeg + IMAGE_BMP (2): + bmp + IMAGE_PNG (3): + png + IMAGE_SVG (4): + svg + TEXT_UTF8 (5): + plain text + WORD_DOCUMENT (7): + docx, docm, dotx, dotm + PDF (8): + pdf + POWERPOINT_DOCUMENT (9): + pptx, pptm, potx, potm, pot + EXCEL_DOCUMENT (10): + xlsx, xlsm, xltx, xltm + AVRO (11): + avro + CSV (12): + csv + TSV (13): + tsv + """ + BYTES_TYPE_UNSPECIFIED = 0 + IMAGE = 6 + IMAGE_JPEG = 1 + IMAGE_BMP = 2 + IMAGE_PNG = 3 + IMAGE_SVG = 4 + TEXT_UTF8 = 5 + WORD_DOCUMENT = 7 + PDF = 8 + POWERPOINT_DOCUMENT = 9 + EXCEL_DOCUMENT = 10 + AVRO = 11 + CSV = 12 + TSV = 13 + + type_: BytesType = proto.Field( + proto.ENUM, + number=1, + enum=BytesType, + ) + data: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class ContentItem(proto.Message): + r""" + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + value (str): + String data to inspect or redact. + + This field is a member of `oneof`_ ``data_item``. + table (google.cloud.dlp_v2.types.Table): + Structured content for inspection. See + https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table + to learn more. + + This field is a member of `oneof`_ ``data_item``. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + Content data to inspect or redact. Replaces ``type`` and + ``data``. + + This field is a member of `oneof`_ ``data_item``. + """ + + value: str = proto.Field( + proto.STRING, + number=3, + oneof="data_item", + ) + table: "Table" = proto.Field( + proto.MESSAGE, + number=4, + oneof="data_item", + message="Table", + ) + byte_item: "ByteContentItem" = proto.Field( + proto.MESSAGE, + number=5, + oneof="data_item", + message="ByteContentItem", + ) + + +class Table(proto.Message): + r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request + allowed. See + https://cloud.google.com/dlp/docs/inspecting-structured-text#inspecting_a_table + to learn more. + + Attributes: + headers (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Headers of the table. + rows (MutableSequence[google.cloud.dlp_v2.types.Table.Row]): + Rows of the table. + """ + + class Row(proto.Message): + r"""Values of the row. + + Attributes: + values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Individual cells. + """ + + values: MutableSequence["Value"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Value", + ) + + headers: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + rows: MutableSequence[Row] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Row, + ) + + +class InspectResult(proto.Message): + r"""All the findings for a single scanned item. + + Attributes: + findings (MutableSequence[google.cloud.dlp_v2.types.Finding]): + List of findings for an item. + findings_truncated (bool): + If true, then this item might have more + findings than were returned, and the findings + returned are an arbitrary subset of all + findings. 
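The Table message above allows up to 50,000 Value cells per request. A construction sketch with placeholder headers and cells:

from google.cloud import dlp_v2

item = dlp_v2.ContentItem(
    table=dlp_v2.Table(
        headers=[dlp_v2.FieldId(name="name"), dlp_v2.FieldId(name="email")],
        rows=[
            dlp_v2.Table.Row(
                values=[
                    dlp_v2.Value(string_value="Jane"),
                    dlp_v2.Value(string_value="jane@example.org"),
                ]
            )
        ],
    )
)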
The findings list might be truncated + because the input items were too large, or + because the server reached the maximum amount of + resources allowed for a single API call. For + best results, divide the input into smaller + batches. + """ + + findings: MutableSequence["Finding"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Finding", + ) + findings_truncated: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class Finding(proto.Message): + r"""Represents a piece of potentially sensitive content. + + Attributes: + name (str): + Resource name in format + projects/{project}/locations/{location}/findings/{finding} + Populated only when viewing persisted findings. + quote (str): + The content that was found. Even if the content is not + textual, it may be converted to a textual representation + here. Provided if ``include_quote`` is true and the finding + is less than or equal to 4096 bytes long. If the finding + exceeds 4096 bytes in length, the quote may be omitted. + info_type (google.cloud.dlp_v2.types.InfoType): + The type of content that might have been found. Provided if + ``excluded_types`` is false. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Confidence of how likely it is that the ``info_type`` is + correct. + location (google.cloud.dlp_v2.types.Location): + Where the content was found. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when finding was detected. + quote_info (google.cloud.dlp_v2.types.QuoteInfo): + Contains data parsed from quotes. Only populated if + include_quote was set to true and a supported infoType was + requested. Currently supported infoTypes: DATE, + DATE_OF_BIRTH and TIME. + resource_name (str): + The job that stored the finding. + trigger_name (str): + Job trigger name, if applicable, for this + finding. + labels (MutableMapping[str, str]): + The labels associated with this ``Finding``. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + job_create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the job started that produced this + finding. + job_name (str): + The job that stored the finding. + finding_id (str): + The unique finding id. 
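A usage sketch for Finding and InspectResult, assuming a placeholder project ID and the global processing location; the findings_truncated check follows the advice in the docstring above:

from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
response = client.inspect_content(
    request={
        "parent": "projects/my-project/locations/global",
        "inspect_config": {
            "info_types": [{"name": "EMAIL_ADDRESS"}],
            "include_quote": True,
        },
        "item": {"value": "Reach me at jane.doe@example.org"},
    }
)
for finding in response.result.findings:
    print(finding.info_type.name, finding.likelihood.name, finding.quote)
if response.result.findings_truncated:
    print("Results truncated; split the input into smaller batches.")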
+ """ + + name: str = proto.Field( + proto.STRING, + number=14, + ) + quote: str = proto.Field( + proto.STRING, + number=1, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + likelihood: storage.Likelihood = proto.Field( + proto.ENUM, + number=3, + enum=storage.Likelihood, + ) + location: "Location" = proto.Field( + proto.MESSAGE, + number=4, + message="Location", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + quote_info: "QuoteInfo" = proto.Field( + proto.MESSAGE, + number=7, + message="QuoteInfo", + ) + resource_name: str = proto.Field( + proto.STRING, + number=8, + ) + trigger_name: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + job_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + job_name: str = proto.Field( + proto.STRING, + number=13, + ) + finding_id: str = proto.Field( + proto.STRING, + number=15, + ) + + +class Location(proto.Message): + r"""Specifies the location of the finding. + + Attributes: + byte_range (google.cloud.dlp_v2.types.Range): + Zero-based byte offsets delimiting the + finding. These are relative to the finding's + containing element. Note that when the content + is not textual, this references the UTF-8 + encoded textual representation of the content. + Omitted if content is an image. + codepoint_range (google.cloud.dlp_v2.types.Range): + Unicode character offsets delimiting the + finding. These are relative to the finding's + containing element. Provided when the content is + text. + content_locations (MutableSequence[google.cloud.dlp_v2.types.ContentLocation]): + List of nested objects pointing to the + precise location of the finding within the file + or record. + container (google.cloud.dlp_v2.types.Container): + Information about the container where this + finding occurred, if available. + """ + + byte_range: "Range" = proto.Field( + proto.MESSAGE, + number=1, + message="Range", + ) + codepoint_range: "Range" = proto.Field( + proto.MESSAGE, + number=2, + message="Range", + ) + content_locations: MutableSequence["ContentLocation"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="ContentLocation", + ) + container: "Container" = proto.Field( + proto.MESSAGE, + number=8, + message="Container", + ) + + +class ContentLocation(proto.Message): + r"""Precise location of the finding within a document, record, + image, or metadata container. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + container_name (str): + Name of the container where the finding is located. The top + level name is the source file name or table name. Names of + some common storage containers are formatted as follows: + + - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` + - Cloud Storage files: ``gs://{bucket}/{path}`` + - Datastore namespace: {namespace} + + Nested names could be absent if the embedded object has no + string identifier (for example, an image contained within a + document). 
+ record_location (google.cloud.dlp_v2.types.RecordLocation): + Location within a row or record of a database + table. + + This field is a member of `oneof`_ ``location``. + image_location (google.cloud.dlp_v2.types.ImageLocation): + Location within an image's pixels. + + This field is a member of `oneof`_ ``location``. + document_location (google.cloud.dlp_v2.types.DocumentLocation): + Location data for document files. + + This field is a member of `oneof`_ ``location``. + metadata_location (google.cloud.dlp_v2.types.MetadataLocation): + Location within the metadata for inspected + content. + + This field is a member of `oneof`_ ``location``. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Finding container modification timestamp, if applicable. For + Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + container_version (str): + Finding container version, if available + ("generation" for Cloud Storage). + """ + + container_name: str = proto.Field( + proto.STRING, + number=1, + ) + record_location: "RecordLocation" = proto.Field( + proto.MESSAGE, + number=2, + oneof="location", + message="RecordLocation", + ) + image_location: "ImageLocation" = proto.Field( + proto.MESSAGE, + number=3, + oneof="location", + message="ImageLocation", + ) + document_location: "DocumentLocation" = proto.Field( + proto.MESSAGE, + number=5, + oneof="location", + message="DocumentLocation", + ) + metadata_location: "MetadataLocation" = proto.Field( + proto.MESSAGE, + number=8, + oneof="location", + message="MetadataLocation", + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class MetadataLocation(proto.Message): + r"""Metadata Location + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.cloud.dlp_v2.types.MetadataType): + Type of metadata containing the finding. + storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): + Storage metadata. + + This field is a member of `oneof`_ ``label``. + """ + + type_: "MetadataType" = proto.Field( + proto.ENUM, + number=1, + enum="MetadataType", + ) + storage_label: "StorageMetadataLabel" = proto.Field( + proto.MESSAGE, + number=3, + oneof="label", + message="StorageMetadataLabel", + ) + + +class StorageMetadataLabel(proto.Message): + r"""Storage metadata label to indicate which metadata entry + contains findings. + + Attributes: + key (str): + + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DocumentLocation(proto.Message): + r"""Location of a finding within a document. + + Attributes: + file_offset (int): + Offset of the line, from the beginning of the + file, where the finding is located. + """ + + file_offset: int = proto.Field( + proto.INT64, + number=1, + ) + + +class RecordLocation(proto.Message): + r"""Location of a finding within a row or record. + + Attributes: + record_key (google.cloud.dlp_v2.types.RecordKey): + Key of the finding. + field_id (google.cloud.dlp_v2.types.FieldId): + Field id of the field containing the finding. + table_location (google.cloud.dlp_v2.types.TableLocation): + Location within a ``ContentItem.Table``. 
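For table inspections, the RecordLocation and TableLocation messages above pinpoint the matched cell. A sketch with a placeholder project and a one-row table:

from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
response = client.inspect_content(
    request={
        "parent": "projects/my-project/locations/global",
        "inspect_config": {"info_types": [{"name": "EMAIL_ADDRESS"}]},
        "item": {
            "table": {
                "headers": [{"name": "email"}],
                "rows": [{"values": [{"string_value": "jane@example.org"}]}],
            }
        },
    }
)
for finding in response.result.findings:
    for loc in finding.location.content_locations:
        record = loc.record_location
        print(record.field_id.name, record.table_location.row_index)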
+ """ + + record_key: storage.RecordKey = proto.Field( + proto.MESSAGE, + number=1, + message=storage.RecordKey, + ) + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + table_location: "TableLocation" = proto.Field( + proto.MESSAGE, + number=3, + message="TableLocation", + ) + + +class TableLocation(proto.Message): + r"""Location of a finding within a table. + + Attributes: + row_index (int): + The zero-based index of the row where the finding is + located. Only populated for resources that have a natural + ordering, not BigQuery. In BigQuery, to identify the row a + finding came from, populate + BigQueryOptions.identifying_fields with your primary key + column names and when you store the findings the value of + those columns will be stored inside of Finding. + """ + + row_index: int = proto.Field( + proto.INT64, + number=1, + ) + + +class Container(proto.Message): + r"""Represents a container that may contain DLP findings. + Examples of a container include a file, table, or database + record. + + Attributes: + type_ (str): + Container type, for example BigQuery or Cloud + Storage. + project_id (str): + Project where the finding was found. + Can be different from the project that owns the + finding. + full_path (str): + A string representation of the full container + name. Examples: + + - BigQuery: 'Project:DataSetId.TableId' + - Cloud Storage: + 'gs://Bucket/folders/filename.txt' + root_path (str): + The root of the container. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the root is ``dataset_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the root is + ``gs://bucket`` + relative_path (str): + The rest of the path after the root. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the relative path is ``table_id`` + - For Cloud Storage file + ``gs://bucket/folder/filename.txt``, the relative path is + ``folder/filename.txt`` + update_time (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if applicable. + For Cloud Storage, this field contains the last file + modification timestamp. For a BigQuery table, this field + contains the last_modified_time property. For Datastore, + this field isn't populated. + version (str): + Findings container version, if available + ("generation" for Cloud Storage). + """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + full_path: str = proto.Field( + proto.STRING, + number=3, + ) + root_path: str = proto.Field( + proto.STRING, + number=4, + ) + relative_path: str = proto.Field( + proto.STRING, + number=5, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class Range(proto.Message): + r"""Generic half-open interval [start, end) + + Attributes: + start (int): + Index of the first character of the range + (inclusive). + end (int): + Index of the last character of the range + (exclusive). + """ + + start: int = proto.Field( + proto.INT64, + number=1, + ) + end: int = proto.Field( + proto.INT64, + number=2, + ) + + +class ImageLocation(proto.Message): + r"""Location of the finding within an image. + + Attributes: + bounding_boxes (MutableSequence[google.cloud.dlp_v2.types.BoundingBox]): + Bounding boxes locating the pixels within the + image containing the finding. 
+ """ + + bounding_boxes: MutableSequence["BoundingBox"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BoundingBox", + ) + + +class BoundingBox(proto.Message): + r"""Bounding box encompassing detected text within an image. + + Attributes: + top (int): + Top coordinate of the bounding box. (0,0) is + upper left. + left (int): + Left coordinate of the bounding box. (0,0) is + upper left. + width (int): + Width of the bounding box in pixels. + height (int): + Height of the bounding box in pixels. + """ + + top: int = proto.Field( + proto.INT32, + number=1, + ) + left: int = proto.Field( + proto.INT32, + number=2, + ) + width: int = proto.Field( + proto.INT32, + number=3, + ) + height: int = proto.Field( + proto.INT32, + number=4, + ) + + +class RedactImageRequest(proto.Message): + r"""Request to search for potentially sensitive info in an image + and redact it by covering it with a colored rectangle. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + location_id (str): + Deprecated. This field has no effect. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + image_redaction_configs (MutableSequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): + The configuration for specifying what content + to redact from images. + include_findings (bool): + Whether the response should include findings + along with the redacted image. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + The content must be PNG, JPEG, SVG or BMP. + """ + + class ImageRedactionConfig(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Only one per info_type should be provided per request. If + not specified, and redact_all_text is false, the DLP API + will redact all text that it matches against all info_types + that are found, but not specified in another + ImageRedactionConfig. + + This field is a member of `oneof`_ ``target``. + redact_all_text (bool): + If true, all text found in the image, regardless whether it + matches an info_type, is redacted. Only one should be + provided. + + This field is a member of `oneof`_ ``target``. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. 
+ """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + oneof="target", + message=storage.InfoType, + ) + redact_all_text: bool = proto.Field( + proto.BOOL, + number=2, + oneof="target", + ) + redaction_color: "Color" = proto.Field( + proto.MESSAGE, + number=3, + message="Color", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + location_id: str = proto.Field( + proto.STRING, + number=8, + ) + inspect_config: "InspectConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="InspectConfig", + ) + image_redaction_configs: MutableSequence[ + ImageRedactionConfig + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=ImageRedactionConfig, + ) + include_findings: bool = proto.Field( + proto.BOOL, + number=6, + ) + byte_item: "ByteContentItem" = proto.Field( + proto.MESSAGE, + number=7, + message="ByteContentItem", + ) + + +class Color(proto.Message): + r"""Represents a color in the RGB color space. + + Attributes: + red (float): + The amount of red in the color as a value in the interval + [0, 1]. + green (float): + The amount of green in the color as a value in the interval + [0, 1]. + blue (float): + The amount of blue in the color as a value in the interval + [0, 1]. + """ + + red: float = proto.Field( + proto.FLOAT, + number=1, + ) + green: float = proto.Field( + proto.FLOAT, + number=2, + ) + blue: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class RedactImageResponse(proto.Message): + r"""Results of redacting an image. + + Attributes: + redacted_image (bytes): + The redacted image. The type will be the same + as the original image. + extracted_text (str): + If an image was being inspected and the InspectConfig's + include_quote was set to true, then this field will include + all text, if any, that was found in the image. + inspect_result (google.cloud.dlp_v2.types.InspectResult): + The findings. Populated when include_findings in the request + is true. + """ + + redacted_image: bytes = proto.Field( + proto.BYTES, + number=1, + ) + extracted_text: str = proto.Field( + proto.STRING, + number=2, + ) + inspect_result: "InspectResult" = proto.Field( + proto.MESSAGE, + number=3, + message="InspectResult", + ) + + +class DeidentifyContentRequest(proto.Message): + r"""Request to de-identify a ContentItem. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the de-identification of the content item. + Items specified here will override the template referenced + by the deidentify_template_name argument. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. Items specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to de-identify. Will be treated as text. 
+ + This value must be of type + [Table][google.privacy.dlp.v2.Table] if your + [deidentify_config][google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config] + is a + [RecordTransformations][google.privacy.dlp.v2.RecordTransformations] + object. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + deidentify_template_name (str): + Template to use. Any configuration directly specified in + deidentify_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_config: "DeidentifyConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="DeidentifyConfig", + ) + inspect_config: "InspectConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="InspectConfig", + ) + item: "ContentItem" = proto.Field( + proto.MESSAGE, + number=4, + message="ContentItem", + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + deidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class DeidentifyContentResponse(proto.Message): + r"""Results of de-identifying a ContentItem. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The de-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made on the ``item``. + """ + + item: "ContentItem" = proto.Field( + proto.MESSAGE, + number=1, + message="ContentItem", + ) + overview: "TransformationOverview" = proto.Field( + proto.MESSAGE, + number=2, + message="TransformationOverview", + ) + + +class ReidentifyContentRequest(proto.Message): + r"""Request to re-identify an item. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the re-identification of the content item. + This field shares the same proto message type that is used + for de-identification, however its usage here is for the + reversal of the previous de-identification. + Re-identification is performed by examining the + transformations used to de-identify the items and executing + the reverse. This requires that only reversible + transformations be provided here. 
The reversible + transformations are: + + - ``CryptoDeterministicConfig`` + - ``CryptoReplaceFfxFpeConfig`` + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + item (google.cloud.dlp_v2.types.ContentItem): + The item to re-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + ``inspect_config`` will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + reidentify_template_name (str): + Template to use. References an instance of + ``DeidentifyTemplate``. Any configuration directly specified + in ``reidentify_config`` or ``inspect_config`` will override + those set in the template. The ``DeidentifyTemplate`` used + must include only reversible transformations. Singular + fields that are set in this request will replace their + corresponding fields in the template. Repeated fields are + appended. Singular sub-messages and groups are recursively + merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + reidentify_config: "DeidentifyConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="DeidentifyConfig", + ) + inspect_config: "InspectConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="InspectConfig", + ) + item: "ContentItem" = proto.Field( + proto.MESSAGE, + number=4, + message="ContentItem", + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=5, + ) + reidentify_template_name: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ReidentifyContentResponse(proto.Message): + r"""Results of re-identifying an item. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The re-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made to the ``item``. + """ + + item: "ContentItem" = proto.Field( + proto.MESSAGE, + number=1, + message="ContentItem", + ) + overview: "TransformationOverview" = proto.Field( + proto.MESSAGE, + number=2, + message="TransformationOverview", + ) + + +class InspectContentRequest(proto.Message): + r"""Request to search for potentially sensitive info in a + ContentItem. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. What specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. 
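Because re-identification only works on reversible transformations, a de-identify call that is meant to be reversed should use CryptoDeterministicConfig or CryptoReplaceFfxFpeConfig. A sketch with a throwaway 32-byte unwrapped key and a placeholder project (in practice the key must be persisted, or nothing can be reversed):

import os

from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
deidentify_config = {
    "info_type_transformations": {
        "transformations": [
            {
                "primitive_transformation": {
                    "crypto_deterministic_config": {
                        "crypto_key": {"unwrapped": {"key": os.urandom(32)}},
                        "surrogate_info_type": {"name": "EMAIL_SURROGATE"},
                    }
                }
            }
        ]
    }
}
response = client.deidentify_content(
    request={
        "parent": "projects/my-project/locations/global",
        "deidentify_config": deidentify_config,
        "inspect_config": {"info_types": [{"name": "EMAIL_ADDRESS"}]},
        "item": {"value": "jane.doe@example.org"},
    }
)
print(response.item.value)  # surrogate output, e.g. EMAIL_SURROGATE(...):...

Reversing it is a reidentify_content call that passes the same configuration as reidentify_config and declares the surrogate infoType in its inspect_config.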
+ Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_config: "InspectConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="InspectConfig", + ) + item: "ContentItem" = proto.Field( + proto.MESSAGE, + number=3, + message="ContentItem", + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class InspectContentResponse(proto.Message): + r"""Results of inspecting an item. + + Attributes: + result (google.cloud.dlp_v2.types.InspectResult): + The findings. + """ + + result: "InspectResult" = proto.Field( + proto.MESSAGE, + number=1, + message="InspectResult", + ) + + +class OutputStorageConfig(proto.Message): + r"""Cloud repository for storing output. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Store findings in an existing table or a new table in an + existing dataset. If table_id is not set a new one will be + generated for you with the following format: + dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific time zone + will be used for generating the date details. + + For Inspect, each column in an existing output table must + have the same name, type, and mode of a field in the + ``Finding`` object. + + For Risk, an existing output table should be the output of a + previous Risk analysis job run on the same source table, + with the same privacy metric and quasi-identifiers. Risk + jobs that analyze the same table but compute a different + privacy metric, or use different sets of quasi-identifiers, + cannot store their results in the same table. + + This field is a member of `oneof`_ ``type``. + output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): + Schema used for writing the findings for Inspect jobs. This + field is only used for Inspect and must be unspecified for + Risk jobs. Columns are derived from the ``Finding`` object. + If appending to an existing table, any columns from the + predefined schema that are missing will be added. No columns + in the existing table will be deleted. + + If unspecified, then all available columns will be used for + a new table or an (existing) table with no schema, and no + changes will be made to an existing table that has a schema. + Only for use with external storage. + """ + + class OutputSchema(proto.Enum): + r"""Predefined schemas for storing findings. + Only for use with external storage. + + Values: + OUTPUT_SCHEMA_UNSPECIFIED (0): + Unused. + BASIC_COLUMNS (1): + Basic schema including only ``info_type``, ``quote``, + ``certainty``, and ``timestamp``. + GCS_COLUMNS (2): + Schema tailored to findings from scanning + Cloud Storage. + DATASTORE_COLUMNS (3): + Schema tailored to findings from scanning + Google Datastore. + BIG_QUERY_COLUMNS (4): + Schema tailored to findings from scanning + Google BigQuery. + ALL_COLUMNS (5): + Schema containing all columns. 
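
As a rough illustration of how an output table and schema are chosen together (the project and dataset names are hypothetical)::

    from google.cloud import dlp_v2

    output_config = dlp_v2.OutputStorageConfig(
        table=dlp_v2.BigQueryTable(
            project_id="example-project",
            dataset_id="dlp_results",
            # table_id omitted: a dlp_googleapis_yyyy_mm_dd_[dlp_job_id]
            # table is generated automatically.
        ),
        output_schema=dlp_v2.OutputStorageConfig.OutputSchema.BASIC_COLUMNS,
    )
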
+ """ + OUTPUT_SCHEMA_UNSPECIFIED = 0 + BASIC_COLUMNS = 1 + GCS_COLUMNS = 2 + DATASTORE_COLUMNS = 3 + BIG_QUERY_COLUMNS = 4 + ALL_COLUMNS = 5 + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=storage.BigQueryTable, + ) + output_schema: OutputSchema = proto.Field( + proto.ENUM, + number=3, + enum=OutputSchema, + ) + + +class InfoTypeStats(proto.Message): + r"""Statistics regarding a specific InfoType. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The type of finding this stat is for. + count (int): + Number of findings for this infoType. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class InspectDataSourceDetails(proto.Message): + r"""The results of an inspect DataSource job. + + Attributes: + requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): + The configuration used for this job. + result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): + A summary of the outcome of this inspection + job. + """ + + class RequestedOptions(proto.Message): + r"""Snapshot of the inspection configuration. + + Attributes: + snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + If run with an InspectTemplate, a snapshot of + its state at the time of this run. + job_config (google.cloud.dlp_v2.types.InspectJobConfig): + Inspect config. + """ + + snapshot_inspect_template: "InspectTemplate" = proto.Field( + proto.MESSAGE, + number=1, + message="InspectTemplate", + ) + job_config: "InspectJobConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="InspectJobConfig", + ) + + class Result(proto.Message): + r"""All result fields mentioned below are updated while the job + is processing. + + Attributes: + processed_bytes (int): + Total size in bytes that were processed. + total_estimated_bytes (int): + Estimate of the number of bytes to process. + info_type_stats (MutableSequence[google.cloud.dlp_v2.types.InfoTypeStats]): + Statistics of how many instances of each info + type were found during inspect job. + hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): + Statistics related to the processing of + hybrid inspect. + """ + + processed_bytes: int = proto.Field( + proto.INT64, + number=1, + ) + total_estimated_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + info_type_stats: MutableSequence["InfoTypeStats"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="InfoTypeStats", + ) + hybrid_stats: "HybridInspectStatistics" = proto.Field( + proto.MESSAGE, + number=7, + message="HybridInspectStatistics", + ) + + requested_options: RequestedOptions = proto.Field( + proto.MESSAGE, + number=2, + message=RequestedOptions, + ) + result: Result = proto.Field( + proto.MESSAGE, + number=3, + message=Result, + ) + + +class HybridInspectStatistics(proto.Message): + r"""Statistics related to processing hybrid inspect requests. + + Attributes: + processed_count (int): + The number of hybrid inspection requests + processed within this job. + aborted_count (int): + The number of hybrid inspection requests + aborted because the job ran out of quota or was + ended before they could be processed. + pending_count (int): + The number of hybrid requests currently being processed. + Only populated when called via method ``getDlpJob``. A burst + of traffic may cause hybrid inspect requests to be enqueued. 
+            Processing will take place as quickly as possible, but
+            resource limitations may impact how long a request is
+            enqueued for.
+    """
+
+    processed_count: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    aborted_count: int = proto.Field(
+        proto.INT64,
+        number=2,
+    )
+    pending_count: int = proto.Field(
+        proto.INT64,
+        number=3,
+    )
+
+
+class InfoTypeDescription(proto.Message):
+    r"""InfoType description.
+
+    Attributes:
+        name (str):
+            Internal name of the infoType.
+        display_name (str):
+            Human-readable form of the infoType name.
+        supported_by (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]):
+            Which parts of the API support this
+            InfoType.
+        description (str):
+            Description of the infotype. Translated when
+            language is provided in the request.
+        versions (MutableSequence[google.cloud.dlp_v2.types.VersionDescription]):
+            A list of available versions for the
+            infotype.
+        categories (MutableSequence[google.cloud.dlp_v2.types.InfoTypeCategory]):
+            The category of the infoType.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    supported_by: MutableSequence["InfoTypeSupportedBy"] = proto.RepeatedField(
+        proto.ENUM,
+        number=3,
+        enum="InfoTypeSupportedBy",
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    versions: MutableSequence["VersionDescription"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=9,
+        message="VersionDescription",
+    )
+    categories: MutableSequence["InfoTypeCategory"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=10,
+        message="InfoTypeCategory",
+    )
+
+
+class InfoTypeCategory(proto.Message):
+    r"""Classification of infoTypes to organize them according to
+    geographic location, industry, and data type.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        location_category (google.cloud.dlp_v2.types.InfoTypeCategory.LocationCategory):
+            The region or country that issued the ID or
+            document represented by the infoType.
+
+            This field is a member of `oneof`_ ``category``.
+        industry_category (google.cloud.dlp_v2.types.InfoTypeCategory.IndustryCategory):
+            The group of relevant businesses where this
+            infoType is commonly used.
+
+            This field is a member of `oneof`_ ``category``.
+        type_category (google.cloud.dlp_v2.types.InfoTypeCategory.TypeCategory):
+            The class of identifiers where this infoType
+            belongs.
+
+            This field is a member of `oneof`_ ``category``.
+    """
+
+    class LocationCategory(proto.Enum):
+        r"""Enum of the current locations.
+        We might add more locations in the future.
+
+        Values:
+            LOCATION_UNSPECIFIED (0):
+                Unused location
+            GLOBAL (1):
+                The infoType is not issued by or tied to a
+                specific region, but is used almost everywhere.
+            ARGENTINA (2):
+                The infoType is typically used in Argentina.
+            AUSTRALIA (3):
+                The infoType is typically used in Australia.
+            BELGIUM (4):
+                The infoType is typically used in Belgium.
+            BRAZIL (5):
+                The infoType is typically used in Brazil.
+            CANADA (6):
+                The infoType is typically used in Canada.
+            CHILE (7):
+                The infoType is typically used in Chile.
+            CHINA (8):
+                The infoType is typically used in China.
+            COLOMBIA (9):
+                The infoType is typically used in Colombia.
+ DENMARK (10): + The infoType is typically used in Denmark. + FRANCE (11): + The infoType is typically used in France. + FINLAND (12): + The infoType is typically used in Finland. + GERMANY (13): + The infoType is typically used in Germany. + HONG_KONG (14): + The infoType is typically used in Hong Kong. + INDIA (15): + The infoType is typically used in India. + INDONESIA (16): + The infoType is typically used in Indonesia. + IRELAND (17): + The infoType is typically used in Ireland. + ISRAEL (18): + The infoType is typically used in Israel. + ITALY (19): + The infoType is typically used in Italy. + JAPAN (20): + The infoType is typically used in Japan. + KOREA (21): + The infoType is typically used in Korea. + MEXICO (22): + The infoType is typically used in Mexico. + THE_NETHERLANDS (23): + The infoType is typically used in the + Netherlands. + NORWAY (24): + The infoType is typically used in Norway. + PARAGUAY (25): + The infoType is typically used in Paraguay. + PERU (26): + The infoType is typically used in Peru. + POLAND (27): + The infoType is typically used in Poland. + PORTUGAL (28): + The infoType is typically used in Portugal. + SINGAPORE (29): + The infoType is typically used in Singapore. + SOUTH_AFRICA (30): + The infoType is typically used in South + Africa. + SPAIN (31): + The infoType is typically used in Spain. + SWEDEN (32): + The infoType is typically used in Sweden. + TAIWAN (33): + The infoType is typically used in Taiwan. + THAILAND (34): + The infoType is typically used in Thailand. + TURKEY (35): + The infoType is typically used in Turkey. + UNITED_KINGDOM (36): + The infoType is typically used in the United + Kingdom. + UNITED_STATES (37): + The infoType is typically used in the United + States. + URUGUAY (38): + The infoType is typically used in Uruguay. + VENEZUELA (39): + The infoType is typically used in Venezuela. + INTERNAL (40): + The infoType is typically used in Google + internally. + NEW_ZEALAND (41): + The infoType is typically used in New + Zealand. + """ + LOCATION_UNSPECIFIED = 0 + GLOBAL = 1 + ARGENTINA = 2 + AUSTRALIA = 3 + BELGIUM = 4 + BRAZIL = 5 + CANADA = 6 + CHILE = 7 + CHINA = 8 + COLOMBIA = 9 + DENMARK = 10 + FRANCE = 11 + FINLAND = 12 + GERMANY = 13 + HONG_KONG = 14 + INDIA = 15 + INDONESIA = 16 + IRELAND = 17 + ISRAEL = 18 + ITALY = 19 + JAPAN = 20 + KOREA = 21 + MEXICO = 22 + THE_NETHERLANDS = 23 + NORWAY = 24 + PARAGUAY = 25 + PERU = 26 + POLAND = 27 + PORTUGAL = 28 + SINGAPORE = 29 + SOUTH_AFRICA = 30 + SPAIN = 31 + SWEDEN = 32 + TAIWAN = 33 + THAILAND = 34 + TURKEY = 35 + UNITED_KINGDOM = 36 + UNITED_STATES = 37 + URUGUAY = 38 + VENEZUELA = 39 + INTERNAL = 40 + NEW_ZEALAND = 41 + + class IndustryCategory(proto.Enum): + r"""Enum of the current industries in the category. + We might add more industries in the future. + + Values: + INDUSTRY_UNSPECIFIED (0): + Unused industry + FINANCE (1): + The infoType is typically used in the finance + industry. + HEALTH (2): + The infoType is typically used in the health + industry. + TELECOMMUNICATIONS (3): + The infoType is typically used in the + telecommunications industry. + """ + INDUSTRY_UNSPECIFIED = 0 + FINANCE = 1 + HEALTH = 2 + TELECOMMUNICATIONS = 3 + + class TypeCategory(proto.Enum): + r"""Enum of the current types in the category. + We might add more types in the future. 
+ + Values: + TYPE_UNSPECIFIED (0): + Unused type + PII (1): + Personally identifiable information, for + example, a name or phone number + SPII (2): + Personally identifiable information that is + especially sensitive, for example, a passport + number. + DEMOGRAPHIC (3): + Attributes that can partially identify + someone, especially in combination with other + attributes, like age, height, and gender. + CREDENTIAL (4): + Confidential or secret information, for + example, a password. + GOVERNMENT_ID (5): + An identification document issued by a + government. + DOCUMENT (6): + A document, for example, a resume or source + code. + CONTEXTUAL_INFORMATION (7): + Information that is not sensitive on its own, + but provides details about the circumstances + surrounding an entity or an event. + """ + TYPE_UNSPECIFIED = 0 + PII = 1 + SPII = 2 + DEMOGRAPHIC = 3 + CREDENTIAL = 4 + GOVERNMENT_ID = 5 + DOCUMENT = 6 + CONTEXTUAL_INFORMATION = 7 + + location_category: LocationCategory = proto.Field( + proto.ENUM, + number=1, + oneof="category", + enum=LocationCategory, + ) + industry_category: IndustryCategory = proto.Field( + proto.ENUM, + number=2, + oneof="category", + enum=IndustryCategory, + ) + type_category: TypeCategory = proto.Field( + proto.ENUM, + number=3, + oneof="category", + enum=TypeCategory, + ) + + +class VersionDescription(proto.Message): + r"""Details about each available version for an infotype. + + Attributes: + version (str): + Name of the version + description (str): + Description of the version. + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListInfoTypesRequest(proto.Message): + r"""Request for the list of infoTypes. + + Attributes: + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + language_code (str): + BCP-47 language code for localized infoType + friendly names. If omitted, or if localized + strings are not available, en-US strings will be + returned. + filter (str): + filter to only return infoTypes supported by certain parts + of the API. Defaults to supported_by=INSPECT. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + location_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInfoTypesResponse(proto.Message): + r"""Response to the ListInfoTypes request. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeDescription]): + Set of sensitive infoTypes. + """ + + info_types: MutableSequence["InfoTypeDescription"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="InfoTypeDescription", + ) + + +class RiskAnalysisJobConfig(proto.Message): + r"""Configuration for a risk analysis job. See + https://cloud.google.com/dlp/docs/concepts-risk-analysis to + learn more. + + Attributes: + privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + actions (MutableSequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. Are executed in the order provided. 
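
A minimal sketch of submitting such a config, assuming the standard client; ``risk_job`` is the ``CreateDlpJobRequest`` field that carries it, and the source table below is hypothetical::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    risk_job = dlp_v2.RiskAnalysisJobConfig(
        privacy_metric=dlp_v2.PrivacyMetric(
            k_anonymity_config=dlp_v2.PrivacyMetric.KAnonymityConfig(
                quasi_ids=[dlp_v2.FieldId(name="zip_code"), dlp_v2.FieldId(name="age")],
            )
        ),
        source_table=dlp_v2.BigQueryTable(
            project_id="example-project", dataset_id="hr", table_id="employees",
        ),
    )
    job = client.create_dlp_job(
        request={"parent": "projects/example-project/locations/global",
                 "risk_job": risk_job}
    )
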
+    """
+
+    privacy_metric: "PrivacyMetric" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="PrivacyMetric",
+    )
+    source_table: storage.BigQueryTable = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=storage.BigQueryTable,
+    )
+    actions: MutableSequence["Action"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message="Action",
+    )
+
+
+class QuasiId(proto.Message):
+    r"""A column with a semantic tag attached.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        field (google.cloud.dlp_v2.types.FieldId):
+            Required. Identifies the column.
+        info_type (google.cloud.dlp_v2.types.InfoType):
+            A column can be tagged with an InfoType to use the relevant
+            public dataset as a statistical model of population, if
+            available. We currently support US ZIP codes, region codes,
+            ages and genders. To programmatically obtain the list of
+            supported InfoTypes, use ListInfoTypes with the
+            supported_by=RISK_ANALYSIS filter.
+
+            This field is a member of `oneof`_ ``tag``.
+        custom_tag (str):
+            A column can be tagged with a custom tag. In
+            this case, the user must indicate an auxiliary
+            table that contains statistical information on
+            the possible values of this column (below).
+
+            This field is a member of `oneof`_ ``tag``.
+        inferred (google.protobuf.empty_pb2.Empty):
+            If no semantic tag is indicated, we infer the
+            statistical model from the distribution of
+            values in the input data.
+
+            This field is a member of `oneof`_ ``tag``.
+    """
+
+    field: storage.FieldId = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=storage.FieldId,
+    )
+    info_type: storage.InfoType = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="tag",
+        message=storage.InfoType,
+    )
+    custom_tag: str = proto.Field(
+        proto.STRING,
+        number=3,
+        oneof="tag",
+    )
+    inferred: empty_pb2.Empty = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof="tag",
+        message=empty_pb2.Empty,
+    )
+
+
+class StatisticalTable(proto.Message):
+    r"""An auxiliary table containing statistical information on the
+    relative frequency of different quasi-identifier values. It has
+    one or several quasi-identifier columns, and one column that
+    indicates the relative frequency of each quasi-identifier tuple.
+    If a tuple is present in the data but not in the auxiliary
+    table, the corresponding relative frequency is assumed to be
+    zero (and thus, the tuple is highly reidentifiable).
+
+    Attributes:
+        table (google.cloud.dlp_v2.types.BigQueryTable):
+            Required. Auxiliary table location.
+        quasi_ids (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]):
+            Required. Quasi-identifier columns.
+        relative_frequency (google.cloud.dlp_v2.types.FieldId):
+            Required. The relative frequency column must
+            contain a floating-point number between 0 and 1
+            (inclusive). Null values are assumed to be zero.
+    """
+
+    class QuasiIdentifierField(proto.Message):
+        r"""A quasi-identifier column has a custom_tag, used to know which
+        column in the data corresponds to which column in the statistical
+        model.
+
+        Attributes:
+            field (google.cloud.dlp_v2.types.FieldId):
+                Identifies the column.
+            custom_tag (str):
+                A column can be tagged with a custom tag. In
+                this case, the user must indicate an auxiliary
+                table that contains statistical information on
+                the possible values of this column (below).
+        """
+
+        field: storage.FieldId = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+        custom_tag: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+
+    table: storage.BigQueryTable = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=storage.BigQueryTable,
+    )
+    quasi_ids: MutableSequence[QuasiIdentifierField] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=QuasiIdentifierField,
+    )
+    relative_frequency: storage.FieldId = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=storage.FieldId,
+    )
+
+
+class PrivacyMetric(proto.Message):
+    r"""Privacy metric to compute for reidentification risk analysis.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig):
+            Numerical stats
+
+            This field is a member of `oneof`_ ``type``.
+        categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig):
+            Categorical stats
+
+            This field is a member of `oneof`_ ``type``.
+        k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig):
+            K-anonymity
+
+            This field is a member of `oneof`_ ``type``.
+        l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig):
+            l-diversity
+
+            This field is a member of `oneof`_ ``type``.
+        k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig):
+            k-map
+
+            This field is a member of `oneof`_ ``type``.
+        delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig):
+            delta-presence
+
+            This field is a member of `oneof`_ ``type``.
+    """
+
+    class NumericalStatsConfig(proto.Message):
+        r"""Compute numerical stats over an individual column, including
+        min, max, and quantiles.
+
+        Attributes:
+            field (google.cloud.dlp_v2.types.FieldId):
+                Field to compute numerical stats on.
+                Supported types are integer, float, date,
+                datetime, timestamp, time.
+        """
+
+        field: storage.FieldId = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+
+    class CategoricalStatsConfig(proto.Message):
+        r"""Compute categorical stats over an individual column, including
+        number of distinct values and value count distribution.
+
+        Attributes:
+            field (google.cloud.dlp_v2.types.FieldId):
+                Field to compute categorical stats on. All
+                column types are supported except for arrays and
+                structs. However, it may be more informative to
+                use NumericalStats when the field type is
+                supported, depending on the data.
+        """
+
+        field: storage.FieldId = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+
+    class KAnonymityConfig(proto.Message):
+        r"""k-anonymity metric, used for analysis of reidentification
+        risk.
+
+        Attributes:
+            quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]):
+                Set of fields to compute k-anonymity over.
+                When multiple fields are specified, they are
+                considered a single composite key.
Structs and + repeated data types are not supported; however, + nested fields are supported so long as they are + not structs themselves or nested within a + repeated field. + entity_id (google.cloud.dlp_v2.types.EntityId): + Message indicating that multiple rows might be associated to + a single individual. If the same entity_id is associated to + multiple quasi-identifier tuples over distinct rows, we + consider the entire collection of tuples as the composite + quasi-identifier. This collection is a multiset: the order + in which the different tuples appear in the dataset is + ignored, but their frequency is taken into account. + + Important note: a maximum of 1000 rows can be associated to + a single entity ID. If more rows are associated with the + same entity ID, some might be ignored. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + entity_id: storage.EntityId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.EntityId, + ) + + class LDiversityConfig(proto.Message): + r"""l-diversity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Set of quasi-identifiers indicating how + equivalence classes are defined for the + l-diversity computation. When multiple fields + are specified, they are considered a single + composite key. + sensitive_attribute (google.cloud.dlp_v2.types.FieldId): + Sensitive field for computing the l-value. + """ + + quasi_ids: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + sensitive_attribute: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + class KMapEstimationConfig(proto.Message): + r"""Reidentifiability metric. This corresponds to a risk model + similar to what is called "journalist risk" in the literature, + except the attack dataset is statistically modeled instead of + being perfectly known. This can be done using publicly available + data (like the US Census), or using a custom statistical model + (indicated as one or several BigQuery tables), or by + extrapolating from the distribution of values in the input + dataset. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): + Required. Fields considered to be + quasi-identifiers. No two columns can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers column must + appear in exactly one column of one auxiliary table. + """ + + class TaggedField(proto.Message): + r"""A column with a semantic tag attached. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. 
+                info_type (google.cloud.dlp_v2.types.InfoType):
+                    A column can be tagged with an InfoType to use the relevant
+                    public dataset as a statistical model of population, if
+                    available. We currently support US ZIP codes, region codes,
+                    ages and genders. To programmatically obtain the list of
+                    supported InfoTypes, use ListInfoTypes with the
+                    supported_by=RISK_ANALYSIS filter.
+
+                    This field is a member of `oneof`_ ``tag``.
+                custom_tag (str):
+                    A column can be tagged with a custom tag. In
+                    this case, the user must indicate an auxiliary
+                    table that contains statistical information on
+                    the possible values of this column (below).
+
+                    This field is a member of `oneof`_ ``tag``.
+                inferred (google.protobuf.empty_pb2.Empty):
+                    If no semantic tag is indicated, we infer the
+                    statistical model from the distribution of
+                    values in the input data.
+
+                    This field is a member of `oneof`_ ``tag``.
+            """
+
+            field: storage.FieldId = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                message=storage.FieldId,
+            )
+            info_type: storage.InfoType = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                oneof="tag",
+                message=storage.InfoType,
+            )
+            custom_tag: str = proto.Field(
+                proto.STRING,
+                number=3,
+                oneof="tag",
+            )
+            inferred: empty_pb2.Empty = proto.Field(
+                proto.MESSAGE,
+                number=4,
+                oneof="tag",
+                message=empty_pb2.Empty,
+            )
+
+        class AuxiliaryTable(proto.Message):
+            r"""An auxiliary table contains statistical information on the
+            relative frequency of different quasi-identifier values. It has
+            one or several quasi-identifier columns, and one column that
+            indicates the relative frequency of each quasi-identifier tuple.
+            If a tuple is present in the data but not in the auxiliary
+            table, the corresponding relative frequency is assumed to be
+            zero (and thus, the tuple is highly reidentifiable).
+
+            Attributes:
+                table (google.cloud.dlp_v2.types.BigQueryTable):
+                    Required. Auxiliary table location.
+                quasi_ids (MutableSequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]):
+                    Required. Quasi-identifier columns.
+                relative_frequency (google.cloud.dlp_v2.types.FieldId):
+                    Required. The relative frequency column must
+                    contain a floating-point number between 0 and 1
+                    (inclusive). Null values are assumed to be zero.
+            """
+
+            class QuasiIdField(proto.Message):
+                r"""A quasi-identifier column has a custom_tag, used to know which
+                column in the data corresponds to which column in the statistical
+                model.
+
+                Attributes:
+                    field (google.cloud.dlp_v2.types.FieldId):
+                        Identifies the column.
+                    custom_tag (str):
+                        An auxiliary field.
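
To make the custom_tag linkage concrete, a hypothetical pairing in which a tagged quasi-identifier column matches exactly one auxiliary-table column::

    from google.cloud import dlp_v2

    KMap = dlp_v2.PrivacyMetric.KMapEstimationConfig
    tagged = KMap.TaggedField(
        field=dlp_v2.FieldId(name="zip_code"),
        custom_tag="zip_stats",  # must appear in exactly one auxiliary column
    )
    aux = KMap.AuxiliaryTable(
        table=dlp_v2.BigQueryTable(
            project_id="example-project", dataset_id="census", table_id="zip_freq",
        ),
        quasi_ids=[KMap.AuxiliaryTable.QuasiIdField(
            field=dlp_v2.FieldId(name="zip_code"), custom_tag="zip_stats",
        )],
        relative_frequency=dlp_v2.FieldId(name="relative_frequency"),
    )
    config = KMap(quasi_ids=[tagged], auxiliary_tables=[aux], region_code="US")
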
+ """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + custom_tag: str = proto.Field( + proto.STRING, + number=2, + ) + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=3, + message=storage.BigQueryTable, + ) + quasi_ids: MutableSequence[ + "PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField", + ) + relative_frequency: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + + quasi_ids: MutableSequence[ + "PrivacyMetric.KMapEstimationConfig.TaggedField" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PrivacyMetric.KMapEstimationConfig.TaggedField", + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence[ + "PrivacyMetric.KMapEstimationConfig.AuxiliaryTable" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="PrivacyMetric.KMapEstimationConfig.AuxiliaryTable", + ) + + class DeltaPresenceEstimationConfig(proto.Message): + r"""δ-presence metric, used to estimate how likely it is for an + attacker to figure out that one given individual appears in a + de-identified dataset. Similarly to the k-map metric, we cannot + compute δ-presence exactly without knowing the attack dataset, + so we use a statistical model instead. + + Attributes: + quasi_ids (MutableSequence[google.cloud.dlp_v2.types.QuasiId]): + Required. Fields considered to be + quasi-identifiers. No two fields can have the + same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (MutableSequence[google.cloud.dlp_v2.types.StatisticalTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers field must appear + in exactly one field of one auxiliary table. + """ + + quasi_ids: MutableSequence["QuasiId"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="QuasiId", + ) + region_code: str = proto.Field( + proto.STRING, + number=2, + ) + auxiliary_tables: MutableSequence["StatisticalTable"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="StatisticalTable", + ) + + numerical_stats_config: NumericalStatsConfig = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=NumericalStatsConfig, + ) + categorical_stats_config: CategoricalStatsConfig = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message=CategoricalStatsConfig, + ) + k_anonymity_config: KAnonymityConfig = proto.Field( + proto.MESSAGE, + number=3, + oneof="type", + message=KAnonymityConfig, + ) + l_diversity_config: LDiversityConfig = proto.Field( + proto.MESSAGE, + number=4, + oneof="type", + message=LDiversityConfig, + ) + k_map_estimation_config: KMapEstimationConfig = proto.Field( + proto.MESSAGE, + number=5, + oneof="type", + message=KMapEstimationConfig, + ) + delta_presence_estimation_config: DeltaPresenceEstimationConfig = proto.Field( + proto.MESSAGE, + number=6, + oneof="type", + message=DeltaPresenceEstimationConfig, + ) + + +class AnalyzeDataSourceRiskDetails(proto.Message): + r"""Result of a risk analysis operation request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric):
+            Privacy metric to compute.
+        requested_source_table (google.cloud.dlp_v2.types.BigQueryTable):
+            Input dataset to compute metrics over.
+        numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult):
+            Numerical stats result
+
+            This field is a member of `oneof`_ ``result``.
+        categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult):
+            Categorical stats result
+
+            This field is a member of `oneof`_ ``result``.
+        k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult):
+            K-anonymity result
+
+            This field is a member of `oneof`_ ``result``.
+        l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult):
+            L-diversity result
+
+            This field is a member of `oneof`_ ``result``.
+        k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult):
+            K-map result
+
+            This field is a member of `oneof`_ ``result``.
+        delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult):
+            Delta-presence result
+
+            This field is a member of `oneof`_ ``result``.
+        requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions):
+            The configuration used for this job.
+    """
+
+    class NumericalStatsResult(proto.Message):
+        r"""Result of the numerical stats computation.
+
+        Attributes:
+            min_value (google.cloud.dlp_v2.types.Value):
+                Minimum value appearing in the column.
+            max_value (google.cloud.dlp_v2.types.Value):
+                Maximum value appearing in the column.
+            quantile_values (MutableSequence[google.cloud.dlp_v2.types.Value]):
+                List of 99 values that partition the set of
+                field values into 100 equal-sized buckets.
+        """
+
+        min_value: "Value" = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message="Value",
+        )
+        max_value: "Value" = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message="Value",
+        )
+        quantile_values: MutableSequence["Value"] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=4,
+            message="Value",
+        )
+
+    class CategoricalStatsResult(proto.Message):
+        r"""Result of the categorical stats computation.
+
+        Attributes:
+            value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]):
+                Histogram of value frequencies in the column.
+        """
+
+        class CategoricalStatsHistogramBucket(proto.Message):
+            r"""Histogram of value frequencies in the column.
+
+            Attributes:
+                value_frequency_lower_bound (int):
+                    Lower bound on the value frequency of the
+                    values in this bucket.
+                value_frequency_upper_bound (int):
+                    Upper bound on the value frequency of the
+                    values in this bucket.
+                bucket_size (int):
+                    Total number of values in this bucket.
+                bucket_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]):
+                    Sample of value frequencies in this bucket.
+                    The total number of values returned per bucket
+                    is capped at 20.
+                bucket_value_count (int):
+                    Total number of distinct values in this
+                    bucket.
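
For example, reading these buckets off a finished risk job might look roughly like this (the job name is a placeholder)::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    job = client.get_dlp_job(request={"name": "projects/example-project/dlpJobs/r-123"})
    result = job.risk_details.categorical_stats_result
    for bucket in result.value_frequency_histogram_buckets:
        print(f"values seen {bucket.value_frequency_lower_bound}-"
              f"{bucket.value_frequency_upper_bound} times: {bucket.bucket_size}")
        for vf in bucket.bucket_values:  # sample capped at 20 per bucket
            print("  ", vf.value, vf.count)
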
+            """
+
+            value_frequency_lower_bound: int = proto.Field(
+                proto.INT64,
+                number=1,
+            )
+            value_frequency_upper_bound: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+            bucket_size: int = proto.Field(
+                proto.INT64,
+                number=3,
+            )
+            bucket_values: MutableSequence["ValueFrequency"] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=4,
+                message="ValueFrequency",
+            )
+            bucket_value_count: int = proto.Field(
+                proto.INT64,
+                number=5,
+            )
+
+        value_frequency_histogram_buckets: MutableSequence[
+            "AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=5,
+            message="AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket",
+        )
+
+    class KAnonymityResult(proto.Message):
+        r"""Result of the k-anonymity computation.
+
+        Attributes:
+            equivalence_class_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]):
+                Histogram of k-anonymity equivalence classes.
+        """
+
+        class KAnonymityEquivalenceClass(proto.Message):
+            r"""The set of columns' values that share the same k-anonymity
+            value.
+
+            Attributes:
+                quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]):
+                    Set of values defining the equivalence class.
+                    One value per quasi-identifier column in the
+                    original KAnonymity metric message. The order is
+                    always the same as the original request.
+                equivalence_class_size (int):
+                    Size of the equivalence class, for example
+                    the number of rows with the above set of values.
+            """
+
+            quasi_ids_values: MutableSequence["Value"] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=1,
+                message="Value",
+            )
+            equivalence_class_size: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+
+        class KAnonymityHistogramBucket(proto.Message):
+            r"""Histogram of k-anonymity equivalence classes.
+
+            Attributes:
+                equivalence_class_size_lower_bound (int):
+                    Lower bound on the size of the equivalence
+                    classes in this bucket.
+                equivalence_class_size_upper_bound (int):
+                    Upper bound on the size of the equivalence
+                    classes in this bucket.
+                bucket_size (int):
+                    Total number of equivalence classes in this
+                    bucket.
+                bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]):
+                    Sample of equivalence classes in this bucket.
+                    The total number of classes returned per bucket
+                    is capped at 20.
+                bucket_value_count (int):
+                    Total number of distinct equivalence classes
+                    in this bucket.
+            """
+
+            equivalence_class_size_lower_bound: int = proto.Field(
+                proto.INT64,
+                number=1,
+            )
+            equivalence_class_size_upper_bound: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+            bucket_size: int = proto.Field(
+                proto.INT64,
+                number=3,
+            )
+            bucket_values: MutableSequence[
+                "AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass"
+            ] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=4,
+                message="AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass",
+            )
+            bucket_value_count: int = proto.Field(
+                proto.INT64,
+                number=5,
+            )
+
+        equivalence_class_histogram_buckets: MutableSequence[
+            "AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=5,
+            message="AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket",
+        )
+
+    class LDiversityResult(proto.Message):
+        r"""Result of the l-diversity computation.
+ + Attributes: + sensitive_value_frequency_histogram_buckets (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): + Histogram of l-diversity equivalence class + sensitive value frequencies. + """ + + class LDiversityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value. + + Attributes: + quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]): + Quasi-identifier values defining the + k-anonymity equivalence class. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the k-anonymity equivalence class. + num_distinct_sensitive_values (int): + Number of distinct sensitive values in this + equivalence class. + top_sensitive_values (MutableSequence[google.cloud.dlp_v2.types.ValueFrequency]): + Estimated frequencies of top sensitive + values. + """ + + quasi_ids_values: MutableSequence["Value"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Value", + ) + equivalence_class_size: int = proto.Field( + proto.INT64, + number=2, + ) + num_distinct_sensitive_values: int = proto.Field( + proto.INT64, + number=3, + ) + top_sensitive_values: MutableSequence[ + "ValueFrequency" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="ValueFrequency", + ) + + class LDiversityHistogramBucket(proto.Message): + r"""Histogram of l-diversity equivalence class sensitive value + frequencies. + + Attributes: + sensitive_value_frequency_lower_bound (int): + Lower bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + sensitive_value_frequency_upper_bound (int): + Upper bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. + """ + + sensitive_value_frequency_lower_bound: int = proto.Field( + proto.INT64, + number=1, + ) + sensitive_value_frequency_upper_bound: int = proto.Field( + proto.INT64, + number=2, + ) + bucket_size: int = proto.Field( + proto.INT64, + number=3, + ) + bucket_values: MutableSequence[ + "AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass", + ) + bucket_value_count: int = proto.Field( + proto.INT64, + number=5, + ) + + sensitive_value_frequency_histogram_buckets: MutableSequence[ + "AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket", + ) + + class KMapEstimationResult(proto.Message): + r"""Result of the reidentifiability analysis. Note that these + results are an estimation, not exact values. + + Attributes: + k_map_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): + The intervals [min_anonymity, max_anonymity] do not overlap. 
+                If a value doesn't correspond to any such interval, the
+                associated frequency is zero. For example, the following
+                records: {min_anonymity: 1, max_anonymity: 1, frequency: 17}
+                {min_anonymity: 2, max_anonymity: 3, frequency: 42}
+                {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean
+                that there are no records with an estimated anonymity of
+                4, or larger than 10.
+        """
+
+        class KMapEstimationQuasiIdValues(proto.Message):
+            r"""A tuple of values for the quasi-identifier columns.
+
+            Attributes:
+                quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]):
+                    The quasi-identifier values.
+                estimated_anonymity (int):
+                    The estimated anonymity for these
+                    quasi-identifier values.
+            """
+
+            quasi_ids_values: MutableSequence["Value"] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=1,
+                message="Value",
+            )
+            estimated_anonymity: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+
+        class KMapEstimationHistogramBucket(proto.Message):
+            r"""A KMapEstimationHistogramBucket message with the following values:
+            min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are
+            42 records whose quasi-identifier values correspond to 3, 4 or 5
+            people in the underlying population. An important particular case is
+            when min_anonymity = max_anonymity = 1: the frequency field then
+            corresponds to the number of uniquely identifiable records.
+
+            Attributes:
+                min_anonymity (int):
+                    Always positive.
+                max_anonymity (int):
+                    Always greater than or equal to min_anonymity.
+                bucket_size (int):
+                    Number of records within these anonymity
+                    bounds.
+                bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]):
+                    Sample of quasi-identifier tuple values in
+                    this bucket. The total number of classes
+                    returned per bucket is capped at 20.
+                bucket_value_count (int):
+                    Total number of distinct quasi-identifier
+                    tuple values in this bucket.
+            """
+
+            min_anonymity: int = proto.Field(
+                proto.INT64,
+                number=1,
+            )
+            max_anonymity: int = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+            bucket_size: int = proto.Field(
+                proto.INT64,
+                number=5,
+            )
+            bucket_values: MutableSequence[
+                "AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues"
+            ] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=6,
+                message="AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues",
+            )
+            bucket_value_count: int = proto.Field(
+                proto.INT64,
+                number=7,
+            )
+
+        k_map_estimation_histogram: MutableSequence[
+            "AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message="AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket",
+        )
+
+    class DeltaPresenceEstimationResult(proto.Message):
+        r"""Result of the δ-presence computation. Note that these results
+        are an estimation, not exact values.
+
+        Attributes:
+            delta_presence_estimation_histogram (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]):
+                The intervals [min_probability, max_probability) do not
+                overlap. If a value doesn't correspond to any such interval,
+                the associated frequency is zero. For example, the following
+                records: {min_probability: 0, max_probability: 0.1,
+                frequency: 17} {min_probability: 0.2, max_probability: 0.3,
+                frequency: 42} {min_probability: 0.3, max_probability: 0.4,
+                frequency: 99} mean that there are no records with an
+                estimated probability in [0.1, 0.2) or greater than or
+                equal to 0.4.
+        """
+
+        class DeltaPresenceEstimationQuasiIdValues(proto.Message):
+            r"""A tuple of values for the quasi-identifier columns.
+
+            Attributes:
+                quasi_ids_values (MutableSequence[google.cloud.dlp_v2.types.Value]):
+                    The quasi-identifier values.
+                estimated_probability (float):
+                    The estimated probability that a given individual sharing
+                    these quasi-identifier values is in the dataset. This value,
+                    typically called δ, is the ratio between the number of
+                    records in the dataset with these quasi-identifier values,
+                    and the total number of individuals (inside *and* outside
+                    the dataset) with these quasi-identifier values. For
+                    example, if there are 15 individuals in the dataset who
+                    share the same quasi-identifier values, and an estimated 100
+                    people in the entire population with these values, then δ is
+                    0.15.
+            """
+
+            quasi_ids_values: MutableSequence["Value"] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=1,
+                message="Value",
+            )
+            estimated_probability: float = proto.Field(
+                proto.DOUBLE,
+                number=2,
+            )
+
+        class DeltaPresenceEstimationHistogramBucket(proto.Message):
+            r"""A DeltaPresenceEstimationHistogramBucket message with the following
+            values: min_probability: 0.1 max_probability: 0.2 frequency: 42
+            means that there are 42 records for which δ is in [0.1, 0.2). An
+            important particular case is when min_probability = max_probability
+            = 1: then, every individual who shares this quasi-identifier
+            combination is in the dataset.
+
+            Attributes:
+                min_probability (float):
+                    Between 0 and 1.
+                max_probability (float):
+                    Always greater than or equal to min_probability.
+                bucket_size (int):
+                    Number of records within these probability
+                    bounds.
+                bucket_values (MutableSequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]):
+                    Sample of quasi-identifier tuple values in
+                    this bucket. The total number of classes
+                    returned per bucket is capped at 20.
+                bucket_value_count (int):
+                    Total number of distinct quasi-identifier
+                    tuple values in this bucket.
+            """
+
+            min_probability: float = proto.Field(
+                proto.DOUBLE,
+                number=1,
+            )
+            max_probability: float = proto.Field(
+                proto.DOUBLE,
+                number=2,
+            )
+            bucket_size: int = proto.Field(
+                proto.INT64,
+                number=5,
+            )
+            bucket_values: MutableSequence[
+                "AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues"
+            ] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=6,
+                message="AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues",
+            )
+            bucket_value_count: int = proto.Field(
+                proto.INT64,
+                number=7,
+            )
+
+        delta_presence_estimation_histogram: MutableSequence[
+            "AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message="AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket",
+        )
+
+    class RequestedRiskAnalysisOptions(proto.Message):
+        r"""Risk analysis options.
+
+        Attributes:
+            job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig):
+                The job config for the risk job.
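
Because exactly one member of the ``result`` oneof is populated, callers typically branch on it; a sketch using the proto-plus ``pb`` accessor (``job`` is a previously fetched ``DlpJob``)::

    details = job.risk_details  # an AnalyzeDataSourceRiskDetails
    which = dlp_v2.AnalyzeDataSourceRiskDetails.pb(details).WhichOneof("result")
    if which == "k_anonymity_result":
        for bucket in details.k_anonymity_result.equivalence_class_histogram_buckets:
            print(bucket.equivalence_class_size_lower_bound, bucket.bucket_size)
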
+ """ + + job_config: "RiskAnalysisJobConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="RiskAnalysisJobConfig", + ) + + requested_privacy_metric: "PrivacyMetric" = proto.Field( + proto.MESSAGE, + number=1, + message="PrivacyMetric", + ) + requested_source_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + numerical_stats_result: NumericalStatsResult = proto.Field( + proto.MESSAGE, + number=3, + oneof="result", + message=NumericalStatsResult, + ) + categorical_stats_result: CategoricalStatsResult = proto.Field( + proto.MESSAGE, + number=4, + oneof="result", + message=CategoricalStatsResult, + ) + k_anonymity_result: KAnonymityResult = proto.Field( + proto.MESSAGE, + number=5, + oneof="result", + message=KAnonymityResult, + ) + l_diversity_result: LDiversityResult = proto.Field( + proto.MESSAGE, + number=6, + oneof="result", + message=LDiversityResult, + ) + k_map_estimation_result: KMapEstimationResult = proto.Field( + proto.MESSAGE, + number=7, + oneof="result", + message=KMapEstimationResult, + ) + delta_presence_estimation_result: DeltaPresenceEstimationResult = proto.Field( + proto.MESSAGE, + number=9, + oneof="result", + message=DeltaPresenceEstimationResult, + ) + requested_options: RequestedRiskAnalysisOptions = proto.Field( + proto.MESSAGE, + number=10, + message=RequestedRiskAnalysisOptions, + ) + + +class ValueFrequency(proto.Message): + r"""A value of a field, including its frequency. + + Attributes: + value (google.cloud.dlp_v2.types.Value): + A value contained in the field in question. + count (int): + How many times the value is contained in the + field. + """ + + value: "Value" = proto.Field( + proto.MESSAGE, + number=1, + message="Value", + ) + count: int = proto.Field( + proto.INT64, + number=2, + ) + + +class Value(proto.Message): + r"""Set of primitive values supported by the system. Note that for the + purposes of inspection or transformation, the number of bytes + considered to comprise a 'Value' is based on its representation as a + UTF-8 encoded string. For example, if 'integer_value' is set to + 123456789, the number of bytes would be counted as 9, even though an + int64 only holds up to 8 bytes of data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + integer_value (int): + integer + + This field is a member of `oneof`_ ``type``. + float_value (float): + float + + This field is a member of `oneof`_ ``type``. + string_value (str): + string + + This field is a member of `oneof`_ ``type``. + boolean_value (bool): + boolean + + This field is a member of `oneof`_ ``type``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + timestamp + + This field is a member of `oneof`_ ``type``. + time_value (google.type.timeofday_pb2.TimeOfDay): + time of day + + This field is a member of `oneof`_ ``type``. + date_value (google.type.date_pb2.Date): + date + + This field is a member of `oneof`_ ``type``. + day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): + day of week + + This field is a member of `oneof`_ ``type``. 
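
A short sketch of the oneof behaviour described above; setting one member clears the others::

    from google.cloud import dlp_v2

    v = dlp_v2.Value(integer_value=123456789)  # counted as 9 bytes when inspected
    v.string_value = "2018-01-01"              # clears integer_value
    assert dlp_v2.Value.pb(v).WhichOneof("type") == "string_value"
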
+ """ + + integer_value: int = proto.Field( + proto.INT64, + number=1, + oneof="type", + ) + float_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof="type", + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof="type", + ) + boolean_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof="type", + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + oneof="type", + message=timestamp_pb2.Timestamp, + ) + time_value: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=6, + oneof="type", + message=timeofday_pb2.TimeOfDay, + ) + date_value: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=7, + oneof="type", + message=date_pb2.Date, + ) + day_of_week_value: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=8, + oneof="type", + enum=dayofweek_pb2.DayOfWeek, + ) + + +class QuoteInfo(proto.Message): + r"""Message for infoType-dependent details parsed from quote. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + date_time (google.cloud.dlp_v2.types.DateTime): + The date time indicated by the quote. + + This field is a member of `oneof`_ ``parsed_quote``. + """ + + date_time: "DateTime" = proto.Field( + proto.MESSAGE, + number=2, + oneof="parsed_quote", + message="DateTime", + ) + + +class DateTime(proto.Message): + r"""Message for a date time object. + e.g. 2018-01-01, 5th August. + + Attributes: + date (google.type.date_pb2.Date): + One or more of the following must be set. + Must be a valid date or time value. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Day of week + time (google.type.timeofday_pb2.TimeOfDay): + Time of day + time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): + Time zone + """ + + class TimeZone(proto.Message): + r"""Time zone of the date time object. + + Attributes: + offset_minutes (int): + Set only if the offset can be determined. + Positive for time ahead of UTC. E.g. For + "UTC-9", this value is -540. + """ + + offset_minutes: int = proto.Field( + proto.INT32, + number=1, + ) + + date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + time_zone: TimeZone = proto.Field( + proto.MESSAGE, + number=4, + message=TimeZone, + ) + + +class DeidentifyConfig(proto.Message): + r"""The configuration that controls how the data will change. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the dataset as free-form text and apply + the same free text transformation everywhere. + + This field is a member of `oneof`_ ``transformation``. + record_transformations (google.cloud.dlp_v2.types.RecordTransformations): + Treat the dataset as structured. + Transformations can be applied to specific + locations within structured datasets, such as + transforming a column within a table. + + This field is a member of `oneof`_ ``transformation``. 
+        image_transformations (google.cloud.dlp_v2.types.ImageTransformations):
+            Treat the dataset as an image and redact.
+
+            This field is a member of `oneof`_ ``transformation``.
+        transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling):
+            Mode for handling transformation errors. If left
+            unspecified, the default mode is
+            ``TransformationErrorHandling.ThrowError``.
+    """
+
+    info_type_transformations: "InfoTypeTransformations" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof="transformation",
+        message="InfoTypeTransformations",
+    )
+    record_transformations: "RecordTransformations" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="transformation",
+        message="RecordTransformations",
+    )
+    image_transformations: "ImageTransformations" = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof="transformation",
+        message="ImageTransformations",
+    )
+    transformation_error_handling: "TransformationErrorHandling" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="TransformationErrorHandling",
+    )
+
+
+class ImageTransformations(proto.Message):
+    r"""A type of transformation that is applied over images.
+
+    Attributes:
+        transforms (MutableSequence[google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation]):
+
+    """
+
+    class ImageTransformation(proto.Message):
+        r"""Configuration for determining how redaction of images should
+        occur.
+
+        This message has `oneof`_ fields (mutually exclusive fields).
+        For each oneof, at most one member field can be set at the same time.
+        Setting any member of the oneof automatically clears all other
+        members.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            selected_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.SelectedInfoTypes):
+                Apply transformation to the selected info_types.
+
+                This field is a member of `oneof`_ ``target``.
+            all_info_types (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllInfoTypes):
+                Apply transformation to all findings not specified in other
+                ImageTransformation's selected_info_types. Only one instance
+                is allowed within the ImageTransformations message.
+
+                This field is a member of `oneof`_ ``target``.
+            all_text (google.cloud.dlp_v2.types.ImageTransformations.ImageTransformation.AllText):
+                Apply transformation to all text that doesn't
+                match an infoType. Only one instance is allowed
+                within the ImageTransformations message.
+
+                This field is a member of `oneof`_ ``target``.
+            redaction_color (google.cloud.dlp_v2.types.Color):
+                The color to use when redacting content from
+                an image. If not specified, the default is
+                black.
+        """
+
+        class SelectedInfoTypes(proto.Message):
+            r"""Apply transformation to the selected info_types.
+
+            Attributes:
+                info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]):
+                    Required. InfoTypes to apply the
+                    transformation to. Provided InfoTypes
+                    must be unique within the ImageTransformations
+                    message.
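+
+            Example: a minimal illustrative sketch (the infoType name
+            is a stand-in)::
+
+                from google.cloud import dlp_v2
+
+                selected = dlp_v2.ImageTransformations.ImageTransformation.SelectedInfoTypes(
+                    info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
+                )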
+ """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=storage.InfoType, + ) + + class AllInfoTypes(proto.Message): + r"""Apply transformation to all findings.""" + + class AllText(proto.Message): + r"""Apply to all text.""" + + selected_info_types: "ImageTransformations.ImageTransformation.SelectedInfoTypes" = proto.Field( + proto.MESSAGE, + number=4, + oneof="target", + message="ImageTransformations.ImageTransformation.SelectedInfoTypes", + ) + all_info_types: "ImageTransformations.ImageTransformation.AllInfoTypes" = ( + proto.Field( + proto.MESSAGE, + number=5, + oneof="target", + message="ImageTransformations.ImageTransformation.AllInfoTypes", + ) + ) + all_text: "ImageTransformations.ImageTransformation.AllText" = proto.Field( + proto.MESSAGE, + number=6, + oneof="target", + message="ImageTransformations.ImageTransformation.AllText", + ) + redaction_color: "Color" = proto.Field( + proto.MESSAGE, + number=3, + message="Color", + ) + + transforms: MutableSequence[ImageTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=ImageTransformation, + ) + + +class TransformationErrorHandling(proto.Message): + r"""How to handle transformation errors during de-identification. A + transformation error occurs when the requested transformation is + incompatible with the data. For example, trying to de-identify an IP + address using a ``DateShift`` transformation would result in a + transformation error, since date info cannot be extracted from an IP + address. Information about any incompatible transformations, and how + they were handled, is returned in the response as part of the + ``TransformationOverviews``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): + Throw an error + + This field is a member of `oneof`_ ``mode``. + leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed): + Ignore errors + + This field is a member of `oneof`_ ``mode``. + """ + + class ThrowError(proto.Message): + r"""Throw an error and fail the request when a transformation + error occurs. + + """ + + class LeaveUntransformed(proto.Message): + r"""Skips the data without modifying it if the requested transformation + would cause an error. For example, if a ``DateShift`` transformation + were applied an an IP address, this mode would leave the IP address + unchanged in the response. + + """ + + throw_error: ThrowError = proto.Field( + proto.MESSAGE, + number=1, + oneof="mode", + message=ThrowError, + ) + leave_untransformed: LeaveUntransformed = proto.Field( + proto.MESSAGE, + number=2, + oneof="mode", + message=LeaveUntransformed, + ) + + +class PrimitiveTransformation(proto.Message): + r"""A rule for transforming a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): + Replace with a specified value. + + This field is a member of `oneof`_ ``transformation``. + redact_config (google.cloud.dlp_v2.types.RedactConfig): + Redact + + This field is a member of `oneof`_ ``transformation``. + character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig): + Mask + + This field is a member of `oneof`_ ``transformation``. + crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig): + Ffx-Fpe + + This field is a member of `oneof`_ ``transformation``. + fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): + Fixed size bucketing + + This field is a member of `oneof`_ ``transformation``. + bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): + Bucketing + + This field is a member of `oneof`_ ``transformation``. + replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): + Replace with infotype + + This field is a member of `oneof`_ ``transformation``. + time_part_config (google.cloud.dlp_v2.types.TimePartConfig): + Time extraction + + This field is a member of `oneof`_ ``transformation``. + crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): + Crypto + + This field is a member of `oneof`_ ``transformation``. + date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): + Date Shift + + This field is a member of `oneof`_ ``transformation``. + crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): + Deterministic Crypto + + This field is a member of `oneof`_ ``transformation``. + replace_dictionary_config (google.cloud.dlp_v2.types.ReplaceDictionaryConfig): + Replace with a value randomly drawn (with + replacement) from a dictionary. + + This field is a member of `oneof`_ ``transformation``. 
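+
+    Example: a minimal illustrative sketch (the replacement string is a
+    stand-in) of setting exactly one member of the ``transformation``
+    oneof::
+
+        from google.cloud import dlp_v2
+
+        transform = dlp_v2.PrimitiveTransformation(
+            replace_config=dlp_v2.ReplaceValueConfig(
+                new_value=dlp_v2.Value(string_value="[REDACTED]"),
+            ),
+        )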
+ """ + + replace_config: "ReplaceValueConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="transformation", + message="ReplaceValueConfig", + ) + redact_config: "RedactConfig" = proto.Field( + proto.MESSAGE, + number=2, + oneof="transformation", + message="RedactConfig", + ) + character_mask_config: "CharacterMaskConfig" = proto.Field( + proto.MESSAGE, + number=3, + oneof="transformation", + message="CharacterMaskConfig", + ) + crypto_replace_ffx_fpe_config: "CryptoReplaceFfxFpeConfig" = proto.Field( + proto.MESSAGE, + number=4, + oneof="transformation", + message="CryptoReplaceFfxFpeConfig", + ) + fixed_size_bucketing_config: "FixedSizeBucketingConfig" = proto.Field( + proto.MESSAGE, + number=5, + oneof="transformation", + message="FixedSizeBucketingConfig", + ) + bucketing_config: "BucketingConfig" = proto.Field( + proto.MESSAGE, + number=6, + oneof="transformation", + message="BucketingConfig", + ) + replace_with_info_type_config: "ReplaceWithInfoTypeConfig" = proto.Field( + proto.MESSAGE, + number=7, + oneof="transformation", + message="ReplaceWithInfoTypeConfig", + ) + time_part_config: "TimePartConfig" = proto.Field( + proto.MESSAGE, + number=8, + oneof="transformation", + message="TimePartConfig", + ) + crypto_hash_config: "CryptoHashConfig" = proto.Field( + proto.MESSAGE, + number=9, + oneof="transformation", + message="CryptoHashConfig", + ) + date_shift_config: "DateShiftConfig" = proto.Field( + proto.MESSAGE, + number=11, + oneof="transformation", + message="DateShiftConfig", + ) + crypto_deterministic_config: "CryptoDeterministicConfig" = proto.Field( + proto.MESSAGE, + number=12, + oneof="transformation", + message="CryptoDeterministicConfig", + ) + replace_dictionary_config: "ReplaceDictionaryConfig" = proto.Field( + proto.MESSAGE, + number=13, + oneof="transformation", + message="ReplaceDictionaryConfig", + ) + + +class TimePartConfig(proto.Message): + r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or + preserve a portion of the value. + + Attributes: + part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): + The part of the time to keep. + """ + + class TimePart(proto.Enum): + r"""Components that make up time. + + Values: + TIME_PART_UNSPECIFIED (0): + Unused + YEAR (1): + [0-9999] + MONTH (2): + [1-12] + DAY_OF_MONTH (3): + [1-31] + DAY_OF_WEEK (4): + [1-7] + WEEK_OF_YEAR (5): + [1-53] + HOUR_OF_DAY (6): + [0-23] + """ + TIME_PART_UNSPECIFIED = 0 + YEAR = 1 + MONTH = 2 + DAY_OF_MONTH = 3 + DAY_OF_WEEK = 4 + WEEK_OF_YEAR = 5 + HOUR_OF_DAY = 6 + + part_to_extract: TimePart = proto.Field( + proto.ENUM, + number=1, + enum=TimePart, + ) + + +class CryptoHashConfig(proto.Message): + r"""Pseudonymization method that generates surrogates via + cryptographic hashing. Uses SHA-256. + The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output + (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. See + https://cloud.google.com/dlp/docs/pseudonymization to learn + more. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the hash function. + """ + + crypto_key: "CryptoKey" = proto.Field( + proto.MESSAGE, + number=1, + message="CryptoKey", + ) + + +class CryptoDeterministicConfig(proto.Message): + r"""Pseudonymization method that generates deterministic + encryption for the given input. Outputs a base64 encoded + representation of the encrypted output. 
Uses AES-SIV based on + the RFC https://tools.ietf.org/html/rfc5297. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the encryption function. For + deterministic encryption using AES-SIV, the + provided key is internally expanded to 64 bytes + prior to use. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom info type to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom info type followed by the number + of characters comprising the surrogate. The following scheme + defines the format: {info type name}({surrogate character + count}):{surrogate} + + For example, if the name of custom info type is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom info type 'Surrogate'. This + facilitates reversal of the surrogate when it occurs in free + text. + + Note: For record transformations where the entire cell in a + table is being transformed, surrogates are not mandatory. + Surrogates are used to denote the location of the token and + are necessary for re-identification in free form text. + + In order for inspection to work properly, the name of this + info type must not occur naturally anywhere in your data; + otherwise, inspection may either + + - reverse a surrogate that does not correspond to an actual + identifier + - be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after + considering what your data looks like. One way to select a + name that has a high chance of yielding reliable detection + is to include one or more unicode characters that are highly + improbable to exist in your data. For example, assuming your + data is entered from a regular ASCII keyboard, the symbol + with the hex code point 29DD might be used like so: + ⧝MY_TOKEN_TYPE. + context (google.cloud.dlp_v2.types.FieldId): + A context may be used for higher security and maintaining + referential integrity such that the same identifier in two + different contexts will be given a distinct surrogate. The + context is appended to plaintext value being encrypted. On + decryption the provided context is validated against the + value used during encryption. If a context was provided + during encryption, same context must be provided during + decryption as well. + + If the context is not set, plaintext would be used as is for + encryption. If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + plaintext would be used as is for encryption. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. + """ + + crypto_key: "CryptoKey" = proto.Field( + proto.MESSAGE, + number=1, + message="CryptoKey", + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + + +class ReplaceValueConfig(proto.Message): + r"""Replace each input value with a given ``Value``. + + Attributes: + new_value (google.cloud.dlp_v2.types.Value): + Value to replace it with. 
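+
+    Example: a minimal illustrative sketch (the replacement value is a
+    stand-in; any ``Value`` member may be used)::
+
+        from google.cloud import dlp_v2
+
+        config = dlp_v2.ReplaceValueConfig(
+            new_value=dlp_v2.Value(integer_value=0),
+        )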
+ """ + + new_value: "Value" = proto.Field( + proto.MESSAGE, + number=1, + message="Value", + ) + + +class ReplaceDictionaryConfig(proto.Message): + r"""Replace each input value with a value randomly selected from + the dictionary. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + A list of words to select from for random replacement. The + `limits `__ page + contains details about the size limits of dictionaries. + + This field is a member of `oneof`_ ``type``. + """ + + word_list: storage.CustomInfoType.Dictionary.WordList = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=storage.CustomInfoType.Dictionary.WordList, + ) + + +class ReplaceWithInfoTypeConfig(proto.Message): + r"""Replace each matching finding with the name of the info_type.""" + + +class RedactConfig(proto.Message): + r"""Redact a given value. For example, if used with an + ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My + phone number is 206-555-0123', the output would be 'My phone number + is '. + + """ + + +class CharsToIgnore(proto.Message): + r"""Characters to skip when doing deidentification of a value. + These will be left alone and skipped. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + characters_to_skip (str): + Characters to not transform when masking. + + This field is a member of `oneof`_ ``characters``. + common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): + Common characters to not transform when + masking. Useful to avoid removing punctuation. + + This field is a member of `oneof`_ ``characters``. + """ + + class CommonCharsToIgnore(proto.Enum): + r"""Convenience enum for indicating common characters to not + transform. + + Values: + COMMON_CHARS_TO_IGNORE_UNSPECIFIED (0): + Unused. + NUMERIC (1): + 0-9 + ALPHA_UPPER_CASE (2): + A-Z + ALPHA_LOWER_CASE (3): + a-z + PUNCTUATION (4): + US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ + WHITESPACE (5): + Whitespace character, one of [ \\t\n\x0B\f\r] + """ + COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 + NUMERIC = 1 + ALPHA_UPPER_CASE = 2 + ALPHA_LOWER_CASE = 3 + PUNCTUATION = 4 + WHITESPACE = 5 + + characters_to_skip: str = proto.Field( + proto.STRING, + number=1, + oneof="characters", + ) + common_characters_to_ignore: CommonCharsToIgnore = proto.Field( + proto.ENUM, + number=2, + oneof="characters", + enum=CommonCharsToIgnore, + ) + + +class CharacterMaskConfig(proto.Message): + r"""Partially mask a string by replacing a given number of characters + with a fixed character. Masking can start from the beginning or end + of the string. This can be used on data of any type (numbers, longs, + and so on) and when de-identifying structured data we'll attempt to + preserve the original data's type. (This allows you to take a long + like 123 and modify it to a string like \**3. + + Attributes: + masking_character (str): + Character to use to mask the sensitive values—for example, + ``*`` for an alphabetic string such as a name, or ``0`` for + a numeric string such as ZIP code or credit card number. + This string must have a length of 1. 
If not supplied, this + value defaults to ``*`` for strings, and ``0`` for digits. + number_to_mask (int): + Number of characters to mask. If not set, all matching chars + will be masked. Skipped characters do not count towards this + tally. + + If ``number_to_mask`` is negative, this denotes inverse + masking. Cloud DLP masks all but a number of characters. For + example, suppose you have the following values: + + - ``masking_character`` is ``*`` + - ``number_to_mask`` is ``-4`` + - ``reverse_order`` is ``false`` + - ``CharsToIgnore`` includes ``-`` + - Input string is ``1234-5678-9012-3456`` + + The resulting de-identified string is + ``****-****-****-3456``. Cloud DLP masks all but the last + four characters. If ``reverse_order`` is ``true``, all but + the first four characters are masked as + ``1234-****-****-****``. + reverse_order (bool): + Mask characters in reverse order. For example, if + ``masking_character`` is ``0``, ``number_to_mask`` is + ``14``, and ``reverse_order`` is ``false``, then the input + string ``1234-5678-9012-3456`` is masked as + ``00000000000000-3456``. If ``masking_character`` is ``*``, + ``number_to_mask`` is ``3``, and ``reverse_order`` is + ``true``, then the string ``12345`` is masked as ``12***``. + characters_to_ignore (MutableSequence[google.cloud.dlp_v2.types.CharsToIgnore]): + When masking a string, items in this list will be skipped + when replacing characters. For example, if the input string + is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` + and mask 5 characters with ``*``, Cloud DLP returns + ``***-**5-5555``. + """ + + masking_character: str = proto.Field( + proto.STRING, + number=1, + ) + number_to_mask: int = proto.Field( + proto.INT32, + number=2, + ) + reverse_order: bool = proto.Field( + proto.BOOL, + number=3, + ) + characters_to_ignore: MutableSequence["CharsToIgnore"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="CharsToIgnore", + ) + + +class FixedSizeBucketingConfig(proto.Message): + r"""Buckets values based on fixed size ranges. The Bucketing + transformation can provide all of this functionality, but requires + more configuration. This message is provided as a convenience to the + user for simple bucketing strategies. + + The transformed value will be a hyphenated string of + {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and + upper_bound = 20, all values that are within this bucket will be + replaced with "10-20". + + This can be used on data of type: double, long. + + If the bound Value type differs from the type of data being + transformed, we will first attempt converting the type of the data + to be transformed to match the type of the bound before comparing. + + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn + more. + + Attributes: + lower_bound (google.cloud.dlp_v2.types.Value): + Required. Lower bound value of buckets. All values less than + ``lower_bound`` are grouped together into a single bucket; + for example if ``lower_bound`` = 10, then all values less + than 10 are replaced with the value "-10". + upper_bound (google.cloud.dlp_v2.types.Value): + Required. Upper bound value of buckets. All values greater + than upper_bound are grouped together into a single bucket; + for example if ``upper_bound`` = 89, then all values greater + than 89 are replaced with the value "89+". + bucket_size (float): + Required. Size of each bucket (except for minimum and + maximum buckets). 
So if ``lower_bound`` = 10, + ``upper_bound`` = 89, and ``bucket_size`` = 10, then the + following buckets would be used: -10, 10-20, 20-30, 30-40, + 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 + decimals works. + """ + + lower_bound: "Value" = proto.Field( + proto.MESSAGE, + number=1, + message="Value", + ) + upper_bound: "Value" = proto.Field( + proto.MESSAGE, + number=2, + message="Value", + ) + bucket_size: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + +class BucketingConfig(proto.Message): + r"""Generalization function that buckets values based on ranges. The + ranges and replacement values are dynamically provided by the user + for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> + HIGH This can be used on data of type: number, long, string, + timestamp. If the bound ``Value`` type differs from the type of data + being transformed, we will first attempt converting the type of the + data to be transformed to match the type of the bound before + comparing. See https://cloud.google.com/dlp/docs/concepts-bucketing + to learn more. + + Attributes: + buckets (MutableSequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): + Set of buckets. Ranges must be + non-overlapping. + """ + + class Bucket(proto.Message): + r"""Bucket is represented as a range, along with replacement + values. + + Attributes: + min_ (google.cloud.dlp_v2.types.Value): + Lower bound of the range, inclusive. Type + should be the same as max if used. + max_ (google.cloud.dlp_v2.types.Value): + Upper bound of the range, exclusive; type + must match min. + replacement_value (google.cloud.dlp_v2.types.Value): + Required. Replacement value for this bucket. + """ + + min_: "Value" = proto.Field( + proto.MESSAGE, + number=1, + message="Value", + ) + max_: "Value" = proto.Field( + proto.MESSAGE, + number=2, + message="Value", + ) + replacement_value: "Value" = proto.Field( + proto.MESSAGE, + number=3, + message="Value", + ) + + buckets: MutableSequence[Bucket] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Bucket, + ) + + +class CryptoReplaceFfxFpeConfig(proto.Message): + r"""Replaces an identifier with a surrogate using Format Preserving + Encryption (FPE) with the FFX mode of operation; however when used + in the ``ReidentifyContent`` API method, it serves the opposite + function by reversing the surrogate back into the original + identifier. The identifier must be encoded as ASCII. For a given + crypto key and context, the same identifier will be replaced with + the same surrogate. Identifiers must be at least two characters + long. In the case that the identifier is the empty string, it will + be skipped. See https://cloud.google.com/dlp/docs/pseudonymization + to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases + which do not require preserving the input alphabet space and size, + plus warrant referential integrity. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Required. The key used by the encryption + algorithm. 
+ context (google.cloud.dlp_v2.types.FieldId): + The 'tweak', a context may be used for higher security since + the same identifier in two different contexts won't be given + the same surrogate. If the context is not set, a default + tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + unstructured ``ContentItem``\ s. Currently, the referenced + field may be of value type integer or string. + + The tweak is constructed as a sequence of bytes in big + endian byte order such that: + + - a 64 bit integer is encoded followed by a single byte of + value 1 + - a string is encoded in UTF-8 format followed by a single + byte of value 2 + common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): + Common alphabets. + + This field is a member of `oneof`_ ``alphabet``. + custom_alphabet (str): + This is supported by mapping these to the alphanumeric + characters that the FFX mode natively supports. This happens + before/after encryption/decryption. Each character listed + must appear only once. Number of characters must be in the + range [2, 95]. This must be encoded as ASCII. The order of + characters does not matter. The full list of allowed + characters is: + 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ + + This field is a member of `oneof`_ ``alphabet``. + radix (int): + The native way to select the alphabet. Must be in the range + [2, 95]. + + This field is a member of `oneof`_ ``alphabet``. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom infoType to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom infoType followed by the number + of characters comprising the surrogate. The following scheme + defines the format: + info_type_name(surrogate_character_count):surrogate + + For example, if the name of custom infoType is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom infoType + ```SurrogateType`` `__. + This facilitates reversal of the surrogate when it occurs in + free text. + + In order for inspection to work properly, the name of this + infoType must not occur naturally anywhere in your data; + otherwise, inspection may find a surrogate that does not + correspond to an actual identifier. Therefore, choose your + custom infoType name carefully after considering what your + data looks like. One way to select a name that has a high + chance of yielding reliable detection is to include one or + more unicode characters that are highly improbable to exist + in your data. For example, assuming your data is entered + from a regular ASCII keyboard, the symbol with the hex code + point 29DD might be used like so: ⧝MY_TOKEN_TYPE + """ + + class FfxCommonNativeAlphabet(proto.Enum): + r"""These are commonly used subsets of the alphabet that the FFX + mode natively supports. In the algorithm, the alphabet is + selected using the "radix". Therefore each corresponds to a + particular radix. + + Values: + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (0): + Unused. 
+ NUMERIC (1): + ``[0-9]`` (radix of 10) + HEXADECIMAL (2): + ``[0-9A-F]`` (radix of 16) + UPPER_CASE_ALPHA_NUMERIC (3): + ``[0-9A-Z]`` (radix of 36) + ALPHA_NUMERIC (4): + ``[0-9A-Za-z]`` (radix of 62) + """ + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 + NUMERIC = 1 + HEXADECIMAL = 2 + UPPER_CASE_ALPHA_NUMERIC = 3 + ALPHA_NUMERIC = 4 + + crypto_key: "CryptoKey" = proto.Field( + proto.MESSAGE, + number=1, + message="CryptoKey", + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + common_alphabet: FfxCommonNativeAlphabet = proto.Field( + proto.ENUM, + number=4, + oneof="alphabet", + enum=FfxCommonNativeAlphabet, + ) + custom_alphabet: str = proto.Field( + proto.STRING, + number=5, + oneof="alphabet", + ) + radix: int = proto.Field( + proto.INT32, + number=6, + oneof="alphabet", + ) + surrogate_info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=8, + message=storage.InfoType, + ) + + +class CryptoKey(proto.Message): + r"""This is a data encryption key (DEK) (as opposed to + a key encryption key (KEK) stored by Cloud Key Management + Service (Cloud KMS). + When using Cloud KMS to wrap or unwrap a DEK, be sure to set an + appropriate IAM policy on the KEK to ensure an attacker cannot + unwrap the DEK. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + transient (google.cloud.dlp_v2.types.TransientCryptoKey): + Transient crypto key + + This field is a member of `oneof`_ ``source``. + unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): + Unwrapped crypto key + + This field is a member of `oneof`_ ``source``. + kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): + Key wrapped using Cloud KMS + + This field is a member of `oneof`_ ``source``. + """ + + transient: "TransientCryptoKey" = proto.Field( + proto.MESSAGE, + number=1, + oneof="source", + message="TransientCryptoKey", + ) + unwrapped: "UnwrappedCryptoKey" = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message="UnwrappedCryptoKey", + ) + kms_wrapped: "KmsWrappedCryptoKey" = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message="KmsWrappedCryptoKey", + ) + + +class TransientCryptoKey(proto.Message): + r"""Use this to have a random data crypto key generated. + It will be discarded after the request finishes. + + Attributes: + name (str): + Required. Name of the key. This is an arbitrary string used + to differentiate different keys. A unique key is generated + per name: two separate ``TransientCryptoKey`` protos share + the same generated key if their names are the same. When the + data crypto key is generated, this name is not used in any + way (repeating the api call will result in a different key + being generated). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UnwrappedCryptoKey(proto.Message): + r"""Using raw keys is prone to security risks due to accidentally + leaking the key. Choose another type of key if possible. + + Attributes: + key (bytes): + Required. A 128/192/256 bit key. + """ + + key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class KmsWrappedCryptoKey(proto.Message): + r"""Include to use an existing data crypto key wrapped by KMS. 
The + wrapped key must be a 128-, 192-, or 256-bit key. Authorization + requires the following IAM permissions when sending a request to + perform a crypto transformation using a KMS-wrapped crypto key: + dlp.kms.encrypt + + For more information, see [Creating a wrapped key] + (https://cloud.google.com/dlp/docs/create-wrapped-key). + + Note: When you use Cloud KMS for cryptographic operations, `charges + apply `__. + + Attributes: + wrapped_key (bytes): + Required. The wrapped data crypto key. + crypto_key_name (str): + Required. The resource name of the KMS + CryptoKey to use for unwrapping. + """ + + wrapped_key: bytes = proto.Field( + proto.BYTES, + number=1, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DateShiftConfig(proto.Message): + r"""Shifts dates by random number of days, with option to be + consistent for the same context. See + https://cloud.google.com/dlp/docs/concepts-date-shifting to + learn more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + upper_bound_days (int): + Required. Range of shift in days. Actual + shift will be selected at random within this + range (inclusive ends). Negative means shift to + earlier in time. Must not be more than 365250 + days (1000 years) each direction. + + For example, 3 means shift date to at most 3 + days into the future. + lower_bound_days (int): + Required. For example, -5 means shift date to + at most 5 days back in the past. + context (google.cloud.dlp_v2.types.FieldId): + Points to the field that contains the + context, for example, an entity id. If set, must + also set cryptoKey. If set, shift will be + consistent for the given context. + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Causes the shift to be computed based on this key and the + context. This results in the same shift for the same context + and crypto_key. If set, must also set context. Can only be + applied to table items. + + This field is a member of `oneof`_ ``method``. + """ + + upper_bound_days: int = proto.Field( + proto.INT32, + number=1, + ) + lower_bound_days: int = proto.Field( + proto.INT32, + number=2, + ) + context: storage.FieldId = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + crypto_key: "CryptoKey" = proto.Field( + proto.MESSAGE, + number=4, + oneof="method", + message="CryptoKey", + ) + + +class InfoTypeTransformations(proto.Message): + r"""A type of transformation that will scan unstructured text and apply + various ``PrimitiveTransformation``\ s to each finding, where the + transformation is applied to only values that were identified as a + specific info_type. + + Attributes: + transformations (MutableSequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): + Required. Transformation for each infoType. + Cannot specify more than one for a given + infoType. + """ + + class InfoTypeTransformation(proto.Message): + r"""A transformation to apply to text that is identified as a specific + info_type. + + Attributes: + info_types (MutableSequence[google.cloud.dlp_v2.types.InfoType]): + InfoTypes to apply the transformation to. An empty list will + cause this transformation to apply to all findings that + correspond to infoTypes that were requested in + ``InspectConfig``. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Required. Primitive transformation to apply + to the infoType. 
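+
+        Example: a minimal illustrative sketch that replaces phone
+        numbers with their infoType name (the chosen infoType is a
+        stand-in)::
+
+            from google.cloud import dlp_v2
+
+            transformation = dlp_v2.InfoTypeTransformations.InfoTypeTransformation(
+                info_types=[dlp_v2.InfoType(name="PHONE_NUMBER")],
+                primitive_transformation=dlp_v2.PrimitiveTransformation(
+                    replace_with_info_type_config=dlp_v2.ReplaceWithInfoTypeConfig(),
+                ),
+            )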
+ """ + + info_types: MutableSequence[storage.InfoType] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + primitive_transformation: "PrimitiveTransformation" = proto.Field( + proto.MESSAGE, + number=2, + message="PrimitiveTransformation", + ) + + transformations: MutableSequence[InfoTypeTransformation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InfoTypeTransformation, + ) + + +class FieldTransformation(proto.Message): + r"""The transformation to apply to the field. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Required. Input field(s) to apply the transformation to. + When you have columns that reference their position within a + list, omit the index from the FieldId. FieldId name matching + ignores the index. For example, instead of + "contact.nums[0].type", use "contact.nums.type". + condition (google.cloud.dlp_v2.types.RecordCondition): + Only apply the transformation if the condition evaluates to + true for the given ``RecordCondition``. The conditions are + allowed to reference fields that are not used in the actual + transformation. + + Example Use Cases: + + - Apply a different bucket transformation to an age column + if the zip code column for the same record is within a + specific range. + - Redact a field if the date of birth field is greater than + 85. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Apply the transformation to the entire field. + + This field is a member of `oneof`_ ``transformation``. + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the contents of the field as free text, and + selectively transform content that matches an ``InfoType``. + + This field is a member of `oneof`_ ``transformation``. + """ + + fields: MutableSequence[storage.FieldId] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + condition: "RecordCondition" = proto.Field( + proto.MESSAGE, + number=3, + message="RecordCondition", + ) + primitive_transformation: "PrimitiveTransformation" = proto.Field( + proto.MESSAGE, + number=4, + oneof="transformation", + message="PrimitiveTransformation", + ) + info_type_transformations: "InfoTypeTransformations" = proto.Field( + proto.MESSAGE, + number=5, + oneof="transformation", + message="InfoTypeTransformations", + ) + + +class RecordTransformations(proto.Message): + r"""A type of transformation that is applied over structured data + such as a table. + + Attributes: + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + Transform the record by applying various + field transformations. + record_suppressions (MutableSequence[google.cloud.dlp_v2.types.RecordSuppression]): + Configuration defining which records get + suppressed entirely. Records that match any + suppression rule are omitted from the output. 
+ """ + + field_transformations: MutableSequence["FieldTransformation"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="FieldTransformation", + ) + record_suppressions: MutableSequence["RecordSuppression"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RecordSuppression", + ) + + +class RecordSuppression(proto.Message): + r"""Configuration to suppress records whose suppression + conditions evaluate to true. + + Attributes: + condition (google.cloud.dlp_v2.types.RecordCondition): + A condition that when it evaluates to true + will result in the record being evaluated to be + suppressed from the transformed content. + """ + + condition: "RecordCondition" = proto.Field( + proto.MESSAGE, + number=1, + message="RecordCondition", + ) + + +class RecordCondition(proto.Message): + r"""A condition for determining whether a transformation should + be applied to a field. + + Attributes: + expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): + An expression. + """ + + class Condition(proto.Message): + r"""The field type of ``value`` and ``field`` do not need to match to be + considered equal, but not all comparisons are possible. EQUAL_TO and + NOT_EQUAL_TO attempt to compare even with incompatible types, but + all other comparisons are invalid with incompatible types. A + ``value`` of type: + + - ``string`` can be compared against all other types + - ``boolean`` can only be compared against other booleans + - ``integer`` can be compared against doubles or a string if the + string value can be parsed as an integer. + - ``double`` can be compared against integers or a string if the + string can be parsed as a double. + - ``Timestamp`` can be compared against strings in RFC 3339 date + string format. + - ``TimeOfDay`` can be compared against timestamps and strings in + the format of 'HH:mm:ss'. + + If we fail to compare do to type mismatch, a warning will be given + and the condition will evaluate to false. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Field within the record this + condition is evaluated against. + operator (google.cloud.dlp_v2.types.RelationalOperator): + Required. Operator used to compare the field + or infoType to the value. + value (google.cloud.dlp_v2.types.Value): + Value to compare against. [Mandatory, except for ``EXISTS`` + tests.] + """ + + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + operator: "RelationalOperator" = proto.Field( + proto.ENUM, + number=3, + enum="RelationalOperator", + ) + value: "Value" = proto.Field( + proto.MESSAGE, + number=4, + message="Value", + ) + + class Conditions(proto.Message): + r"""A collection of conditions. + + Attributes: + conditions (MutableSequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): + A collection of conditions. + """ + + conditions: MutableSequence["RecordCondition.Condition"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RecordCondition.Condition", + ) + + class Expressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): + The operator to apply to the result of conditions. Default + and currently only supported value is ``AND``. 
+ conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): + Conditions to apply to the expression. + + This field is a member of `oneof`_ ``type``. + """ + + class LogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused + AND (1): + Conditional AND + """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + AND = 1 + + logical_operator: "RecordCondition.Expressions.LogicalOperator" = proto.Field( + proto.ENUM, + number=1, + enum="RecordCondition.Expressions.LogicalOperator", + ) + conditions: "RecordCondition.Conditions" = proto.Field( + proto.MESSAGE, + number=3, + oneof="type", + message="RecordCondition.Conditions", + ) + + expressions: Expressions = proto.Field( + proto.MESSAGE, + number=3, + message=Expressions, + ) + + +class TransformationOverview(proto.Message): + r"""Overview of the modifications that occurred. + + Attributes: + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + transformation_summaries (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary]): + Transformations applied to the dataset. + """ + + transformed_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + transformation_summaries: MutableSequence[ + "TransformationSummary" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="TransformationSummary", + ) + + +class TransformationSummary(proto.Message): + r"""Summary of a single transformation. Only one of 'transformation', + 'field_transformation', or 'record_suppress' will be set. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a + specific InfoType. + field (google.cloud.dlp_v2.types.FieldId): + Set if the transformation was limited to a + specific FieldId. + transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + The specific transformation these stats apply + to. + field_transformations (MutableSequence[google.cloud.dlp_v2.types.FieldTransformation]): + The field transformation that was applied. + If multiple field transformations are requested + for a single field, this list will contain all + of them; otherwise, only one is supplied. + record_suppress (google.cloud.dlp_v2.types.RecordSuppression): + The specific suppression option these stats + apply to. + results (MutableSequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): + Collection of all transformations that took + place or had an error. + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + """ + + class TransformationResultCode(proto.Enum): + r"""Possible outcomes of transformations. + + Values: + TRANSFORMATION_RESULT_CODE_UNSPECIFIED (0): + Unused + SUCCESS (1): + Transformation completed without an error. + ERROR (2): + Transformation had an error. + """ + TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 + SUCCESS = 1 + ERROR = 2 + + class SummaryResult(proto.Message): + r"""A collection that informs the user the number of times a particular + ``TransformationResultCode`` and error details occurred. + + Attributes: + count (int): + Number of transformations counted by this + result. + code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): + Outcome of the transformation. + details (str): + A place for warnings or errors to show up if + a transformation didn't work as expected. 
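+
+        Example: an illustrative sketch of checking results on a
+        returned summary (``summary`` is assumed to be a
+        ``TransformationSummary`` taken from a de-identify response)::
+
+            from google.cloud import dlp_v2
+
+            code_enum = dlp_v2.TransformationSummary.TransformationResultCode
+            for result in summary.results:  # summary: TransformationSummary
+                if result.code == code_enum.ERROR:
+                    print(result.count, result.details)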
+ """ + + count: int = proto.Field( + proto.INT64, + number=1, + ) + code: "TransformationSummary.TransformationResultCode" = proto.Field( + proto.ENUM, + number=2, + enum="TransformationSummary.TransformationResultCode", + ) + details: str = proto.Field( + proto.STRING, + number=3, + ) + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + field: storage.FieldId = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + transformation: "PrimitiveTransformation" = proto.Field( + proto.MESSAGE, + number=3, + message="PrimitiveTransformation", + ) + field_transformations: MutableSequence["FieldTransformation"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="FieldTransformation", + ) + record_suppress: "RecordSuppression" = proto.Field( + proto.MESSAGE, + number=6, + message="RecordSuppression", + ) + results: MutableSequence[SummaryResult] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=SummaryResult, + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=7, + ) + + +class TransformationDescription(proto.Message): + r"""A flattened description of a ``PrimitiveTransformation`` or + ``RecordSuppression``. + + Attributes: + type_ (google.cloud.dlp_v2.types.TransformationType): + The transformation type. + description (str): + A description of the transformation. This is empty for a + RECORD_SUPPRESSION, or is the output of calling toString() + on the ``PrimitiveTransformation`` protocol buffer message + for any other type of transformation. + condition (str): + A human-readable string representation of the + ``RecordCondition`` corresponding to this transformation. + Set if a ``RecordCondition`` was used to determine whether + or not to apply this transformation. + + Examples: \* (age_field > 85) \* (age_field <= 18) \* + (zip_field exists) \* (zip_field == 01234) && (city_field != + "Springville") \* (zip_field == 01234) && (age_field <= 18) + && (city_field exists) + info_type (google.cloud.dlp_v2.types.InfoType): + Set if the transformation was limited to a specific + ``InfoType``. + """ + + type_: "TransformationType" = proto.Field( + proto.ENUM, + number=1, + enum="TransformationType", + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + condition: str = proto.Field( + proto.STRING, + number=3, + ) + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=4, + message=storage.InfoType, + ) + + +class TransformationDetails(proto.Message): + r"""Details about a single transformation. This object contains a + description of the transformation, information about whether the + transformation was successfully applied, and the precise + location where the transformation occurred. These details are + stored in a user-specified BigQuery table. + + Attributes: + resource_name (str): + The name of the job that completed the + transformation. + container_name (str): + The top level name of the container where the + transformation is located (this will be the + source file name or table name). + transformation (MutableSequence[google.cloud.dlp_v2.types.TransformationDescription]): + Description of transformation. This would only contain more + than one element if there were multiple matching + transformations and which one to apply was ambiguous. Not + set for states that contain no transformation, currently + only state that contains no transformation is + TransformationResultStateType.METADATA_UNRETRIEVABLE. 
+ status_details (google.cloud.dlp_v2.types.TransformationResultStatus): + Status of the transformation, if + transformation was not successful, this will + specify what caused it to fail, otherwise it + will show that the transformation was + successful. + transformed_bytes (int): + The number of bytes that were transformed. If + transformation was unsuccessful or did not take + place because there was no content to transform, + this will be zero. + transformation_location (google.cloud.dlp_v2.types.TransformationLocation): + The precise location of the transformed + content in the original container. + """ + + resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + container_name: str = proto.Field( + proto.STRING, + number=2, + ) + transformation: MutableSequence["TransformationDescription"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="TransformationDescription", + ) + status_details: "TransformationResultStatus" = proto.Field( + proto.MESSAGE, + number=4, + message="TransformationResultStatus", + ) + transformed_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + transformation_location: "TransformationLocation" = proto.Field( + proto.MESSAGE, + number=6, + message="TransformationLocation", + ) + + +class TransformationLocation(proto.Message): + r"""Specifies the location of a transformation. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + finding_id (str): + For infotype transformations, link to the + corresponding findings ID so that location + information does not need to be duplicated. Each + findings ID correlates to an entry in the + findings output table, this table only gets + created when users specify to save findings (add + the save findings action to the request). + + This field is a member of `oneof`_ ``location_type``. + record_transformation (google.cloud.dlp_v2.types.RecordTransformation): + For record transformations, provide a field + and container information. + + This field is a member of `oneof`_ ``location_type``. + container_type (google.cloud.dlp_v2.types.TransformationContainerType): + Information about the functionality of the + container where this finding occurred, if + available. + """ + + finding_id: str = proto.Field( + proto.STRING, + number=1, + oneof="location_type", + ) + record_transformation: "RecordTransformation" = proto.Field( + proto.MESSAGE, + number=2, + oneof="location_type", + message="RecordTransformation", + ) + container_type: "TransformationContainerType" = proto.Field( + proto.ENUM, + number=3, + enum="TransformationContainerType", + ) + + +class RecordTransformation(proto.Message): + r""" + + Attributes: + field_id (google.cloud.dlp_v2.types.FieldId): + For record transformations, provide a field. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if + applicable. + container_version (str): + Container version, if available ("generation" + for Cloud Storage). 
+ """ + + field_id: storage.FieldId = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + container_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + container_version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TransformationResultStatus(proto.Message): + r""" + + Attributes: + result_status_type (google.cloud.dlp_v2.types.TransformationResultStatusType): + Transformation result status type, this will + be either SUCCESS, or it will be the reason for + why the transformation was not completely + successful. + details (google.rpc.status_pb2.Status): + Detailed error codes and messages + """ + + result_status_type: "TransformationResultStatusType" = proto.Field( + proto.ENUM, + number=1, + enum="TransformationResultStatusType", + ) + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class TransformationDetailsStorageConfig(proto.Message): + r"""Config for storing transformation details. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + The BigQuery table in which to store the output. This may be + an existing table or in a new table in an existing dataset. + If table_id is not set a new one will be generated for you + with the following format: + dlp_googleapis_transformation_details_yyyy_mm_dd_[dlp_job_id]. + Pacific time zone will be used for generating the date + details. + + This field is a member of `oneof`_ ``type``. + """ + + table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=storage.BigQueryTable, + ) + + +class Schedule(proto.Message): + r"""Schedule for inspect job triggers. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + recurrence_period_duration (google.protobuf.duration_pb2.Duration): + With this option a job is started on a + regular periodic basis. For example: every day + (86400 seconds). + + A scheduled start time will be skipped if the + previous execution has not ended when its + scheduled time occurs. + + This value must be set to a time duration + greater than or equal to 1 day and can be no + longer than 60 days. + + This field is a member of `oneof`_ ``option``. + """ + + recurrence_period_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + oneof="option", + message=duration_pb2.Duration, + ) + + +class Manual(proto.Message): + r"""Job trigger option for hybrid jobs. Jobs must be manually + created and finished. + + """ + + +class InspectTemplate(proto.Message): + r"""The inspectTemplate contains a configuration (set of types of + sensitive data to be detected) to be used anywhere you otherwise + would normally specify InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + The core content of the template. + Configuration of the scanning process. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + inspect_config: "InspectConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="InspectConfig", + ) + + +class DeidentifyTemplate(proto.Message): + r"""DeidentifyTemplates contains instructions on how to + de-identify content. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + The core content of the template. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + deidentify_config: "DeidentifyConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="DeidentifyConfig", + ) + + +class Error(proto.Message): + r"""Details information about an error encountered during job + execution or the results of an unsuccessful activation of the + JobTrigger. + + Attributes: + details (google.rpc.status_pb2.Status): + Detailed error codes and messages. + timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): + The times the error occurred. + """ + + details: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class JobTrigger(proto.Message): + r"""Contains a configuration to make dlp api calls on a repeating + basis. See + https://cloud.google.com/dlp/docs/concepts-job-triggers to learn + more. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Unique resource name for the triggeredJob, assigned by the + service when the triggeredJob is created, for example + ``projects/dlp-test-project/jobTriggers/53234423``. 
+ display_name (str): + Display name (max 100 chars) + description (str): + User provided description (max 256 chars) + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + For inspect jobs, a snapshot of the + configuration. + + This field is a member of `oneof`_ ``job``. + triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): + A list of triggers which will be OR'ed + together. Only one in the list needs to trigger + for a job to be started. The list may contain + only a single Schedule trigger and must have at + least one object. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Output only. A stream of errors encountered + when the trigger was activated. Repeated errors + may result in the JobTrigger automatically being + paused. Will return the last 100 errors. + Whenever the JobTrigger is modified this list + will be cleared. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of a + triggeredJob. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of a + triggeredJob. + last_run_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of the last time + this trigger executed. + status (google.cloud.dlp_v2.types.JobTrigger.Status): + Required. A status for this trigger. + """ + + class Status(proto.Enum): + r"""Whether the trigger is currently active. If PAUSED or + CANCELLED, no jobs will be created with this configuration. The + service may automatically pause triggers experiencing frequent + errors. To restart a job, set the status to HEALTHY after + correcting user errors. + + Values: + STATUS_UNSPECIFIED (0): + Unused. + HEALTHY (1): + Trigger is healthy. + PAUSED (2): + Trigger is temporarily paused. + CANCELLED (3): + Trigger is cancelled and can not be resumed. + """ + STATUS_UNSPECIFIED = 0 + HEALTHY = 1 + PAUSED = 2 + CANCELLED = 3 + + class Trigger(proto.Message): + r"""What event needs to occur for a new job to be started. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + schedule (google.cloud.dlp_v2.types.Schedule): + Create a job on a repeating basis based on + the elapse of time. + + This field is a member of `oneof`_ ``trigger``. + manual (google.cloud.dlp_v2.types.Manual): + For use with hybrid jobs. Jobs must be + manually created and finished. + + This field is a member of `oneof`_ ``trigger``. 
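Since `schedule` and `manual` share the `trigger` oneof, assigning one clears the other. A minimal sketch, assuming standard proto-plus oneof semantics (the `in` operator checks field presence):

```python
from google.cloud import dlp_v2
from google.protobuf import duration_pb2

trigger = dlp_v2.JobTrigger.Trigger(
    schedule=dlp_v2.Schedule(
        recurrence_period_duration=duration_pb2.Duration(seconds=86400)
    )
)
# Assigning the other oneof member clears ``schedule`` automatically.
trigger.manual = dlp_v2.Manual()
assert "manual" in trigger and "schedule" not in trigger
```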
+ """ + + schedule: "Schedule" = proto.Field( + proto.MESSAGE, + number=1, + oneof="trigger", + message="Schedule", + ) + manual: "Manual" = proto.Field( + proto.MESSAGE, + number=2, + oneof="trigger", + message="Manual", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + inspect_job: "InspectJobConfig" = proto.Field( + proto.MESSAGE, + number=4, + oneof="job", + message="InspectJobConfig", + ) + triggers: MutableSequence[Trigger] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=Trigger, + ) + errors: MutableSequence["Error"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="Error", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + last_run_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + status: Status = proto.Field( + proto.ENUM, + number=10, + enum=Status, + ) + + +class Action(proto.Message): + r"""A task to execute on the completion of a job. + See https://cloud.google.com/dlp/docs/concepts-actions to learn + more. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): + Save resulting findings in a provided + location. + + This field is a member of `oneof`_ ``action``. + pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): + Publish a notification to a Pub/Sub topic. + + This field is a member of `oneof`_ ``action``. + publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): + Publish summary to Cloud Security Command + Center (Alpha). + + This field is a member of `oneof`_ ``action``. + publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): + Publish findings to Cloud Datahub. + + This field is a member of `oneof`_ ``action``. + deidentify (google.cloud.dlp_v2.types.Action.Deidentify): + Create a de-identified copy of the input + data. + + This field is a member of `oneof`_ ``action``. + job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): + Sends an email when the job completes. The email goes to IAM + project owners and technical `Essential + Contacts `__. + + This field is a member of `oneof`_ ``action``. + publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): + Enable Stackdriver metric dlp.googleapis.com/finding_count. + + This field is a member of `oneof`_ ``action``. + """ + + class SaveFindings(proto.Message): + r"""If set, the detailed findings will be persisted to the + specified OutputStorageConfig. Only a single instance of this + action can be specified. + Compatible with: Inspect, Risk + + Attributes: + output_config (google.cloud.dlp_v2.types.OutputStorageConfig): + Location to store findings outside of DLP. 
+        """
+
+        output_config: "OutputStorageConfig" = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message="OutputStorageConfig",
+        )
+
+    class PublishToPubSub(proto.Message):
+        r"""Publish a message into a given Pub/Sub topic when DlpJob has
+        completed. The message contains a single field, ``DlpJobName``,
+        which is equal to the finished job's
+        ```DlpJob.name`` `__.
+        Compatible with: Inspect, Risk
+
+        Attributes:
+            topic (str):
+                Cloud Pub/Sub topic to send notifications to.
+                The topic must have given publishing access
+                rights to the DLP API service account executing
+                the long running DlpJob sending the
+                notifications. Format is
+                projects/{project}/topics/{topic}.
+        """
+
+        topic: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+
+    class PublishSummaryToCscc(proto.Message):
+        r"""Publish the result summary of a DlpJob to the Cloud Security
+        Command Center (CSCC Alpha).
+        This action is only available for projects which are part of an
+        organization and whitelisted for the alpha Cloud Security
+        Command Center.
+        The action will publish the count of finding instances and their
+        info types. The summary of findings will be persisted in CSCC
+        and is governed by CSCC service-specific policy; see
+        https://cloud.google.com/terms/service-terms. Only a single
+        instance of this action can be specified. Compatible with:
+        Inspect
+
+        """
+
+    class PublishFindingsToCloudDataCatalog(proto.Message):
+        r"""Publish findings of a DlpJob to Data Catalog. In Data Catalog, tag
+        templates are applied to the resource that Cloud DLP scanned. Data
+        Catalog tag templates are stored in the same project and region
+        where the BigQuery table exists. For Cloud DLP to create and apply
+        the tag template, the Cloud DLP service agent must have the
+        ``roles/datacatalog.tagTemplateOwner`` permission on the project.
+        The tag template contains fields summarizing the results of the
+        DlpJob. Any field values previously written by another DlpJob are
+        deleted. [InfoType naming patterns][google.privacy.dlp.v2.InfoType]
+        are strictly enforced when using this feature.
+
+        Findings are persisted in Data Catalog storage and are governed by
+        service-specific policies for Data Catalog. For more information,
+        see `Service Specific
+        Terms `__.
+
+        Only a single instance of this action can be specified. This action
+        is allowed only if all resources being scanned are BigQuery tables.
+        Compatible with: Inspect
+
+        """
+
+    class Deidentify(proto.Message):
+        r"""Create a de-identified copy of the requested table or files.
+
+        A TransformationDetail will be created for each transformation.
+
+        If any rows in BigQuery are skipped during de-identification
+        (transformation errors or row size exceeds BigQuery insert API
+        limits) they are placed in the failure output table. If the original
+        row exceeds the BigQuery insert API limit it will be truncated when
+        written to the failure output table. The failure output table can be
+        set in the
+        action.deidentify.output.big_query_output.deidentified_failure_output_table
+        field; if no table is set, a table will be automatically created in
+        the same project and dataset as the original table.
+
+        Compatible with: Inspect
+
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            transformation_config (google.cloud.dlp_v2.types.TransformationConfig):
+                User specified deidentify templates and
+                configs for structured, unstructured, and image
+                files.
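A hedged sketch of the `SaveFindings` action described above, persisting findings to a BigQuery table (all identifiers hypothetical):

```python
from google.cloud import dlp_v2

action = dlp_v2.Action(
    save_findings=dlp_v2.Action.SaveFindings(
        output_config=dlp_v2.OutputStorageConfig(
            table=dlp_v2.BigQueryTable(
                project_id="example-project",
                dataset_id="dlp_results",
                table_id="findings",
            )
        )
    )
)
```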
+ transformation_details_storage_config (google.cloud.dlp_v2.types.TransformationDetailsStorageConfig): + Config for storing transformation details. This is separate + from the de-identified content, and contains metadata about + the successful transformations and/or failures that occurred + while de-identifying. This needs to be set in order for + users to access information about the status of each + transformation (see + [TransformationDetails][google.privacy.dlp.v2.TransformationDetails] + message for more information about what is noted). + cloud_storage_output (str): + Required. User settable Cloud Storage bucket + and folders to store de-identified files. This + field must be set for cloud storage + deidentification. The output Cloud Storage + bucket must be different from the input bucket. + De-identified files will overwrite files in the + output path. + + Form of: gs://bucket/folder/ or gs://bucket + + This field is a member of `oneof`_ ``output``. + file_types_to_transform (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of user-specified file type groups to transform. If + specified, only the files with these filetypes will be + transformed. If empty, all supported files will be + transformed. Supported types may be automatically added over + time. If a file type is set in this field that isn't + supported by the Deidentify action then the job will fail + and will not be successfully created/started. Currently the + only filetypes supported are: IMAGES, TEXT_FILES, CSV, TSV. + """ + + transformation_config: "TransformationConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="TransformationConfig", + ) + transformation_details_storage_config: "TransformationDetailsStorageConfig" = ( + proto.Field( + proto.MESSAGE, + number=3, + message="TransformationDetailsStorageConfig", + ) + ) + cloud_storage_output: str = proto.Field( + proto.STRING, + number=9, + oneof="output", + ) + file_types_to_transform: MutableSequence[ + storage.FileType + ] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=storage.FileType, + ) + + class JobNotificationEmails(proto.Message): + r"""Sends an email when the job completes. The email goes to IAM project + owners and technical `Essential + Contacts `__. + + """ + + class PublishToStackdriver(proto.Message): + r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This + will publish a metric to stack driver on each infotype requested and + how many findings were found for it. CustomDetectors will be + bucketed as 'Custom' under the Stackdriver label 'info_type'. 
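To ground the `Deidentify` action fields described above, a minimal sketch (bucket and template names hypothetical; the output bucket must differ from the scanned bucket):

```python
from google.cloud import dlp_v2

deidentify_action = dlp_v2.Action(
    deidentify=dlp_v2.Action.Deidentify(
        transformation_config=dlp_v2.TransformationConfig(
            # Free-form text template; structured content falls back
            # to it when no structured template is given.
            deidentify_template="projects/example-project/deidentifyTemplates/freeform",
        ),
        # Output must be a different bucket than the one being scanned.
        cloud_storage_output="gs://example-output-bucket/deidentified/",
        # Restrict transformation to text and CSV files.
        file_types_to_transform=[
            dlp_v2.FileType.TEXT_FILE,
            dlp_v2.FileType.CSV,
        ],
    )
)
```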
+
+        """
+
+    save_findings: SaveFindings = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof="action",
+        message=SaveFindings,
+    )
+    pub_sub: PublishToPubSub = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="action",
+        message=PublishToPubSub,
+    )
+    publish_summary_to_cscc: PublishSummaryToCscc = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof="action",
+        message=PublishSummaryToCscc,
+    )
+    publish_findings_to_cloud_data_catalog: PublishFindingsToCloudDataCatalog = (
+        proto.Field(
+            proto.MESSAGE,
+            number=5,
+            oneof="action",
+            message=PublishFindingsToCloudDataCatalog,
+        )
+    )
+    deidentify: Deidentify = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        oneof="action",
+        message=Deidentify,
+    )
+    job_notification_emails: JobNotificationEmails = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        oneof="action",
+        message=JobNotificationEmails,
+    )
+    publish_to_stackdriver: PublishToStackdriver = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        oneof="action",
+        message=PublishToStackdriver,
+    )
+
+
+class TransformationConfig(proto.Message):
+    r"""User specified templates and configs for how to deidentify
+    structured, unstructured, and image files. User must provide
+    either an unstructured deidentify template or at least one redact
+    image config.
+
+    Attributes:
+        deidentify_template (str):
+            De-identify template. If this template is specified, it will
+            serve as the default de-identify template. This template
+            cannot contain ``record_transformations`` since it can be
+            used for unstructured content such as free-form text files.
+            If this template is not set, a default
+            ``ReplaceWithInfoTypeConfig`` will be used to de-identify
+            unstructured content.
+        structured_deidentify_template (str):
+            Structured de-identify template. If this template is
+            specified, it will serve as the de-identify template for
+            structured content such as delimited files and tables. If
+            this template is not set but the ``deidentify_template`` is
+            set, then ``deidentify_template`` will also apply to the
+            structured content. If neither template is set, a default
+            ``ReplaceWithInfoTypeConfig`` will be used to de-identify
+            structured content.
+        image_redact_template (str):
+            Image redact template.
+            If this template is specified, it will serve as
+            the de-identify template for images. If this
+            template is not set, all findings in the image
+            will be redacted with a black box.
+    """
+
+    deidentify_template: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    structured_deidentify_template: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    image_redact_template: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class CreateInspectTemplateRequest(proto.Message):
+    r"""Request message for CreateInspectTemplate.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: "InspectTemplate" = proto.Field( + proto.MESSAGE, + number=2, + message="InspectTemplate", + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInspectTemplateRequest(proto.Message): + r"""Request message for UpdateInspectTemplate. + + Attributes: + name (str): + Required. Resource name of organization and inspectTemplate + to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_template: "InspectTemplate" = proto.Field( + proto.MESSAGE, + number=2, + message="InspectTemplate", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetInspectTemplateRequest(proto.Message): + r"""Request message for GetInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListInspectTemplatesRequest(proto.Message): + r"""Request message for ListInspectTemplates. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListInspectTemplates``. + page_size (int): + Size of the page, can be limited by the + server. If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInspectTemplatesResponse(proto.Message): + r"""Response message for ListInspectTemplates. + + Attributes: + inspect_templates (MutableSequence[google.cloud.dlp_v2.types.InspectTemplate]): + List of inspectTemplates, up to page_size in + ListInspectTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListInspectTemplates request. + """ + + @property + def raw_page(self): + return self + + inspect_templates: MutableSequence["InspectTemplate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="InspectTemplate", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteInspectTemplateRequest(proto.Message): + r"""Request message for DeleteInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateJobTriggerRequest(proto.Message): + r"""Request message for CreateJobTrigger. + + Attributes: + parent (str): + Required. Parent resource name. 
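For the paging and `order_by` semantics above, a minimal client-side sketch (project and location hypothetical; the returned pager follows `next_page_token` transparently):

```python
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
request = dlp_v2.ListInspectTemplatesRequest(
    parent="projects/example-project/locations/europe-west3",
    page_size=50,
    order_by="update_time desc",
)
# Iterating the pager issues follow-up requests as needed.
for template in client.list_inspect_templates(request=request):
    print(template.name, template.display_name)
```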
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + trigger_id (str): + The trigger id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: "JobTrigger" = proto.Field( + proto.MESSAGE, + number=2, + message="JobTrigger", + ) + trigger_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ActivateJobTriggerRequest(proto.Message): + r"""Request message for ActivateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the trigger to activate, for + example ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateJobTriggerRequest(proto.Message): + r"""Request message for UpdateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + job_trigger: "JobTrigger" = proto.Field( + proto.MESSAGE, + number=2, + message="JobTrigger", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetJobTriggerRequest(proto.Message): + r"""Request message for GetJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDlpJobRequest(proto.Message): + r"""Request message for CreateDlpJobRequest. Used to initiate + long running jobs such as calculating risk metrics or inspecting + Google Cloud Storage. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + An inspection job scans a storage repository + for InfoTypes. + + This field is a member of `oneof`_ ``job``. + risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + A risk analysis job calculates + re-identification risk metrics for a BigQuery + table. + + This field is a member of `oneof`_ ``job``. + job_id (str): + The job id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inspect_job: "InspectJobConfig" = proto.Field( + proto.MESSAGE, + number=2, + oneof="job", + message="InspectJobConfig", + ) + risk_job: "RiskAnalysisJobConfig" = proto.Field( + proto.MESSAGE, + number=3, + oneof="job", + message="RiskAnalysisJobConfig", + ) + job_id: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListJobTriggersRequest(proto.Message): + r"""Request message for ListJobTriggers. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ListJobTriggers. ``order_by`` field must not change for + subsequent calls. + page_size (int): + Size of the page, can be limited by a server. + order_by (str): + Comma separated list of triggeredJob fields to order by, + followed by ``asc`` or ``desc`` postfix. This list is + case-insensitive, default sorting order is ascending, + redundant space characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the JobTrigger + was created. + - ``update_time``: corresponds to the time the JobTrigger + was last updated. + - ``last_run_time``: corresponds to the last time the + JobTrigger ran. + - ``name``: corresponds to the JobTrigger's name. + - ``display_name``: corresponds to the JobTrigger's display + name. + - ``status``: corresponds to JobTrigger's status. + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. 
A sequence of restrictions implicitly uses
+               ``AND``.
+            -  A restriction has the form of
+               ``{field} {operator} {value}``.
+            -  Supported fields/values for inspect triggers:
+
+               -  ``status`` - HEALTHY|PAUSED|CANCELLED
+               -  ``inspected_storage`` -
+                  DATASTORE|CLOUD_STORAGE|BIGQUERY
+               -  ``last_run_time`` - RFC 3339 formatted timestamp,
+                  surrounded by quotation marks. Nanoseconds are
+                  ignored.
+               -  ``error_count`` - Number of errors that have occurred
+                  while running.
+
+            -  The operator must be ``=`` or ``!=`` for status and
+               inspected_storage.
+
+            Examples:
+
+            -  inspected_storage = cloud_storage AND status = HEALTHY
+            -  inspected_storage = cloud_storage OR inspected_storage =
+               bigquery
+            -  inspected_storage = cloud_storage AND (status = PAUSED OR
+               status = HEALTHY)
+            -  last_run_time > "2017-12-12T00:00:00+00:00"
+
+            The length of this field should be no more than 500
+            characters.
+        type_ (google.cloud.dlp_v2.types.DlpJobType):
+            The type of jobs. Will use ``DlpJobType.INSPECT`` if not
+            set.
+        location_id (str):
+            Deprecated. This field has no effect.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    type_: "DlpJobType" = proto.Field(
+        proto.ENUM,
+        number=6,
+        enum="DlpJobType",
+    )
+    location_id: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+
+
+class ListJobTriggersResponse(proto.Message):
+    r"""Response message for ListJobTriggers.
+
+    Attributes:
+        job_triggers (MutableSequence[google.cloud.dlp_v2.types.JobTrigger]):
+            List of triggeredJobs, up to page_size in
+            ListJobTriggersRequest.
+        next_page_token (str):
+            If the next page is available then the next
+            page token to be used in following
+            ListJobTriggers request.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    job_triggers: MutableSequence["JobTrigger"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="JobTrigger",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class DeleteJobTriggerRequest(proto.Message):
+    r"""Request message for DeleteJobTrigger.
+
+    Attributes:
+        name (str):
+            Required. Resource name of the project and the triggeredJob,
+            for example
+            ``projects/dlp-test-project/jobTriggers/53234423``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class InspectJobConfig(proto.Message):
+    r"""Controls what and how to inspect for findings.
+
+    Attributes:
+        storage_config (google.cloud.dlp_v2.types.StorageConfig):
+            The data to scan.
+        inspect_config (google.cloud.dlp_v2.types.InspectConfig):
+            How and what to scan for.
+        inspect_template_name (str):
+            If provided, will be used as the default for all values in
+            InspectConfig. ``inspect_config`` will be merged into the
+            values persisted as part of the template.
+        actions (MutableSequence[google.cloud.dlp_v2.types.Action]):
+            Actions to execute at the completion of the
+            job.
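Putting the trigger filter grammar above to work, a hedged sketch (parent hypothetical):

```python
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
request = dlp_v2.ListJobTriggersRequest(
    parent="projects/example-project",
    # Restrictions combine with AND/OR; the filter is capped at 500 chars.
    filter="inspected_storage = cloud_storage AND status = HEALTHY",
    order_by="last_run_time desc",
)
for job_trigger in client.list_job_triggers(request=request):
    print(job_trigger.name, job_trigger.status.name)
```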
+ """ + + storage_config: storage.StorageConfig = proto.Field( + proto.MESSAGE, + number=1, + message=storage.StorageConfig, + ) + inspect_config: "InspectConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="InspectConfig", + ) + inspect_template_name: str = proto.Field( + proto.STRING, + number=3, + ) + actions: MutableSequence["Action"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Action", + ) + + +class DataProfileAction(proto.Message): + r"""A task to execute when a data profile has been generated. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + export_data (google.cloud.dlp_v2.types.DataProfileAction.Export): + Export data profiles into a provided + location. + + This field is a member of `oneof`_ ``action``. + pub_sub_notification (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification): + Publish a message into the Pub/Sub topic. + + This field is a member of `oneof`_ ``action``. + """ + + class EventType(proto.Enum): + r"""Types of event that can trigger an action. + + Values: + EVENT_TYPE_UNSPECIFIED (0): + Unused. + NEW_PROFILE (1): + New profile (not a re-profile). + CHANGED_PROFILE (2): + Changed one of the following profile metrics: + + - Table data risk score + - Table sensitivity score + - Table resource visibility + - Table encryption type + - Table predicted infoTypes + - Table other infoTypes + SCORE_INCREASED (3): + Table data risk score or sensitivity score + increased. + ERROR_CHANGED (4): + A user (non-internal) error occurred. + """ + EVENT_TYPE_UNSPECIFIED = 0 + NEW_PROFILE = 1 + CHANGED_PROFILE = 2 + SCORE_INCREASED = 3 + ERROR_CHANGED = 4 + + class Export(proto.Message): + r"""If set, the detailed data profiles will be persisted to the + location of your choice whenever updated. + + Attributes: + profile_table (google.cloud.dlp_v2.types.BigQueryTable): + Store all table and column profiles in an + existing table or a new table in an existing + dataset. Each re-generation will result in a new + row in BigQuery. + """ + + profile_table: storage.BigQueryTable = proto.Field( + proto.MESSAGE, + number=1, + message=storage.BigQueryTable, + ) + + class PubSubNotification(proto.Message): + r"""Send a Pub/Sub message into the given Pub/Sub topic to connect other + systems to data profile generation. The message payload data will be + the byte serialization of ``DataProfilePubSubMessage``. + + Attributes: + topic (str): + Cloud Pub/Sub topic to send notifications to. + Format is projects/{project}/topics/{topic}. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The type of event that triggers a Pub/Sub. At most one + ``PubSubNotification`` per EventType is permitted. + pubsub_condition (google.cloud.dlp_v2.types.DataProfilePubSubCondition): + Conditions (e.g., data risk or sensitivity + level) for triggering a Pub/Sub. + detail_of_message (google.cloud.dlp_v2.types.DataProfileAction.PubSubNotification.DetailLevel): + How much data to include in the Pub/Sub message. If the user + wishes to limit the size of the message, they can use + resource_name and fetch the profile fields they wish to. Per + table profile (not per column). 
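A short sketch of the `Export` action described above, persisting data profiles to an existing BigQuery dataset (identifiers hypothetical):

```python
from google.cloud import dlp_v2

profile_action = dlp_v2.DataProfileAction(
    export_data=dlp_v2.DataProfileAction.Export(
        profile_table=dlp_v2.BigQueryTable(
            project_id="example-project",
            dataset_id="profiles",
            table_id="table_profiles",  # re-profiles append new rows
        )
    )
)
```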
+ """ + + class DetailLevel(proto.Enum): + r"""The levels of detail that can be included in the Pub/Sub + message. + + Values: + DETAIL_LEVEL_UNSPECIFIED (0): + Unused. + TABLE_PROFILE (1): + The full table data profile. + RESOURCE_NAME (2): + The resource name of the table. + """ + DETAIL_LEVEL_UNSPECIFIED = 0 + TABLE_PROFILE = 1 + RESOURCE_NAME = 2 + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + event: "DataProfileAction.EventType" = proto.Field( + proto.ENUM, + number=2, + enum="DataProfileAction.EventType", + ) + pubsub_condition: "DataProfilePubSubCondition" = proto.Field( + proto.MESSAGE, + number=3, + message="DataProfilePubSubCondition", + ) + detail_of_message: "DataProfileAction.PubSubNotification.DetailLevel" = ( + proto.Field( + proto.ENUM, + number=4, + enum="DataProfileAction.PubSubNotification.DetailLevel", + ) + ) + + export_data: Export = proto.Field( + proto.MESSAGE, + number=1, + oneof="action", + message=Export, + ) + pub_sub_notification: PubSubNotification = proto.Field( + proto.MESSAGE, + number=2, + oneof="action", + message=PubSubNotification, + ) + + +class DataProfileJobConfig(proto.Message): + r"""Configuration for setting up a job to scan resources for profile + generation. Only one data profile configuration may exist per + organization, folder, or project. + + The generated data profiles are retained according to the [data + retention policy] + (https://cloud.google.com/dlp/docs/data-profiles#retention). + + Attributes: + location (google.cloud.dlp_v2.types.DataProfileLocation): + The data to scan. + project_id (str): + The project that will run the scan. The DLP + service account that exists within this project + must have access to all resources that are + profiled, and the Cloud DLP API must be enabled. + inspect_templates (MutableSequence[str]): + Detection logic for profile generation. + + Not all template features are used by profiles. + FindingLimits, include_quote and exclude_info_types have no + impact on data profiling. + + Multiple templates may be provided if there is data in + multiple regions. At most one template must be specified + per-region (including "global"). Each region is scanned + using the applicable template. If no region-specific + template is specified, but a "global" template is specified, + it will be copied to that region and used instead. If no + global or region-specific template is provided for a region + with data, that region's data will not be scanned. + + For more information, see + https://cloud.google.com/dlp/docs/data-profiles#data_residency. + data_profile_actions (MutableSequence[google.cloud.dlp_v2.types.DataProfileAction]): + Actions to execute at the completion of the + job. + """ + + location: "DataProfileLocation" = proto.Field( + proto.MESSAGE, + number=1, + message="DataProfileLocation", + ) + project_id: str = proto.Field( + proto.STRING, + number=5, + ) + inspect_templates: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + data_profile_actions: MutableSequence["DataProfileAction"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="DataProfileAction", + ) + + +class DataProfileLocation(proto.Message): + r"""The data that will be profiled. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + organization_id (int): + The ID of an organization to scan. + + This field is a member of `oneof`_ ``location``. + folder_id (int): + The ID of the Folder within an organization + to scan. + + This field is a member of `oneof`_ ``location``. + """ + + organization_id: int = proto.Field( + proto.INT64, + number=1, + oneof="location", + ) + folder_id: int = proto.Field( + proto.INT64, + number=2, + oneof="location", + ) + + +class DlpJob(proto.Message): + r"""Combines all of the information about a DLP job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The server-assigned name. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. + state (google.cloud.dlp_v2.types.DlpJob.JobState): + State of a job. + risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): + Results from analyzing risk of a data source. + + This field is a member of `oneof`_ ``details``. + inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): + Results from inspecting a data source. + + This field is a member of `oneof`_ ``details``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job finished. + job_trigger_name (str): + If created by a job trigger, the resource + name of the trigger that instantiated the job. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + A stream of errors encountered running the + job. + """ + + class JobState(proto.Enum): + r"""Possible states of a job. New items may be added. + + Values: + JOB_STATE_UNSPECIFIED (0): + Unused. + PENDING (1): + The job has not yet started. + RUNNING (2): + The job is currently running. Once a job has + finished it will transition to FAILED or DONE. + DONE (3): + The job is no longer running. + CANCELED (4): + The job was canceled before it could be + completed. + FAILED (5): + The job had an error and did not complete. + ACTIVE (6): + The job is currently accepting findings via + hybridInspect. A hybrid job in ACTIVE state may + continue to have findings added to it through + the calling of hybridInspect. After the job has + finished no more calls to hybridInspect may be + made. ACTIVE jobs can transition to DONE. 
+        """
+        JOB_STATE_UNSPECIFIED = 0
+        PENDING = 1
+        RUNNING = 2
+        DONE = 3
+        CANCELED = 4
+        FAILED = 5
+        ACTIVE = 6
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    type_: "DlpJobType" = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum="DlpJobType",
+    )
+    state: JobState = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=JobState,
+    )
+    risk_details: "AnalyzeDataSourceRiskDetails" = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof="details",
+        message="AnalyzeDataSourceRiskDetails",
+    )
+    inspect_details: "InspectDataSourceDetails" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof="details",
+        message="InspectDataSourceDetails",
+    )
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message=timestamp_pb2.Timestamp,
+    )
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=timestamp_pb2.Timestamp,
+    )
+    end_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=timestamp_pb2.Timestamp,
+    )
+    job_trigger_name: str = proto.Field(
+        proto.STRING,
+        number=10,
+    )
+    errors: MutableSequence["Error"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=11,
+        message="Error",
+    )
+
+
+class GetDlpJobRequest(proto.Message):
+    r"""The request message for [DlpJobs.GetDlpJob][].
+
+    Attributes:
+        name (str):
+            Required. The name of the DlpJob resource.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListDlpJobsRequest(proto.Message):
+    r"""The request message for listing DLP jobs.
+
+    Attributes:
+        parent (str):
+            Required. Parent resource name.
+
+            The format of this value varies depending on whether you
+            have `specified a processing
+            location `__:
+
+            -  Projects scope, location specified:
+               ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+            -  Projects scope, no location specified (defaults to
+               global): ``projects/``\ PROJECT_ID
+
+            The following example ``parent`` string specifies a parent
+            project with the identifier ``example-project``, and
+            specifies the ``europe-west3`` location for processing data:
+
+            ::
+
+                parent=projects/example-project/locations/europe-west3
+        filter (str):
+            Allows filtering.
+
+            Supported syntax:
+
+            -  Filter expressions are made up of one or more
+               restrictions.
+            -  Restrictions can be combined by ``AND`` or ``OR`` logical
+               operators. A sequence of restrictions implicitly uses
+               ``AND``.
+            -  A restriction has the form of
+               ``{field} {operator} {value}``.
+            -  Supported fields/values for inspect jobs:
+
+               -  ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED
+               -  ``inspected_storage`` -
+                  DATASTORE|CLOUD_STORAGE|BIGQUERY
+               -  ``trigger_name`` - The name of the trigger that
+                  created the job.
+               -  ``end_time`` - Corresponds to the time the job
+                  finished.
+               -  ``start_time`` - Corresponds to the time the job
+                  started.
+
+            -  Supported fields for risk analysis jobs:
+
+               -  ``state`` - RUNNING|CANCELED|FINISHED|FAILED
+               -  ``end_time`` - Corresponds to the time the job
+                  finished.
+               -  ``start_time`` - Corresponds to the time the job
+                  started.
+
+            -  The operator must be ``=`` or ``!=``.
+
+            Examples:
+
+            -  inspected_storage = cloud_storage AND state = done
+            -  inspected_storage = cloud_storage OR inspected_storage =
+               bigquery
+            -  inspected_storage = cloud_storage AND (state = done OR
+               state = canceled)
+            -  end_time > "2017-12-12T00:00:00+00:00"
+
+            The length of this field should be no more than 500
+            characters.
+        page_size (int):
+            The standard list page size.
+        page_token (str):
+            The standard list page token.
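Combining the job filter grammar and the `DlpJob` states above, a hedged listing sketch (parent hypothetical):

```python
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
request = dlp_v2.ListDlpJobsRequest(
    parent="projects/example-project/locations/europe-west3",
    filter="inspected_storage = cloud_storage AND state = RUNNING",
    type_=dlp_v2.DlpJobType.INSPECT_JOB,
)
for job in client.list_dlp_jobs(request=request):
    print(job.name, job.state.name)
```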
+ type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. Defaults to ``DlpJobType.INSPECT`` + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, end_time asc, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the job was + created. + - ``end_time``: corresponds to the time the job ended. + - ``name``: corresponds to the job's name. + - ``state``: corresponds to ``state`` + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + type_: "DlpJobType" = proto.Field( + proto.ENUM, + number=5, + enum="DlpJobType", + ) + order_by: str = proto.Field( + proto.STRING, + number=6, + ) + location_id: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListDlpJobsResponse(proto.Message): + r"""The response message for listing DLP jobs. + + Attributes: + jobs (MutableSequence[google.cloud.dlp_v2.types.DlpJob]): + A list of DlpJobs that matches the specified + filter in the request. + next_page_token (str): + The standard List next-page token. + """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence["DlpJob"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DlpJob", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CancelDlpJobRequest(proto.Message): + r"""The request message for canceling a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FinishDlpJobRequest(proto.Message): + r"""The request message for finishing a DLP hybrid job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteDlpJobRequest(proto.Message): + r"""The request message for deleting a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDeidentifyTemplateRequest(proto.Message): + r"""Request message for CreateDeidentifyTemplate. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + Required. 
The DeidentifyTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template: "DeidentifyTemplate" = proto.Field( + proto.MESSAGE, + number=2, + message="DeidentifyTemplate", + ) + template_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateDeidentifyTemplateRequest(proto.Message): + r"""Request message for UpdateDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template: "DeidentifyTemplate" = proto.Field( + proto.MESSAGE, + number=2, + message="DeidentifyTemplate", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetDeidentifyTemplateRequest(proto.Message): + r"""Request message for GetDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeidentifyTemplatesRequest(proto.Message): + r"""Request message for ListDeidentifyTemplates. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListDeidentifyTemplates``. + page_size (int): + Size of the page, can be limited by the + server. If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. 
+ + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the template was + created. + - ``update_time``: corresponds to the time the template was + last updated. + - ``name``: corresponds to the template's name. + - ``display_name``: corresponds to the template's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDeidentifyTemplatesResponse(proto.Message): + r"""Response message for ListDeidentifyTemplates. + + Attributes: + deidentify_templates (MutableSequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): + List of deidentify templates, up to page_size in + ListDeidentifyTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListDeidentifyTemplates request. + """ + + @property + def raw_page(self): + return self + + deidentify_templates: MutableSequence["DeidentifyTemplate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DeidentifyTemplate", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDeidentifyTemplateRequest(proto.Message): + r"""Request message for DeleteDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LargeCustomDictionaryConfig(proto.Message): + r"""Configuration for a custom dictionary created from a data source of + any size up to the maximum size defined in the + `limits `__ page. The artifacts + of dictionary creation are stored in the specified Cloud Storage + location. Consider using ``CustomInfoType.Dictionary`` for smaller + dictionaries that satisfy the size requirements. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + output_path (google.cloud.dlp_v2.types.CloudStoragePath): + Location to store dictionary artifacts in + Cloud Storage. These files will only be + accessible by project owners and the DLP API. If + any of these artifacts are modified, the + dictionary is considered invalid and can no + longer be used. + cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): + Set of files containing newline-delimited + lists of dictionary phrases. + + This field is a member of `oneof`_ ``source``. + big_query_field (google.cloud.dlp_v2.types.BigQueryField): + Field in a BigQuery table where each cell + represents a dictionary phrase. + + This field is a member of `oneof`_ ``source``. 
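A minimal sketch of the `LargeCustomDictionaryConfig` source/output pairing above (both Cloud Storage locations hypothetical; the artifacts under `output_path` must not be modified afterwards, or the dictionary becomes invalid):

```python
from google.cloud import dlp_v2

large_dictionary = dlp_v2.LargeCustomDictionaryConfig(
    # Where DLP writes the dictionary artifacts it builds.
    output_path=dlp_v2.CloudStoragePath(
        path="gs://example-bucket/dlp/dictionary-artifacts/"
    ),
    # Newline-delimited phrases; ``big_query_field`` is the other
    # oneof option for sourcing phrases.
    cloud_storage_file_set=dlp_v2.CloudStorageFileSet(
        url="gs://example-bucket/dlp/phrases.txt"
    ),
)
```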
+    """
+
+    output_path: storage.CloudStoragePath = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=storage.CloudStoragePath,
+    )
+    cloud_storage_file_set: storage.CloudStorageFileSet = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="source",
+        message=storage.CloudStorageFileSet,
+    )
+    big_query_field: storage.BigQueryField = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof="source",
+        message=storage.BigQueryField,
+    )
+
+
+class LargeCustomDictionaryStats(proto.Message):
+    r"""Summary statistics of a custom dictionary.
+
+    Attributes:
+        approx_num_phrases (int):
+            Approximate number of distinct phrases in the
+            dictionary.
+    """
+
+    approx_num_phrases: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+
+
+class StoredInfoTypeConfig(proto.Message):
+    r"""Configuration for stored infoTypes. All fields and subfields
+    are provided by the user. For more information, see
+    https://cloud.google.com/dlp/docs/creating-custom-infotypes.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        display_name (str):
+            Display name of the StoredInfoType (max 256
+            characters).
+        description (str):
+            Description of the StoredInfoType (max 256
+            characters).
+        large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig):
+            StoredInfoType where findings are defined by
+            a dictionary of phrases.
+
+            This field is a member of `oneof`_ ``type``.
+        dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary):
+            Store dictionary-based CustomInfoType.
+
+            This field is a member of `oneof`_ ``type``.
+        regex (google.cloud.dlp_v2.types.CustomInfoType.Regex):
+            Store regular expression-based
+            StoredInfoType.
+
+            This field is a member of `oneof`_ ``type``.
+    """
+
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    large_custom_dictionary: "LargeCustomDictionaryConfig" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof="type",
+        message="LargeCustomDictionaryConfig",
+    )
+    dictionary: storage.CustomInfoType.Dictionary = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof="type",
+        message=storage.CustomInfoType.Dictionary,
+    )
+    regex: storage.CustomInfoType.Regex = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof="type",
+        message=storage.CustomInfoType.Regex,
+    )
+
+
+class StoredInfoTypeStats(proto.Message):
+    r"""Statistics for a StoredInfoType.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats):
+            StoredInfoType where findings are defined by
+            a dictionary of phrases.
+
+            This field is a member of `oneof`_ ``type``.
+    """
+
+    large_custom_dictionary: "LargeCustomDictionaryStats" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof="type",
+        message="LargeCustomDictionaryStats",
+    )
+
+
+class StoredInfoTypeVersion(proto.Message):
+    r"""Version of a StoredInfoType, including the configuration used
+    to build it, create timestamp, and current state.
+
+    Attributes:
+        config (google.cloud.dlp_v2.types.StoredInfoTypeConfig):
+            StoredInfoType configuration.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Create timestamp of the version.
Read-only, + determined by the system when the version is + created. + state (google.cloud.dlp_v2.types.StoredInfoTypeState): + Stored info type version state. Read-only, + updated by the system during dictionary + creation. + errors (MutableSequence[google.cloud.dlp_v2.types.Error]): + Errors that occurred when creating this storedInfoType + version, or anomalies detected in the storedInfoType data + that render it unusable. Only the five most recent errors + will be displayed, with the most recent error appearing + first. + + For example, some of the data for stored custom dictionaries + is put in the user's Cloud Storage bucket, and if this data + is modified or deleted by the user or another system, the + dictionary becomes invalid. + + If any errors occur, fix the problem indicated by the error + message and use the UpdateStoredInfoType API method to + create another version of the storedInfoType to continue + using it, reusing the same ``config`` if it was not the + source of the error. + stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): + Statistics about this storedInfoType version. + """ + + config: "StoredInfoTypeConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="StoredInfoTypeConfig", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: "StoredInfoTypeState" = proto.Field( + proto.ENUM, + number=3, + enum="StoredInfoTypeState", + ) + errors: MutableSequence["Error"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="Error", + ) + stats: "StoredInfoTypeStats" = proto.Field( + proto.MESSAGE, + number=5, + message="StoredInfoTypeStats", + ) + + +class StoredInfoType(proto.Message): + r"""StoredInfoType resource message that contains information + about the current version and any pending updates. + + Attributes: + name (str): + Resource name. + current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): + Current version of the stored info type. + pending_versions (MutableSequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): + Pending versions of the stored info type. + Empty if no versions are pending. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + current_version: "StoredInfoTypeVersion" = proto.Field( + proto.MESSAGE, + number=2, + message="StoredInfoTypeVersion", + ) + pending_versions: MutableSequence["StoredInfoTypeVersion"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="StoredInfoTypeVersion", + ) + + +class CreateStoredInfoTypeRequest(proto.Message): + r"""Request message for CreateStoredInfoType. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the storedInfoType + to create. + stored_info_type_id (str): + The storedInfoType ID can contain uppercase and lowercase + letters, numbers, and hyphens; that is, it must match the + regular expression: ``[a-zA-Z\d-_]+``. The maximum length is + 100 characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + config: "StoredInfoTypeConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="StoredInfoTypeConfig", + ) + stored_info_type_id: str = proto.Field( + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateStoredInfoTypeRequest(proto.Message): + r"""Request message for UpdateStoredInfoType. + + Attributes: + name (str): + Required. Resource name of organization and storedInfoType + to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the storedInfoType. + If not provided, a new version of the + storedInfoType will be created with the existing + configuration. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: "StoredInfoTypeConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="StoredInfoTypeConfig", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetStoredInfoTypeRequest(proto.Message): + r"""Request message for GetStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListStoredInfoTypesRequest(proto.Message): + r"""Request message for ListStoredInfoTypes. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListStoredInfoTypes``. + page_size (int): + Size of the page, can be limited by the + server. If zero server returns a page of max + size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, display_name, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to the time the most recent + version of the resource was created. + - ``state``: corresponds to the state of the resource. + - ``name``: corresponds to resource name. + - ``display_name``: corresponds to info type's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListStoredInfoTypesResponse(proto.Message): + r"""Response message for ListStoredInfoTypes. + + Attributes: + stored_info_types (MutableSequence[google.cloud.dlp_v2.types.StoredInfoType]): + List of storedInfoTypes, up to page_size in + ListStoredInfoTypesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListStoredInfoTypes request. + """ + + @property + def raw_page(self): + return self + + stored_info_types: MutableSequence["StoredInfoType"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="StoredInfoType", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteStoredInfoTypeRequest(proto.Message): + r"""Request message for DeleteStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class HybridInspectJobTriggerRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the trigger to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. 
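+
+    A minimal request sketch (the trigger name reuses the example above;
+    the inspected value is illustrative)::
+
+        from google.cloud import dlp_v2
+
+        request = dlp_v2.HybridInspectJobTriggerRequest(
+            name="projects/dlp-test-project/jobTriggers/53234423",
+            hybrid_item=dlp_v2.HybridContentItem(
+                item=dlp_v2.ContentItem(value="My phone number is (415) 555-0100"),
+            ),
+        )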
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: "HybridContentItem" = proto.Field( + proto.MESSAGE, + number=3, + message="HybridContentItem", + ) + + +class HybridInspectDlpJobRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item: "HybridContentItem" = proto.Field( + proto.MESSAGE, + number=3, + message="HybridContentItem", + ) + + +class HybridContentItem(proto.Message): + r"""An individual hybrid item to inspect. Will be stored + temporarily during processing. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): + Supplementary information that will be added + to each finding. + """ + + item: "ContentItem" = proto.Field( + proto.MESSAGE, + number=1, + message="ContentItem", + ) + finding_details: "HybridFindingDetails" = proto.Field( + proto.MESSAGE, + number=2, + message="HybridFindingDetails", + ) + + +class HybridFindingDetails(proto.Message): + r"""Populate to associate additional data with each finding. + + Attributes: + container_details (google.cloud.dlp_v2.types.Container): + Details about the container where the content + being inspected is from. + file_offset (int): + Offset in bytes of the line, from the + beginning of the file, where the finding is + located. Populate if the item being scanned is + only part of a bigger item, such as a shard of a + file and you want to track the absolute position + of the finding. + row_offset (int): + Offset of the row for tables. Populate if the + row(s) being scanned are part of a bigger + dataset and you want to keep track of their + absolute position. + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional information to make + findings meaningful such as the columns that are primary + keys. If not known ahead of time, can also be set within + each inspect hybrid call and the two will be merged. Note + that identifying_fields will only be stored to BigQuery, and + only if the BigQuery action has been included. + labels (MutableMapping[str, str]): + Labels to represent user provided metadata about the data + being inspected. If configured by the job, some key values + may be required. The labels associated with ``Finding``'s + produced by hybrid inspection. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. 
+ + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + """ + + container_details: "Container" = proto.Field( + proto.MESSAGE, + number=1, + message="Container", + ) + file_offset: int = proto.Field( + proto.INT64, + number=2, + ) + row_offset: int = proto.Field( + proto.INT64, + number=3, + ) + table_options: storage.TableOptions = proto.Field( + proto.MESSAGE, + number=4, + message=storage.TableOptions, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class HybridInspectResponse(proto.Message): + r"""Quota exceeded errors will be thrown once quota has been met.""" + + +class DataRiskLevel(proto.Message): + r"""Score is a summary of all elements in the data profile. + A higher number means more risk. + + Attributes: + score (google.cloud.dlp_v2.types.DataRiskLevel.DataRiskLevelScore): + The score applied to the resource. + """ + + class DataRiskLevelScore(proto.Enum): + r"""Various score levels for resources. + + Values: + RISK_SCORE_UNSPECIFIED (0): + Unused. + RISK_LOW (10): + Low risk - Lower indication of sensitive data + that appears to have additional access + restrictions in place or no indication of + sensitive data found. + RISK_MODERATE (20): + Medium risk - Sensitive data may be present + but additional access or fine grain access + restrictions appear to be present. Consider + limiting access even further or transform data + to mask. + RISK_HIGH (30): + High risk – SPII may be present. Access + controls may include public ACLs. Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. + """ + RISK_SCORE_UNSPECIFIED = 0 + RISK_LOW = 10 + RISK_MODERATE = 20 + RISK_HIGH = 30 + + score: DataRiskLevelScore = proto.Field( + proto.ENUM, + number=1, + enum=DataRiskLevelScore, + ) + + +class DataProfileConfigSnapshot(proto.Message): + r"""Snapshot of the configurations used to generate the profile. + + Attributes: + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + A copy of the inspection config used to generate this + profile. This is a copy of the inspect_template specified in + ``DataProfileJobConfig``. + data_profile_job (google.cloud.dlp_v2.types.DataProfileJobConfig): + A copy of the configuration used to generate + this profile. + """ + + inspect_config: "InspectConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="InspectConfig", + ) + data_profile_job: "DataProfileJobConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="DataProfileJobConfig", + ) + + +class TableDataProfile(proto.Message): + r"""The profile for a scanned table. + + Attributes: + name (str): + The name of the profile. + project_data_profile (str): + The resource name to the project data profile + for this table. + dataset_project_id (str): + The GCP project ID that owns the BigQuery + dataset. + dataset_location (str): + The BigQuery location where the dataset's + data is stored. See + https://cloud.google.com/bigquery/docs/locations + for supported locations. + dataset_id (str): + The BigQuery dataset ID. + table_id (str): + The BigQuery table ID. + full_resource (str): + The resource name of the table. + https://cloud.google.com/apis/design/resource_names#full_resource_name + profile_status (google.cloud.dlp_v2.types.ProfileStatus): + Success or error status from the most recent + profile generation attempt. May be empty if the + profile is still being generated. 
+ state (google.cloud.dlp_v2.types.TableDataProfile.State): + State of a profile. + sensitivity_score (google.cloud.dlp_v2.types.SensitivityScore): + The sensitivity score of this table. + data_risk_level (google.cloud.dlp_v2.types.DataRiskLevel): + The data risk level of this table. + predicted_info_types (MutableSequence[google.cloud.dlp_v2.types.InfoTypeSummary]): + The infoTypes predicted from this table's + data. + other_info_types (MutableSequence[google.cloud.dlp_v2.types.OtherInfoTypeSummary]): + Other infoTypes found in this table's data. + config_snapshot (google.cloud.dlp_v2.types.DataProfileConfigSnapshot): + The snapshot of the configurations used to + generate the profile. + last_modified_time (google.protobuf.timestamp_pb2.Timestamp): + The time when this table was last modified + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time when this table expires. + scanned_column_count (int): + The number of columns profiled in the table. + failed_column_count (int): + The number of columns skipped in the table + because of an error. + table_size_bytes (int): + The size of the table when the profile was + generated. + row_count (int): + Number of rows in the table when the profile + was generated. This will not be populated for + BigLake tables. + encryption_status (google.cloud.dlp_v2.types.EncryptionStatus): + How the table is encrypted. + resource_visibility (google.cloud.dlp_v2.types.ResourceVisibility): + How broadly a resource has been shared. + profile_last_generated (google.protobuf.timestamp_pb2.Timestamp): + The last time the profile was generated. + resource_labels (MutableMapping[str, str]): + The labels applied to the resource at the + time the profile was generated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the table was created. + """ + + class State(proto.Enum): + r"""Possible states of a profile. New items may be added. + + Values: + STATE_UNSPECIFIED (0): + Unused. + RUNNING (1): + The profile is currently running. Once a + profile has finished it will transition to DONE. + DONE (2): + The profile is no longer generating. If + profile_status.status.code is 0, the profile succeeded, + otherwise, it failed. 
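+
+        For example, a sketch of interpreting a finished profile, where
+        ``profile`` is a hypothetical ``TableDataProfile`` instance::
+
+            from google.cloud import dlp_v2
+
+            if profile.state == dlp_v2.TableDataProfile.State.DONE:
+                succeeded = profile.profile_status.status.code == 0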
+ """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + DONE = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + project_data_profile: str = proto.Field( + proto.STRING, + number=2, + ) + dataset_project_id: str = proto.Field( + proto.STRING, + number=24, + ) + dataset_location: str = proto.Field( + proto.STRING, + number=29, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=25, + ) + table_id: str = proto.Field( + proto.STRING, + number=26, + ) + full_resource: str = proto.Field( + proto.STRING, + number=3, + ) + profile_status: "ProfileStatus" = proto.Field( + proto.MESSAGE, + number=21, + message="ProfileStatus", + ) + state: State = proto.Field( + proto.ENUM, + number=22, + enum=State, + ) + sensitivity_score: storage.SensitivityScore = proto.Field( + proto.MESSAGE, + number=5, + message=storage.SensitivityScore, + ) + data_risk_level: "DataRiskLevel" = proto.Field( + proto.MESSAGE, + number=6, + message="DataRiskLevel", + ) + predicted_info_types: MutableSequence["InfoTypeSummary"] = proto.RepeatedField( + proto.MESSAGE, + number=27, + message="InfoTypeSummary", + ) + other_info_types: MutableSequence["OtherInfoTypeSummary"] = proto.RepeatedField( + proto.MESSAGE, + number=28, + message="OtherInfoTypeSummary", + ) + config_snapshot: "DataProfileConfigSnapshot" = proto.Field( + proto.MESSAGE, + number=7, + message="DataProfileConfigSnapshot", + ) + last_modified_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + scanned_column_count: int = proto.Field( + proto.INT64, + number=10, + ) + failed_column_count: int = proto.Field( + proto.INT64, + number=11, + ) + table_size_bytes: int = proto.Field( + proto.INT64, + number=12, + ) + row_count: int = proto.Field( + proto.INT64, + number=13, + ) + encryption_status: "EncryptionStatus" = proto.Field( + proto.ENUM, + number=14, + enum="EncryptionStatus", + ) + resource_visibility: "ResourceVisibility" = proto.Field( + proto.ENUM, + number=15, + enum="ResourceVisibility", + ) + profile_last_generated: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=16, + message=timestamp_pb2.Timestamp, + ) + resource_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=17, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=23, + message=timestamp_pb2.Timestamp, + ) + + +class ProfileStatus(proto.Message): + r""" + + Attributes: + status (google.rpc.status_pb2.Status): + Profiling status code and optional message + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Time when the profile generation status was + updated + """ + + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class InfoTypeSummary(proto.Message): + r"""The infoType details for this column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The infoType. + estimated_prevalence (int): + Not populated for predicted infotypes. 
+ """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class OtherInfoTypeSummary(proto.Message): + r"""Infotype details for other infoTypes found within a column. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The other infoType. + estimated_prevalence (int): + Approximate percentage of non-null rows that + contained data detected by this infotype. + """ + + info_type: storage.InfoType = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + estimated_prevalence: int = proto.Field( + proto.INT32, + number=2, + ) + + +class DataProfilePubSubCondition(proto.Message): + r"""A condition for determining whether a Pub/Sub should be + triggered. + + Attributes: + expressions (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions): + An expression. + """ + + class ProfileScoreBucket(proto.Enum): + r"""Various score levels for resources. + + Values: + PROFILE_SCORE_BUCKET_UNSPECIFIED (0): + Unused. + HIGH (1): + High risk/sensitivity detected. + MEDIUM_OR_HIGH (2): + Medium or high risk/sensitivity detected. + """ + PROFILE_SCORE_BUCKET_UNSPECIFIED = 0 + HIGH = 1 + MEDIUM_OR_HIGH = 2 + + class PubSubCondition(proto.Message): + r"""A condition consisting of a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + minimum_risk_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum data risk score that triggers the + condition. + + This field is a member of `oneof`_ ``value``. + minimum_sensitivity_score (google.cloud.dlp_v2.types.DataProfilePubSubCondition.ProfileScoreBucket): + The minimum sensitivity level that triggers + the condition. + + This field is a member of `oneof`_ ``value``. + """ + + minimum_risk_score: "DataProfilePubSubCondition.ProfileScoreBucket" = ( + proto.Field( + proto.ENUM, + number=1, + oneof="value", + enum="DataProfilePubSubCondition.ProfileScoreBucket", + ) + ) + minimum_sensitivity_score: "DataProfilePubSubCondition.ProfileScoreBucket" = ( + proto.Field( + proto.ENUM, + number=2, + oneof="value", + enum="DataProfilePubSubCondition.ProfileScoreBucket", + ) + ) + + class PubSubExpressions(proto.Message): + r"""An expression, consisting of an operator and conditions. + + Attributes: + logical_operator (google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator): + The operator to apply to the collection of + conditions. + conditions (MutableSequence[google.cloud.dlp_v2.types.DataProfilePubSubCondition.PubSubCondition]): + Conditions to apply to the expression. + """ + + class PubSubLogicalOperator(proto.Enum): + r"""Logical operators for conditional checks. + + Values: + LOGICAL_OPERATOR_UNSPECIFIED (0): + Unused. + OR (1): + Conditional OR. + AND (2): + Conditional AND. 
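+
+            For example, a sketch of an ``OR`` over two conditions::
+
+                from google.cloud import dlp_v2
+
+                cond = dlp_v2.DataProfilePubSubCondition
+                expressions = cond.PubSubExpressions(
+                    logical_operator=cond.PubSubExpressions.PubSubLogicalOperator.OR,
+                    conditions=[
+                        cond.PubSubCondition(
+                            minimum_risk_score=cond.ProfileScoreBucket.HIGH
+                        ),
+                        cond.PubSubCondition(
+                            minimum_sensitivity_score=cond.ProfileScoreBucket.MEDIUM_OR_HIGH
+                        ),
+                    ],
+                )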
+ """ + LOGICAL_OPERATOR_UNSPECIFIED = 0 + OR = 1 + AND = 2 + + logical_operator: "DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator" = proto.Field( + proto.ENUM, + number=1, + enum="DataProfilePubSubCondition.PubSubExpressions.PubSubLogicalOperator", + ) + conditions: MutableSequence[ + "DataProfilePubSubCondition.PubSubCondition" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="DataProfilePubSubCondition.PubSubCondition", + ) + + expressions: PubSubExpressions = proto.Field( + proto.MESSAGE, + number=1, + message=PubSubExpressions, + ) + + +class DataProfilePubSubMessage(proto.Message): + r"""Pub/Sub topic message for a + DataProfileAction.PubSubNotification event. To receive a message + of protocol buffer schema type, convert the message data to an + object of this proto class. + + Attributes: + profile (google.cloud.dlp_v2.types.TableDataProfile): + If ``DetailLevel`` is ``TABLE_PROFILE`` this will be fully + populated. Otherwise, if ``DetailLevel`` is + ``RESOURCE_NAME``, then only ``name`` and ``full_resource`` + will be populated. + event (google.cloud.dlp_v2.types.DataProfileAction.EventType): + The event that caused the Pub/Sub message to + be sent. + """ + + profile: "TableDataProfile" = proto.Field( + proto.MESSAGE, + number=1, + message="TableDataProfile", + ) + event: "DataProfileAction.EventType" = proto.Field( + proto.ENUM, + number=2, + enum="DataProfileAction.EventType", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py new file mode 100644 index 000000000000..c844d07551d0 --- /dev/null +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/types/storage.py @@ -0,0 +1,1489 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.privacy.dlp.v2", + manifest={ + "Likelihood", + "FileType", + "InfoType", + "SensitivityScore", + "StoredType", + "CustomInfoType", + "FieldId", + "PartitionId", + "KindExpression", + "DatastoreOptions", + "CloudStorageRegexFileSet", + "CloudStorageOptions", + "CloudStorageFileSet", + "CloudStoragePath", + "BigQueryOptions", + "StorageConfig", + "HybridOptions", + "BigQueryKey", + "DatastoreKey", + "Key", + "RecordKey", + "BigQueryTable", + "BigQueryField", + "EntityId", + "TableOptions", + }, +) + + +class Likelihood(proto.Enum): + r"""Categorization of results based on how likely they are to + represent a match, based on the number of elements they contain + which imply a match. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Default value; same as POSSIBLE. + VERY_UNLIKELY (1): + Few matching elements. + UNLIKELY (2): + No description available. + POSSIBLE (3): + Some matching elements. 
+        LIKELY (4):
+            No description available.
+        VERY_LIKELY (5):
+            Many matching elements.
+    """
+    LIKELIHOOD_UNSPECIFIED = 0
+    VERY_UNLIKELY = 1
+    UNLIKELY = 2
+    POSSIBLE = 3
+    LIKELY = 4
+    VERY_LIKELY = 5
+
+
+class FileType(proto.Enum):
+    r"""Definitions of file type groups to scan. New types will be
+    added to this list.
+
+    Values:
+        FILE_TYPE_UNSPECIFIED (0):
+            Includes all files.
+        BINARY_FILE (1):
+            Includes all file extensions not covered by another entry.
+            Binary scanning attempts to convert the content of the file
+            to UTF-8 to scan the file. If you wish to avoid this
+            fallback, specify one or more of the other FileTypes in your
+            storage scan.
+        TEXT_FILE (2):
+            Included file extensions:
+
+            asc, asp, aspx, brf, c, cc, cfm, cgi, cpp, csv,
+            cxx, c++, cs, css, dart, dat, dot, eml,
+            epub, ged, go, h, hh, hpp, hxx, h++, hs, html,
+            htm, mkd, markdown, m, ml, mli, perl, pl,
+            plist, pm, php, phtml, pht, properties, py,
+            pyw, rb, rbw, rs, rss, rc, scala, sh, sql,
+            swift, tex, shtml, shtm, xhtml, lhs, ics, ini,
+            java, js, json, kix, kml, ocaml, md, txt,
+            text, tsv, vb, vcard, vcs, wml, xcodeproj, xml,
+            xsl, xsd, yml, yaml.
+        IMAGE (3):
+            Included file extensions: bmp, gif, jpg, jpeg, jpe, png.
+            bytes_limit_per_file has no effect on image files. Image
+            inspection is restricted to 'global', 'us', 'asia', and
+            'europe'.
+        WORD (5):
+            Word files >30 MB will be scanned as binary
+            files. Included file extensions:
+
+            docx, dotx, docm, dotm
+        PDF (6):
+            PDF files >30 MB will be scanned as binary
+            files. Included file extensions:
+
+            pdf
+        AVRO (7):
+            Included file extensions:
+
+            avro
+        CSV (8):
+            Included file extensions:
+
+            csv
+        TSV (9):
+            Included file extensions:
+
+            tsv
+        POWERPOINT (11):
+            PowerPoint files >30 MB will be scanned as
+            binary files. Included file extensions:
+
+            pptx, pptm, potx, potm, pot
+        EXCEL (12):
+            Excel files >30 MB will be scanned as binary
+            files. Included file extensions:
+
+            xlsx, xlsm, xltx, xltm
+    """
+    FILE_TYPE_UNSPECIFIED = 0
+    BINARY_FILE = 1
+    TEXT_FILE = 2
+    IMAGE = 3
+    WORD = 5
+    PDF = 6
+    AVRO = 7
+    CSV = 8
+    TSV = 9
+    POWERPOINT = 11
+    EXCEL = 12
+
+
+class InfoType(proto.Message):
+    r"""Type of information detected by the API.
+
+    Attributes:
+        name (str):
+            Name of the information type. Either a name of your choosing
+            when creating a CustomInfoType, or one of the names listed
+            at https://cloud.google.com/dlp/docs/infotypes-reference
+            when specifying a built-in type. When sending Cloud DLP
+            results to Data Catalog, infoType names should conform to
+            the pattern ``[A-Za-z0-9$_-]{1,64}``.
+        version (str):
+            Optional version name for this InfoType.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    version: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class SensitivityScore(proto.Message):
+    r"""Score is a summary of all elements in the data profile.
+    A higher number means more sensitive.
+
+    Attributes:
+        score (google.cloud.dlp_v2.types.SensitivityScore.SensitivityScoreLevel):
+            The score applied to the resource.
+    """
+
+    class SensitivityScoreLevel(proto.Enum):
+        r"""Various score levels for resources.
+
+        Values:
+            SENSITIVITY_SCORE_UNSPECIFIED (0):
+                Unused.
+            SENSITIVITY_LOW (10):
+                No sensitive information detected. Limited
+                access.
+            SENSITIVITY_MODERATE (20):
+                Medium risk - PII, potentially sensitive
+                data, or fields with free-text data that are at
+                higher risk of having intermittent sensitive
+                data. Consider limiting access.
+            SENSITIVITY_HIGH (30):
+                High risk – SPII may be present.
Exfiltration + of data may lead to user data loss. + Re-identification of users may be possible. + Consider limiting usage and or removing SPII. + """ + SENSITIVITY_SCORE_UNSPECIFIED = 0 + SENSITIVITY_LOW = 10 + SENSITIVITY_MODERATE = 20 + SENSITIVITY_HIGH = 30 + + score: SensitivityScoreLevel = proto.Field( + proto.ENUM, + number=1, + enum=SensitivityScoreLevel, + ) + + +class StoredType(proto.Message): + r"""A reference to a StoredInfoType to use with scanning. + + Attributes: + name (str): + Resource name of the requested ``StoredInfoType``, for + example + ``organizations/433245324/storedInfoTypes/432452342`` or + ``projects/project-id/storedInfoTypes/432452342``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp indicating when the version of the + ``StoredInfoType`` used for inspection was created. + Output-only field, populated by the system. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class CustomInfoType(proto.Message): + r"""Custom information type provided by the user. Used to find + domain-specific sensitive information configurable to the data + in question. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + CustomInfoType can either be a new infoType, or an extension + of built-in infoType, when the name matches one of existing + infoTypes and that infoType is specified in + ``InspectContent.info_types`` field. Specifying the latter + adds findings to the one detected by the system. If built-in + info type is not specified in ``InspectContent.info_types`` + list then the name is treated as a custom info type. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Likelihood to return for this CustomInfoType. This base + value can be altered by a detection rule if the finding + meets the criteria specified by the rule. Defaults to + ``VERY_LIKELY`` if not specified. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + A list of phrases to detect as a + CustomInfoType. + + This field is a member of `oneof`_ ``type``. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression based CustomInfoType. + + This field is a member of `oneof`_ ``type``. + surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): + Message for detecting output from + deidentification transformations that support + reversing. + + This field is a member of `oneof`_ ``type``. + stored_type (google.cloud.dlp_v2.types.StoredType): + Load an existing ``StoredInfoType`` resource for use in + ``InspectDataSource``. Not currently supported in + ``InspectContent``. + + This field is a member of `oneof`_ ``type``. + detection_rules (MutableSequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): + Set of detection rules to apply to all findings of this + CustomInfoType. Rules are applied in order that they are + specified. Not supported for the ``surrogate_type`` + CustomInfoType. + exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): + If set to EXCLUSION_TYPE_EXCLUDE this infoType will not + cause a finding to be returned. 
It still can be used for + rules matching. + """ + + class ExclusionType(proto.Enum): + r""" + + Values: + EXCLUSION_TYPE_UNSPECIFIED (0): + A finding of this custom info type will not + be excluded from results. + EXCLUSION_TYPE_EXCLUDE (1): + A finding of this custom info type will be + excluded from final results, but can still + affect rule execution. + """ + EXCLUSION_TYPE_UNSPECIFIED = 0 + EXCLUSION_TYPE_EXCLUDE = 1 + + class Dictionary(proto.Message): + r"""Custom information type based on a dictionary of words or phrases. + This can be used to match sensitive information specific to the + data, such as a list of employee IDs or job titles. + + Dictionary words are case-insensitive and all characters other than + letters and digits in the unicode `Basic Multilingual + Plane `__ + will be replaced with whitespace when scanning for matches, so the + dictionary phrase "Sam Johnson" will match all three phrases "sam + johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the + characters surrounding any match must be of a different type than + the adjacent characters within the word, so letters must be next to + non-letters and digits next to non-digits. For example, the + dictionary word "jen" will match the first three letters of the text + "jen123" but will return no matches for "jennifer". + + Dictionary words containing a large number of characters that are + not letters or digits may result in unexpected findings because such + characters are treated as whitespace. The + `limits `__ page contains + details about the size limits of dictionaries. For dictionaries that + do not fit within these constraints, consider using + ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): + List of words or phrases to search for. + + This field is a member of `oneof`_ ``source``. + cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): + Newline-delimited file of words in Cloud + Storage. Only a single file is accepted. + + This field is a member of `oneof`_ ``source``. + """ + + class WordList(proto.Message): + r"""Message defining a list of words or phrases to search for in + the data. + + Attributes: + words (MutableSequence[str]): + Words or phrases defining the dictionary. The dictionary + must contain at least one phrase and every phrase must + contain at least 2 characters that are letters or digits. + [required] + """ + + words: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + word_list: "CustomInfoType.Dictionary.WordList" = proto.Field( + proto.MESSAGE, + number=1, + oneof="source", + message="CustomInfoType.Dictionary.WordList", + ) + cloud_storage_path: "CloudStoragePath" = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message="CloudStoragePath", + ) + + class Regex(proto.Message): + r"""Message defining a custom regular expression. + + Attributes: + pattern (str): + Pattern defining the regular expression. Its + syntax + (https://github.com/google/re2/wiki/Syntax) can + be found under the google/re2 repository on + GitHub. 
+
+            group_indexes (MutableSequence[int]):
+                The index of the submatch to extract as
+                findings. When not specified, the entire match
+                is returned. No more than 3 may be included.
+        """
+
+        pattern: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        group_indexes: MutableSequence[int] = proto.RepeatedField(
+            proto.INT32,
+            number=2,
+        )
+
+    class SurrogateType(proto.Message):
+        r"""Message for detecting output from deidentification transformations
+        such as
+        ```CryptoReplaceFfxFpeConfig`` <https://cloud.google.com/dlp/docs/reference/rest/v2/organizations.deidentifyTemplates#cryptoreplaceffxfpeconfig>`__.
+        These types of transformations are those that perform
+        pseudonymization, thereby producing a "surrogate" as output. This
+        should be used in conjunction with a field on the transformation
+        such as ``surrogate_info_type``. This CustomInfoType does not
+        support the use of ``detection_rules``.
+
+        """
+
+    class DetectionRule(proto.Message):
+        r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a
+        ``CustomInfoType`` to alter behavior under certain circumstances,
+        depending on the specific details of the rule. Not supported for the
+        ``surrogate_type`` custom infoType.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule):
+                Hotword-based detection rule.
+
+                This field is a member of `oneof`_ ``type``.
+        """
+
+        class Proximity(proto.Message):
+            r"""Message for specifying a window around a finding to apply a
+            detection rule.
+
+            Attributes:
+                window_before (int):
+                    Number of characters before the finding to consider. For
+                    tabular data, if you want to modify the likelihood of an
+                    entire column of findings, set this to 1. For more
+                    information, see [Hotword example: Set the match likelihood
+                    of a table column]
+                    (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values).
+                window_after (int):
+                    Number of characters after the finding to
+                    consider.
+            """
+
+            window_before: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+            window_after: int = proto.Field(
+                proto.INT32,
+                number=2,
+            )
+
+        class LikelihoodAdjustment(proto.Message):
+            r"""Message for specifying an adjustment to the likelihood of a
+            finding as part of a detection rule.
+
+            This message has `oneof`_ fields (mutually exclusive fields).
+            For each oneof, at most one member field can be set at the same time.
+            Setting any member of the oneof automatically clears all other
+            members.
+
+            .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+            Attributes:
+                fixed_likelihood (google.cloud.dlp_v2.types.Likelihood):
+                    Set the likelihood of a finding to a fixed
+                    value.
+
+                    This field is a member of `oneof`_ ``adjustment``.
+                relative_likelihood (int):
+                    Increase or decrease the likelihood by the specified number
+                    of levels. For example, if a finding would be ``POSSIBLE``
+                    without the detection rule and ``relative_likelihood`` is 1,
+                    then it is upgraded to ``LIKELY``, while a value of -1 would
+                    downgrade it to ``UNLIKELY``. Likelihood may never drop
+                    below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so
+                    applying an adjustment of 1 followed by an adjustment of -1
+                    when base likelihood is ``VERY_LIKELY`` will result in a
+                    final likelihood of ``LIKELY``.
+
+                    This field is a member of `oneof`_ ``adjustment``.
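+
+            For example, a sketch of both forms (only one may be set at
+            a time, since they share the ``adjustment`` oneof)::
+
+                from google.cloud import dlp_v2
+
+                Adjustment = dlp_v2.CustomInfoType.DetectionRule.LikelihoodAdjustment
+                fixed = Adjustment(fixed_likelihood=dlp_v2.Likelihood.VERY_LIKELY)
+                relative = Adjustment(relative_likelihood=-1)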
+            """
+
+            fixed_likelihood: "Likelihood" = proto.Field(
+                proto.ENUM,
+                number=1,
+                oneof="adjustment",
+                enum="Likelihood",
+            )
+            relative_likelihood: int = proto.Field(
+                proto.INT32,
+                number=2,
+                oneof="adjustment",
+            )
+
+        class HotwordRule(proto.Message):
+            r"""The rule that adjusts the likelihood of findings within a
+            certain proximity of hotwords.
+
+            Attributes:
+                hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex):
+                    Regular expression pattern defining what
+                    qualifies as a hotword.
+                proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity):
+                    Range of characters within which the entire hotword must
+                    reside. The total length of the window cannot exceed 1000
+                    characters. The finding itself will be included in the
+                    window, so that hotwords can be used to match substrings of
+                    the finding itself. Suppose you want Cloud DLP to promote
+                    the likelihood of the phone number regex "\(\d{3}\)
+                    \d{3}-\d{4}" if the area code is known to be the area code
+                    of a company's office. In this case, use the hotword regex
+                    "\(xxx\)", where "xxx" is the area code in question.
+
+                    For tabular data, if you want to modify the likelihood of an
+                    entire column of findings, see [Hotword example: Set the
+                    match likelihood of a table column]
+                    (https://cloud.google.com/dlp/docs/creating-custom-infotypes-likelihood#match-column-values).
+                likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment):
+                    Likelihood adjustment to apply to all
+                    matching findings.
+            """
+
+            hotword_regex: "CustomInfoType.Regex" = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                message="CustomInfoType.Regex",
+            )
+            proximity: "CustomInfoType.DetectionRule.Proximity" = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                message="CustomInfoType.DetectionRule.Proximity",
+            )
+            likelihood_adjustment: "CustomInfoType.DetectionRule.LikelihoodAdjustment" = proto.Field(
+                proto.MESSAGE,
+                number=3,
+                message="CustomInfoType.DetectionRule.LikelihoodAdjustment",
+            )
+
+        hotword_rule: "CustomInfoType.DetectionRule.HotwordRule" = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            oneof="type",
+            message="CustomInfoType.DetectionRule.HotwordRule",
+        )
+
+    info_type: "InfoType" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="InfoType",
+    )
+    likelihood: "Likelihood" = proto.Field(
+        proto.ENUM,
+        number=6,
+        enum="Likelihood",
+    )
+    dictionary: Dictionary = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="type",
+        message=Dictionary,
+    )
+    regex: Regex = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof="type",
+        message=Regex,
+    )
+    surrogate_type: SurrogateType = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof="type",
+        message=SurrogateType,
+    )
+    stored_type: "StoredType" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof="type",
+        message="StoredType",
+    )
+    detection_rules: MutableSequence[DetectionRule] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=7,
+        message=DetectionRule,
+    )
+    exclusion_type: ExclusionType = proto.Field(
+        proto.ENUM,
+        number=8,
+        enum=ExclusionType,
+    )
+
+
+class FieldId(proto.Message):
+    r"""General identifier of a data field in a storage service.
+
+    Attributes:
+        name (str):
+            Name describing the field.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class PartitionId(proto.Message):
+    r"""Datastore partition ID.
+    A partition ID identifies a grouping of entities. The grouping
+    is always by project and namespace, however the namespace ID may
+    be empty.
+ + A partition ID contains several dimensions: + + project ID and namespace ID. + + Attributes: + project_id (str): + The ID of the project to which the entities + belong. + namespace_id (str): + If not empty, the ID of the namespace to + which the entities belong. + """ + + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + namespace_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class KindExpression(proto.Message): + r"""A representation of a Datastore kind. + + Attributes: + name (str): + The name of the kind. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatastoreOptions(proto.Message): + r"""Options defining a data set within Google Cloud Datastore. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + A partition ID identifies a grouping of + entities. The grouping is always by project and + namespace, however the namespace ID may be + empty. + kind (google.cloud.dlp_v2.types.KindExpression): + The kind to process. + """ + + partition_id: "PartitionId" = proto.Field( + proto.MESSAGE, + number=1, + message="PartitionId", + ) + kind: "KindExpression" = proto.Field( + proto.MESSAGE, + number=2, + message="KindExpression", + ) + + +class CloudStorageRegexFileSet(proto.Message): + r"""Message representing a set of files in a Cloud Storage bucket. + Regular expressions are used to allow fine-grained control over + which files in the bucket to include. + + Included files are those that match at least one item in + ``include_regex`` and do not match any items in ``exclude_regex``. + Note that a file that matches items from both lists will *not* be + included. For a match to occur, the entire file path (i.e., + everything in the url after the bucket name) must match the regular + expression. + + For example, given the input + ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: + + - ``gs://mybucket/directory1/myfile`` will be included + - ``gs://mybucket/directory1/directory2/myfile`` will be included + (``.*`` matches across ``/``) + - ``gs://mybucket/directory0/directory1/myfile`` will *not* be + included (the full path doesn't match any items in + ``include_regex``) + - ``gs://mybucket/directory1/excludedfile`` will *not* be included + (the path matches an item in ``exclude_regex``) + + If ``include_regex`` is left empty, it will match all files by + default (this is equivalent to setting ``include_regex: [".*"]``). + + Some other common use cases: + + - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will + include all files in ``mybucket`` except for .pdf files + - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` + will include all files directly under + ``gs://mybucket/directory/``, without matching across ``/`` + + Attributes: + bucket_name (str): + The name of a Cloud Storage bucket. Required. + include_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + include. All files in the bucket that match at least one of + these regular expressions will be included in the set of + files, except for those that also match an item in + ``exclude_regex``. Leaving this field empty will match all + files by default (this is equivalent to including ``.*`` in + the list). + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. + exclude_regex (MutableSequence[str]): + A list of regular expressions matching file paths to + exclude. 
All files in the bucket that match at least one of + these regular expressions will be excluded from the scan. + + Regular expressions use RE2 + `syntax `__; a + guide can be found under the google/re2 repository on + GitHub. + """ + + bucket_name: str = proto.Field( + proto.STRING, + number=1, + ) + include_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_regex: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CloudStorageOptions(proto.Message): + r"""Options defining a file or a set of files within a Cloud + Storage bucket. + + Attributes: + file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): + The set of one or more files to scan. + bytes_limit_per_file (int): + Max number of bytes to scan from a file. If a scanned file's + size is bigger than this value then the rest of the bytes + are omitted. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. Cannot be set + if de-identification is requested. + bytes_limit_per_file_percent (int): + Max percentage of bytes to scan from a file. The rest are + omitted. The number of bytes scanned is rounded down. Must + be between 0 and 100, inclusively. Both 0 and 100 means no + limit. Defaults to 0. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. Cannot be set + if de-identification is requested. + file_types (MutableSequence[google.cloud.dlp_v2.types.FileType]): + List of file type groups to include in the scan. If empty, + all files are scanned and available data format processors + are applied. In addition, the binary content of the selected + files is always scanned as well. Images are scanned only as + binary if the specified region does not support image + inspection and no file_types were specified. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): + + files_limit_percent (int): + Limits the number of files to scan to this + percentage of the input FileSet. Number of files + scanned is rounded down. Must be between 0 and + 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. + """ + + class SampleMethod(proto.Enum): + r"""How to sample bytes if not all bytes are scanned. Meaningful only + when used in conjunction with bytes_limit_per_file. If not + specified, scanning would start from the top. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No description available. + TOP (1): + Scan from the top (default). + RANDOM_START (2): + For each file larger than bytes_limit_per_file, randomly + pick the offset to start scanning. The scanned bytes are + contiguous. + """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + class FileSet(proto.Message): + r"""Set of files to scan. + + Attributes: + url (str): + The Cloud Storage url of the file(s) to scan, in the format + ``gs:///``. Trailing wildcard in the path is + allowed. + + If the url ends in a trailing slash, the bucket or directory + represented by the url will be scanned non-recursively + (content in sub-directories will not be scanned). This means + that ``gs://mybucket/`` is equivalent to + ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is + equivalent to ``gs://mybucket/directory/*``. + + Exactly one of ``url`` or ``regex_file_set`` must be set. + regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): + The regex-filtered set of files to scan. 
Exactly one of + ``url`` or ``regex_file_set`` must be set. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + regex_file_set: "CloudStorageRegexFileSet" = proto.Field( + proto.MESSAGE, + number=2, + message="CloudStorageRegexFileSet", + ) + + file_set: FileSet = proto.Field( + proto.MESSAGE, + number=1, + message=FileSet, + ) + bytes_limit_per_file: int = proto.Field( + proto.INT64, + number=4, + ) + bytes_limit_per_file_percent: int = proto.Field( + proto.INT32, + number=8, + ) + file_types: MutableSequence["FileType"] = proto.RepeatedField( + proto.ENUM, + number=5, + enum="FileType", + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=6, + enum=SampleMethod, + ) + files_limit_percent: int = proto.Field( + proto.INT32, + number=7, + ) + + +class CloudStorageFileSet(proto.Message): + r"""Message representing a set of files in Cloud Storage. + + Attributes: + url (str): + The url, in the format ``gs:///``. Trailing + wildcard in the path is allowed. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CloudStoragePath(proto.Message): + r"""Message representing a single file or path in Cloud Storage. + + Attributes: + path (str): + A url representing a file or path (no wildcards) in Cloud + Storage. Example: gs://[BUCKET_NAME]/dictionary.txt + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BigQueryOptions(proto.Message): + r"""Options defining BigQuery table and row identifiers. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Table fields that may uniquely identify a row within the + table. When ``actions.saveFindings.outputConfig.table`` is + specified, the values of columns specified here are + available in the output table under + ``location.content_locations.record_location.record_key.id_values``. + Nested fields such as ``person.birthdate.year`` are allowed. + rows_limit (int): + Max number of rows to scan. If the table has more rows than + this value, the rest of the rows are omitted. If not set, or + if set to 0, all rows will be scanned. Only one of + rows_limit and rows_limit_percent can be specified. Cannot + be used in conjunction with TimespanConfig. + rows_limit_percent (int): + Max percentage of rows to scan. The rest are omitted. The + number of rows scanned is rounded down. Must be between 0 + and 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. Only one of rows_limit and rows_limit_percent + can be specified. Cannot be used in conjunction with + TimespanConfig. + sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): + + excluded_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + References to fields excluded from scanning. + This allows you to skip inspection of entire + columns which you know have no findings. + included_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + Limit scanning only to these fields. + """ + + class SampleMethod(proto.Enum): + r"""How to sample rows if not all rows are scanned. Meaningful only when + used in conjunction with either rows_limit or rows_limit_percent. If + not specified, rows are scanned in the order BigQuery reads them. + + Values: + SAMPLE_METHOD_UNSPECIFIED (0): + No description available. + TOP (1): + Scan groups of rows in the order BigQuery + provides (default). 
Multiple groups of rows may + be scanned in parallel, so results may not + appear in the same order the rows are read. + RANDOM_START (2): + Randomly pick groups of rows to scan. + """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + table_reference: "BigQueryTable" = proto.Field( + proto.MESSAGE, + number=1, + message="BigQueryTable", + ) + identifying_fields: MutableSequence["FieldId"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="FieldId", + ) + rows_limit: int = proto.Field( + proto.INT64, + number=3, + ) + rows_limit_percent: int = proto.Field( + proto.INT32, + number=6, + ) + sample_method: SampleMethod = proto.Field( + proto.ENUM, + number=4, + enum=SampleMethod, + ) + excluded_fields: MutableSequence["FieldId"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="FieldId", + ) + included_fields: MutableSequence["FieldId"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="FieldId", + ) + + +class StorageConfig(proto.Message): + r"""Shared message indicating Cloud storage type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): + Google Cloud Datastore options. + + This field is a member of `oneof`_ ``type``. + cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): + Cloud Storage options. + + This field is a member of `oneof`_ ``type``. + big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): + BigQuery options. + + This field is a member of `oneof`_ ``type``. + hybrid_options (google.cloud.dlp_v2.types.HybridOptions): + Hybrid inspection options. + + This field is a member of `oneof`_ ``type``. + timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): + + """ + + class TimespanConfig(proto.Message): + r"""Configuration of the timespan of the items to include in + scanning. Currently only supported when inspecting Cloud Storage + and BigQuery. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows older than + this value. If not set, no lower time limit is + applied. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows newer than + this value. If not set, no upper time limit is + applied. + timestamp_field (google.cloud.dlp_v2.types.FieldId): + Specification of the field containing the timestamp of + scanned items. Used for data sources like Datastore and + BigQuery. + + For BigQuery + + If this value is not specified and the table was modified + between the given start and end times, the entire table will + be scanned. If this value is specified, then rows are + filtered based on the given start and end times. Rows with a + ``NULL`` value in the provided BigQuery column are skipped. + Valid data types of the provided BigQuery column are: + ``INTEGER``, ``DATE``, ``TIMESTAMP``, and ``DATETIME``. + + If your BigQuery table is `partitioned at ingestion + time `__, + you can use any of the following pseudo-columns as your + timestamp field. When used with Cloud DLP, these + pseudo-column names are case sensitive. + + .. raw:: html + +
+               <ul>
+               <li><code>_PARTITIONTIME</code></li>
+               <li><code>_PARTITIONDATE</code></li>
+               <li><code>_PARTITION_LOAD_TIME</code></li>
+               </ul>
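For illustration — a minimal sketch, with hypothetical epoch values, of pointing ``timestamp_field`` at one of the pseudo-columns listed above (all type names come from this module):

```python
from google.cloud import dlp_v2
from google.protobuf import timestamp_pb2

# Restrict a BigQuery scan to rows ingested during 2023, keyed on the
# case-sensitive _PARTITIONTIME pseudo-column.
timespan = dlp_v2.StorageConfig.TimespanConfig(
    start_time=timestamp_pb2.Timestamp(seconds=1672531200),  # 2023-01-01 UTC
    end_time=timestamp_pb2.Timestamp(seconds=1704067200),    # 2024-01-01 UTC
    timestamp_field=dlp_v2.FieldId(name="_PARTITIONTIME"),
)
```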
+ + For Datastore + + If this value is specified, then entities are filtered based + on the given start and end times. If an entity does not + contain the provided timestamp property or contains empty or + invalid values, then it is included. Valid data types of the + provided timestamp property are: ``TIMESTAMP``. + + See the `known + issue `__ + related to this operation. + enable_auto_population_of_timespan_config (bool): + When the job is started by a JobTrigger we will + automatically figure out a valid start_time to avoid + scanning files that have not been modified since the last + time the JobTrigger executed. This will be based on the time + of the execution of the last run of the JobTrigger or the + timespan end_time used in the last run of the JobTrigger. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + timestamp_field: "FieldId" = proto.Field( + proto.MESSAGE, + number=3, + message="FieldId", + ) + enable_auto_population_of_timespan_config: bool = proto.Field( + proto.BOOL, + number=4, + ) + + datastore_options: "DatastoreOptions" = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message="DatastoreOptions", + ) + cloud_storage_options: "CloudStorageOptions" = proto.Field( + proto.MESSAGE, + number=3, + oneof="type", + message="CloudStorageOptions", + ) + big_query_options: "BigQueryOptions" = proto.Field( + proto.MESSAGE, + number=4, + oneof="type", + message="BigQueryOptions", + ) + hybrid_options: "HybridOptions" = proto.Field( + proto.MESSAGE, + number=9, + oneof="type", + message="HybridOptions", + ) + timespan_config: TimespanConfig = proto.Field( + proto.MESSAGE, + number=6, + message=TimespanConfig, + ) + + +class HybridOptions(proto.Message): + r"""Configuration to control jobs where the content being + inspected is outside of Google Cloud Platform. + + Attributes: + description (str): + A short description of where the data is + coming from. Will be stored once in the job. 256 + max length. + required_finding_label_keys (MutableSequence[str]): + These are labels that each inspection request must include + within their 'finding_labels' map. Request may contain + others, but any missing one of these will be rejected. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + No more than 10 keys can be required. + labels (MutableMapping[str, str]): + To organize findings, these labels will be added to each + finding. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional + information to make findings meaningful such as + the columns that are primary keys. 
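A minimal sketch (field and label values hypothetical) of assembling these hybrid options while staying within the limits described above:

```python
from google.cloud import dlp_v2

# Hybrid inspection options: one required label key, one static label,
# and a single primary-key column for table findings.
hybrid = dlp_v2.HybridOptions(
    description="Rows streamed from an on-prem ETL pipeline",
    required_finding_label_keys=["pipeline"],
    labels={"environment": "production"},
    table_options=dlp_v2.TableOptions(
        identifying_fields=[dlp_v2.FieldId(name="row_id")],
    ),
)
```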
+ """ + + description: str = proto.Field( + proto.STRING, + number=1, + ) + required_finding_label_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + table_options: "TableOptions" = proto.Field( + proto.MESSAGE, + number=4, + message="TableOptions", + ) + + +class BigQueryKey(proto.Message): + r"""Row key for identifying a record in BigQuery table. + + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + row_number (int): + Row number inferred at the time the table was scanned. This + value is nondeterministic, cannot be queried, and may be + null for inspection jobs. To locate findings within a table, + specify + ``inspect_job.storage_config.big_query_options.identifying_fields`` + in ``CreateDlpJobRequest``. + """ + + table_reference: "BigQueryTable" = proto.Field( + proto.MESSAGE, + number=1, + message="BigQueryTable", + ) + row_number: int = proto.Field( + proto.INT64, + number=2, + ) + + +class DatastoreKey(proto.Message): + r"""Record key for a finding in Cloud Datastore. + + Attributes: + entity_key (google.cloud.dlp_v2.types.Key): + Datastore entity key. + """ + + entity_key: "Key" = proto.Field( + proto.MESSAGE, + number=1, + message="Key", + ) + + +class Key(proto.Message): + r"""A unique identifier for a Datastore entity. + If a key's partition ID or any of its path kinds or names are + reserved/read-only, the key is reserved/read-only. + A reserved/read-only key is forbidden in certain documented + contexts. + + Attributes: + partition_id (google.cloud.dlp_v2.types.PartitionId): + Entities are partitioned into subsets, + currently identified by a project ID and + namespace ID. Queries are scoped to a single + partition. + path (MutableSequence[google.cloud.dlp_v2.types.Key.PathElement]): + The entity path. An entity path consists of one or more + elements composed of a kind and a string or numerical + identifier, which identify entities. The first element + identifies a *root entity*, the second element identifies a + *child* of the root entity, the third element identifies a + child of the second entity, and so forth. The entities + identified by all prefixes of the path are called the + element's *ancestors*. + + A path can never be empty, and a path can have at most 100 + elements. + """ + + class PathElement(proto.Message): + r"""A (kind, ID/name) pair used to construct a key path. + + If either name or ID is set, the element is complete. If neither + is set, the element is incomplete. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kind (str): + The kind of the entity. A kind matching regex ``__.*__`` is + reserved/read-only. A kind must not contain more than 1500 + bytes when UTF-8 encoded. Cannot be ``""``. + id (int): + The auto-allocated ID of the entity. + Never equal to zero. Values less than zero are + discouraged and may not be supported in the + future. + + This field is a member of `oneof`_ ``id_type``. + name (str): + The name of the entity. A name matching regex ``__.*__`` is + reserved/read-only. A name must not be more than 1500 bytes + when UTF-8 encoded. Cannot be ``""``. 
+ + This field is a member of `oneof`_ ``id_type``. + """ + + kind: str = proto.Field( + proto.STRING, + number=1, + ) + id: int = proto.Field( + proto.INT64, + number=2, + oneof="id_type", + ) + name: str = proto.Field( + proto.STRING, + number=3, + oneof="id_type", + ) + + partition_id: "PartitionId" = proto.Field( + proto.MESSAGE, + number=1, + message="PartitionId", + ) + path: MutableSequence[PathElement] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=PathElement, + ) + + +class RecordKey(proto.Message): + r"""Message for a unique key indicating a record that contains a + finding. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + datastore_key (google.cloud.dlp_v2.types.DatastoreKey): + + This field is a member of `oneof`_ ``type``. + big_query_key (google.cloud.dlp_v2.types.BigQueryKey): + + This field is a member of `oneof`_ ``type``. + id_values (MutableSequence[str]): + Values of identifying columns in the given row. Order of + values matches the order of ``identifying_fields`` specified + in the scanning request. + """ + + datastore_key: "DatastoreKey" = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message="DatastoreKey", + ) + big_query_key: "BigQueryKey" = proto.Field( + proto.MESSAGE, + number=3, + oneof="type", + message="BigQueryKey", + ) + id_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class BigQueryTable(proto.Message): + r"""Message defining the location of a BigQuery table. A table is + uniquely identified by its project_id, dataset_id, and table_name. + Within a query a table is often referenced with a string in the + format of: ``:.`` or + ``..``. + + Attributes: + project_id (str): + The Google Cloud Platform project ID of the + project containing the table. If omitted, + project ID is inferred from the API call. + dataset_id (str): + Dataset ID of the table. + table_id (str): + Name of the table. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + dataset_id: str = proto.Field( + proto.STRING, + number=2, + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BigQueryField(proto.Message): + r"""Message defining a field of a BigQuery table. + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Source table of the field. + field (google.cloud.dlp_v2.types.FieldId): + Designated field in the BigQuery table. + """ + + table: "BigQueryTable" = proto.Field( + proto.MESSAGE, + number=1, + message="BigQueryTable", + ) + field: "FieldId" = proto.Field( + proto.MESSAGE, + number=2, + message="FieldId", + ) + + +class EntityId(proto.Message): + r"""An entity in a dataset is a field or set of fields that correspond + to a single person. For example, in medical records the ``EntityId`` + might be a patient identifier, or for financial records it might be + an account identifier. This message is used when generalizations or + analysis must take into account that multiple rows correspond to the + same entity. + + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Composite key indicating which field contains + the entity identifier. 
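Because ``id`` and ``name`` share the ``id_type`` oneof, assigning one clears the other. A small sketch (kind and values hypothetical):

```python
from google.cloud import dlp_v2

element = dlp_v2.Key.PathElement(kind="Task", id=42)
element.name = "sample-task"  # `name` and `id` share the `id_type` oneof,
print(element.id)             # so this prints 0: assigning `name` cleared `id`.

key = dlp_v2.Key(
    partition_id=dlp_v2.PartitionId(project_id="my-project"),
    path=[element],
)
```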
+ """ + + field: "FieldId" = proto.Field( + proto.MESSAGE, + number=1, + message="FieldId", + ) + + +class TableOptions(proto.Message): + r"""Instructions regarding the table content being inspected. + + Attributes: + identifying_fields (MutableSequence[google.cloud.dlp_v2.types.FieldId]): + The columns that are the primary keys for + table objects included in ContentItem. A copy of + this cell's value will stored alongside + alongside each finding so that the finding can + be traced to the specific row it came from. No + more than 3 may be provided. + """ + + identifying_fields: MutableSequence["FieldId"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="FieldId", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dlp/mypy.ini b/packages/google-cloud-dlp/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-dlp/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-dlp/noxfile.py b/packages/google-cloud-dlp/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-dlp/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
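Outside of nox, the same coverage gate can be reproduced by hand. A sketch, assuming coverage.py is installed and ``.coverage`` data already exists from a unit-test run:

```python
import subprocess

# Aggregate report over the unit-test runs, failing below 100% coverage,
# then erase the collected data (mirrors the session body below).
subprocess.run(
    ["coverage", "report", "--show-missing", "--fail-under=100"], check=True
)
subprocess.run(["coverage", "erase"], check=True)
```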
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-dlp/renovate.json b/packages/google-cloud-dlp/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-dlp/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-dlp/samples/AUTHORING_GUIDE.md b/packages/google-cloud-dlp/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/google-cloud-dlp/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-dlp/samples/CONTRIBUTING.md b/packages/google-cloud-dlp/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-dlp/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-dlp/samples/snippets/README.rst b/packages/google-cloud-dlp/samples/snippets/README.rst new file mode 100644 index 000000000000..a996c69decbb --- /dev/null +++ b/packages/google-cloud-dlp/samples/snippets/README.rst @@ -0,0 +1,2 @@ +The DLP samples have moved to a new repository (https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/dlp/snippets) in PR: https://github.com/GoogleCloudPlatform/python-docs-samples/pull/9091 +Moving forward, all DLP samples will be added/ updated in the python-docs-samples repository. \ No newline at end of file diff --git a/packages/google-cloud-dlp/scripts/decrypt-secrets.sh b/packages/google-cloud-dlp/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-dlp/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-dlp/scripts/fixup_dlp_v2_keywords.py b/packages/google-cloud-dlp/scripts/fixup_dlp_v2_keywords.py new file mode 100644 index 000000000000..52bd6a5412c8 --- /dev/null +++ b/packages/google-cloud-dlp/scripts/fixup_dlp_v2_keywords.py @@ -0,0 +1,209 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dlpCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'activate_job_trigger': ('name', ), + 'cancel_dlp_job': ('name', ), + 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), + 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), + 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), + 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), + 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), + 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), + 'delete_deidentify_template': ('name', ), + 'delete_dlp_job': ('name', ), + 'delete_inspect_template': ('name', ), + 'delete_job_trigger': ('name', ), + 'delete_stored_info_type': ('name', ), + 'finish_dlp_job': ('name', ), + 'get_deidentify_template': ('name', ), + 'get_dlp_job': ('name', ), + 'get_inspect_template': ('name', ), + 'get_job_trigger': ('name', ), + 'get_stored_info_type': ('name', ), + 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), + 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), + 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), + 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), + 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), + 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'type_', 'location_id', ), + 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), + 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), + 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), + 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), + 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), + 'update_stored_info_type': ('name', 'config', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
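+        # For example, a (hypothetical) call `client.get_dlp_job("job-1", timeout=5.0)`
+        # partitions into args == [Arg("job-1")] and kwargs == [Arg(keyword=timeout)];
+        # the timeout is a control parameter and is peeled off just below.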
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dlpCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dlp client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
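Concretely, the rewrite the tool performs looks like this (method and values chosen for illustration; ``get_dlp_job`` takes the single keyword ``name`` per ``METHOD_TO_PARAMS`` above — the "before" line is pre-migration code that the tool converts):

```python
# Before fixup: positional parameter plus a control parameter.
client.get_dlp_job("projects/my-proj/dlpJobs/job-1", timeout=5.0)

# After fixup: positional args folded into a `request` dict; control
# parameters (retry/timeout/metadata) are left as keyword arguments.
client.get_dlp_job(
    request={"name": "projects/my-proj/dlpJobs/job-1"},
    timeout=5.0,
)
```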
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-dlp/scripts/readme-gen/readme_gen.py b/packages/google-cloud-dlp/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-dlp/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-dlp/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-dlp/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-dlp/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-dlp/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-dlp/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-dlp/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-dlp/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-dlp/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-dlp/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. 
From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-dlp/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-dlp/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-dlp/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-dlp/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-dlp/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-dlp/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-dlp/setup.cfg b/packages/google-cloud-dlp/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-dlp/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-dlp/setup.py b/packages/google-cloud-dlp/setup.py new file mode 100644 index 000000000000..4d0dd40a8710 --- /dev/null +++ b/packages/google-cloud-dlp/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-dlp" + + +description = "Google Cloud Dlp API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/dlp/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git 
a/packages/google-cloud-dlp/testing/.gitignore b/packages/google-cloud-dlp/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-dlp/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-dlp/testing/constraints-3.10.txt b/packages/google-cloud-dlp/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-dlp/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-dlp/testing/constraints-3.11.txt b/packages/google-cloud-dlp/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-dlp/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-dlp/testing/constraints-3.12.txt b/packages/google-cloud-dlp/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-dlp/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-dlp/testing/constraints-3.7.txt b/packages/google-cloud-dlp/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-dlp/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-dlp/testing/constraints-3.8.txt b/packages/google-cloud-dlp/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-dlp/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-dlp/testing/constraints-3.9.txt b/packages/google-cloud-dlp/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-dlp/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-dlp/tests/__init__.py b/packages/google-cloud-dlp/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dlp/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dlp/tests/system/__init__.py b/packages/google-cloud-dlp/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-dlp/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-dlp/tests/system/smoke_test.py b/packages/google-cloud-dlp/tests/system/smoke_test.py new file mode 100644 index 000000000000..7c1fe6726a5e --- /dev/null +++ b/packages/google-cloud-dlp/tests/system/smoke_test.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import dlp_v2 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_dlp_jobs(project_id: str, transport: str): + client = dlp_v2.DlpServiceClient(transport=transport) + + parent = client.common_location_path(project_id, location="us-central1") + client.list_dlp_jobs(parent=parent) + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. 
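+    # (Running this locally requires the PROJECT_ID environment variable
+    # consumed by the fixture above, plus Application Default Credentials
+    # with permission to call the DLP API.)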
+ assert True diff --git a/packages/google-cloud-dlp/tests/unit/__init__.py b/packages/google-cloud-dlp/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dlp/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dlp/tests/unit/gapic/__init__.py b/packages/google-cloud-dlp/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dlp/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/__init__.py b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py new file mode 100644 index 000000000000..13b08513ed96 --- /dev/null +++ b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -0,0 +1,19095 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.dlp_v2.services.dlp_service import ( + DlpServiceAsyncClient, + DlpServiceClient, + pagers, + transports, +) +from google.cloud.dlp_v2.types import dlp, storage + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
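+ # (with a localhost default, "foo.googleapis.com" is substituted so that the derived
+ # mtls endpoint, "foo.mtls.googleapis.com", is distinguishable in the assertions)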
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DlpServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), + ], +) +def test_dlp_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dlp.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dlp.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DlpServiceGrpcTransport, "grpc"), + (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DlpServiceRestTransport, "rest"), + ], +) +def test_dlp_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DlpServiceClient, "grpc"), + (DlpServiceAsyncClient, "grpc_asyncio"), + (DlpServiceClient, "rest"), + ], +) +def test_dlp_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, 
client_class) + + assert client.transport._host == ( + "dlp.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dlp.googleapis.com" + ) + + +def test_dlp_service_client_get_transport_class(): + transport = DlpServiceClient.get_transport_class() + available_transports = [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceRestTransport, + ] + assert transport in available_transports + + transport = DlpServiceClient.get_transport_class("grpc") + assert transport == transports.DlpServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient) +) +@mock.patch.object( + DlpServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DlpServiceAsyncClient), +) +def test_dlp_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DlpServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DlpServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
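+ # "always" forces the mtls endpoint even when no client certificate is configured.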
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "true"), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient) +) +@mock.patch.object( + DlpServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DlpServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dlp_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
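+ #
+ # Roughly, the rule exercised below (an illustrative sketch, not code from the client itself):
+ #
+ #     use_cert = os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
+ #     endpoint = DEFAULT_MTLS_ENDPOINT if (use_cert and cert_source) else DEFAULT_ENDPOINT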
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [DlpServiceClient, DlpServiceAsyncClient]) +@mock.patch.object( + DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient) +) +@mock.patch.object( + DlpServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DlpServiceAsyncClient), +) +def test_dlp_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
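+ # Both the endpoint and the certificate source supplied via ClientOptions should come back unchanged.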
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest"), + ], +) +def test_dlp_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
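+ # User-supplied scopes should be forwarded verbatim to the transport constructor.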
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (DlpServiceClient, transports.DlpServiceRestTransport, "rest", None), + ], +) +def test_dlp_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_dlp_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DlpServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", grpc_helpers), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_dlp_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
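+ # i.e. create_channel should receive the credentials loaded from the file, not the ADC credentials.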
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.InspectContentRequest, + dict, + ], +) +def test_inspect_content(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.inspect_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectContentResponse() + response = client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +def test_inspect_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.inspect_content), "__call__") as call: + client.inspect_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + +@pytest.mark.asyncio +async def test_inspect_content_async( + transport: str = "grpc_asyncio", request_type=dlp.InspectContentRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.inspect_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectContentResponse() + ) + response = await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. 
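+ # (a truthy call count suffices here; the first recorded call carries the request)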
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.asyncio +async def test_inspect_content_async_from_dict(): + await test_inspect_content_async(request_type=dict) + + +def test_inspect_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.inspect_content), "__call__") as call: + call.return_value = dlp.InspectContentResponse() + client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_inspect_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.inspect_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectContentResponse() + ) + await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.RedactImageRequest, + dict, + ], +) +def test_redact_image(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.redact_image), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.RedactImageResponse( + redacted_image=b"redacted_image_blob", + extracted_text="extracted_text_value", + ) + response = client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b"redacted_image_blob" + assert response.extracted_text == "extracted_text_value" + + +def test_redact_image_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.redact_image), "__call__") as call: + client.redact_image() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + +@pytest.mark.asyncio +async def test_redact_image_async( + transport: str = "grpc_asyncio", request_type=dlp.RedactImageRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.redact_image), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.RedactImageResponse( + redacted_image=b"redacted_image_blob", + extracted_text="extracted_text_value", + ) + ) + response = await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b"redacted_image_blob" + assert response.extracted_text == "extracted_text_value" + + +@pytest.mark.asyncio +async def test_redact_image_async_from_dict(): + await test_redact_image_async(request_type=dict) + + +def test_redact_image_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.redact_image), "__call__") as call: + call.return_value = dlp.RedactImageResponse() + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_redact_image_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.redact_image), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.RedactImageResponse() + ) + await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeidentifyContentRequest, + dict, + ], +) +def test_deidentify_content(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyContentResponse() + response = client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +def test_deidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), "__call__" + ) as call: + client.deidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + +@pytest.mark.asyncio +async def test_deidentify_content_async( + transport: str = "grpc_asyncio", request_type=dlp.DeidentifyContentRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyContentResponse() + ) + response = await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_deidentify_content_async_from_dict(): + await test_deidentify_content_async(request_type=dict) + + +def test_deidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), "__call__" + ) as call: + call.return_value = dlp.DeidentifyContentResponse() + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_deidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyContentResponse() + ) + await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ReidentifyContentRequest, + dict, + ], +) +def test_reidentify_content(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ReidentifyContentResponse() + response = client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
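+ # Calling with no arguments should still result in a default (empty) request object being sent.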
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), "__call__" + ) as call: + client.reidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + +@pytest.mark.asyncio +async def test_reidentify_content_async( + transport: str = "grpc_asyncio", request_type=dlp.ReidentifyContentRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ReidentifyContentResponse() + ) + response = await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_reidentify_content_async_from_dict(): + await test_reidentify_content_async(request_type=dict) + + +def test_reidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), "__call__" + ) as call: + call.return_value = dlp.ReidentifyContentResponse() + client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ReidentifyContentResponse() + ) + await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
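+ # (routing parameters reach the server via the "x-goog-request-params" metadata entry)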
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListInfoTypesRequest, + dict, + ], +) +def test_list_info_types(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + response = client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +def test_list_info_types_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: + client.list_info_types() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + +@pytest.mark.asyncio +async def test_list_info_types_async( + transport: str = "grpc_asyncio", request_type=dlp.ListInfoTypesRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInfoTypesResponse() + ) + response = await client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.asyncio +async def test_list_info_types_async_from_dict(): + await test_list_info_types_async(request_type=dict) + + +def test_list_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
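+ # "Flattened" means passing parent="parent_value" directly instead of building a ListInfoTypesRequest by hand.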
+ client.list_info_types( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_info_types_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInfoTypesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_info_types( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_info_types_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_info_types( + dlp.ListInfoTypesRequest(), + parent="parent_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateInspectTemplateRequest, + dict, + ], +) +def test_create_inspect_template(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + response = client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_create_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), "__call__" + ) as call: + client.create_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + +@pytest.mark.asyncio +async def test_create_inspect_template_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateInspectTemplateRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + response = await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_create_inspect_template_async_from_dict(): + await test_create_inspect_template_async(request_type=dict) + + +def test_create_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), "__call__" + ) as call: + call.return_value = dlp.InspectTemplate() + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_inspect_template( + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name="name_value") + assert arg == mock_val + + +def test_create_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_inspect_template( + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].inspect_template + mock_val = dlp.InspectTemplate(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateInspectTemplateRequest, + dict, + ], +) +def test_update_inspect_template(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + response = client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_update_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), "__call__" + ) as call: + client.update_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + +@pytest.mark.asyncio +async def test_update_inspect_template_async( + transport: str = "grpc_asyncio", request_type=dlp.UpdateInspectTemplateRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + response = await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_update_inspect_template_async_from_dict(): + await test_update_inspect_template_async(request_type=dict) + + +def test_update_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), "__call__" + ) as call: + call.return_value = dlp.InspectTemplate() + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_inspect_template( + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].inspect_template
+        mock_val = dlp.InspectTemplate(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_inspect_template_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_inspect_template(
+            dlp.UpdateInspectTemplateRequest(),
+            name="name_value",
+            inspect_template=dlp.InspectTemplate(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_inspect_template_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_inspect_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_inspect_template(
+            name="name_value",
+            inspect_template=dlp.InspectTemplate(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].inspect_template
+        mock_val = dlp.InspectTemplate(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_inspect_template_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_inspect_template(
+            dlp.UpdateInspectTemplateRequest(),
+            name="name_value",
+            inspect_template=dlp.InspectTemplate(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        dlp.GetInspectTemplateRequest,
+        dict,
+    ],
+)
+def test_get_inspect_template(request_type, transport: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_inspect_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + response = client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_get_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), "__call__" + ) as call: + client.get_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + + +@pytest.mark.asyncio +async def test_get_inspect_template_async( + transport: str = "grpc_asyncio", request_type=dlp.GetInspectTemplateRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + response = await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_inspect_template_async_from_dict(): + await test_get_inspect_template_async(request_type=dict) + + +def test_get_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetInspectTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_inspect_template), "__call__" + ) as call: + call.return_value = dlp.InspectTemplate() + client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
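+    # Context: GAPIC clients mirror URI path fields (here, `name`) into the
+    # "x-goog-request-params" metadata entry so the backend can route the
+    # request; the membership check below confirms that header pair was
+    # attached to the outgoing call's metadata.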
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_inspect_template_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetInspectTemplateRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_inspect_template), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate())
+        await client.get_inspect_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_inspect_template_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_inspect_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.InspectTemplate()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_inspect_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_inspect_template_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_inspect_template(
+            dlp.GetInspectTemplateRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_inspect_template_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_inspect_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_inspect_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_inspect_template_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListInspectTemplatesRequest, + dict, + ], +) +def test_list_inspect_templates(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_inspect_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), "__call__" + ) as call: + client.list_inspect_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async( + transport: str = "grpc_asyncio", request_type=dlp.ListInspectTemplatesRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInspectTemplatesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInspectTemplatesRequest() + + # Establish that the response is the type that we expect. 
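+    # Note: list methods wrap the raw response in a pager object that lazily
+    # fetches subsequent pages, which is why the type asserted below is the
+    # AsyncPager rather than dlp.ListInspectTemplatesResponse itself.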
+ assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_from_dict(): + await test_list_inspect_templates_async(request_type=dict) + + +def test_list_inspect_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), "__call__" + ) as call: + call.return_value = dlp.ListInspectTemplatesResponse() + client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_inspect_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInspectTemplatesResponse() + ) + await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_inspect_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inspect_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_inspect_templates( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_inspect_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_inspect_templates(
+            dlp.ListInspectTemplatesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_inspect_templates), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            dlp.ListInspectTemplatesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_inspect_templates(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_inspect_templates(
+            dlp.ListInspectTemplatesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_inspect_templates_pager(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_inspect_templates), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token="def",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_inspect_templates(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, dlp.InspectTemplate) for i in results)
+
+
+def test_list_inspect_templates_pages(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_inspect_templates), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
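+        # The pager tests rely on mock's side_effect semantics: each call to
+        # the stub consumes the next item in the tuple, so the pager keeps
+        # fetching pages while next_page_token is non-empty. The trailing
+        # RuntimeError is a guard that fails loudly if the pager requests
+        # more pages than the test supplies.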
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token="def",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_inspect_templates(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async_pager():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_inspect_templates),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[],
+                next_page_token="def",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            dlp.ListInspectTemplatesResponse(
+                inspect_templates=[
+                    dlp.InspectTemplate(),
+                    dlp.InspectTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_inspect_templates(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.InspectTemplate) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_inspect_templates),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token="abc", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token="def", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token="ghi", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_inspect_templates(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteInspectTemplateRequest, + dict, + ], +) +def test_delete_inspect_template(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), "__call__" + ) as call: + client.delete_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteInspectTemplateRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async_from_dict(): + await test_delete_inspect_template_async(request_type=dict) + + +def test_delete_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), "__call__" + ) as call: + call.return_value = None + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_inspect_template( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_inspect_template(
+            dlp.DeleteInspectTemplateRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_inspect_template_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_inspect_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_inspect_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_inspect_template_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_inspect_template(
+            dlp.DeleteInspectTemplateRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        dlp.CreateDeidentifyTemplateRequest,
+        dict,
+    ],
+)
+def test_create_deidentify_template(request_type, transport: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_deidentify_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.DeidentifyTemplate(
+            name="name_value",
+            display_name="display_name_value",
+            description="description_value",
+        )
+        response = client.create_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.CreateDeidentifyTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DeidentifyTemplate)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.description == "description_value"
+
+
+def test_create_deidentify_template_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
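+    # Note on the mocking idiom used throughout this file: patching
+    # "__call__" on type(client.transport.<method>) intercepts the gRPC
+    # multicallable that the client invokes, so no real channel traffic
+    # occurs and the test can inspect the outgoing request object directly.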
+ with mock.patch.object( + type(client.transport.create_deidentify_template), "__call__" + ) as call: + client.create_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + + +@pytest.mark.asyncio +async def test_create_deidentify_template_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateDeidentifyTemplateRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + response = await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_create_deidentify_template_async_from_dict(): + await test_create_deidentify_template_async(request_type=dict) + + +def test_create_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), "__call__" + ) as call: + call.return_value = dlp.DeidentifyTemplate() + client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deidentify_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate() + ) + await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_deidentify_template_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_deidentify_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.DeidentifyTemplate()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_deidentify_template(
+            parent="parent_value",
+            deidentify_template=dlp.DeidentifyTemplate(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].deidentify_template
+        mock_val = dlp.DeidentifyTemplate(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_deidentify_template_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_deidentify_template(
+            dlp.CreateDeidentifyTemplateRequest(),
+            parent="parent_value",
+            deidentify_template=dlp.DeidentifyTemplate(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_deidentify_template_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_deidentify_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            dlp.DeidentifyTemplate()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_deidentify_template(
+            parent="parent_value",
+            deidentify_template=dlp.DeidentifyTemplate(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].deidentify_template
+        mock_val = dlp.DeidentifyTemplate(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_deidentify_template_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateDeidentifyTemplateRequest, + dict, + ], +) +def test_update_deidentify_template(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + response = client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_update_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), "__call__" + ) as call: + client.update_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + +@pytest.mark.asyncio +async def test_update_deidentify_template_async( + transport: str = "grpc_asyncio", request_type=dlp.UpdateDeidentifyTemplateRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + response = await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_update_deidentify_template_async_from_dict(): + await test_update_deidentify_template_async(request_type=dict) + + +def test_update_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), "__call__" + ) as call: + call.return_value = dlp.DeidentifyTemplate() + client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate() + ) + await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_deidentify_template( + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].deidentify_template
+        mock_val = dlp.DeidentifyTemplate(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_deidentify_template_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_deidentify_template(
+            dlp.UpdateDeidentifyTemplateRequest(),
+            name="name_value",
+            deidentify_template=dlp.DeidentifyTemplate(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_deidentify_template_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_deidentify_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            dlp.DeidentifyTemplate()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_deidentify_template(
+            name="name_value",
+            deidentify_template=dlp.DeidentifyTemplate(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].deidentify_template
+        mock_val = dlp.DeidentifyTemplate(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_deidentify_template_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_deidentify_template(
+            dlp.UpdateDeidentifyTemplateRequest(),
+            name="name_value",
+            deidentify_template=dlp.DeidentifyTemplate(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        dlp.GetDeidentifyTemplateRequest,
+        dict,
+    ],
+)
+def test_get_deidentify_template(request_type, transport: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_deidentify_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + response = client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_get_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), "__call__" + ) as call: + client.get_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + +@pytest.mark.asyncio +async def test_get_deidentify_template_async( + transport: str = "grpc_asyncio", request_type=dlp.GetDeidentifyTemplateRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + response = await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_deidentify_template_async_from_dict(): + await test_get_deidentify_template_async(request_type=dict) + + +def test_get_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deidentify_template), "__call__" + ) as call: + call.return_value = dlp.DeidentifyTemplate() + client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_deidentify_template_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetDeidentifyTemplateRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_deidentify_template), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            dlp.DeidentifyTemplate()
+        )
+        await client.get_deidentify_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_deidentify_template_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_deidentify_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.DeidentifyTemplate()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_deidentify_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_deidentify_template_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_deidentify_template(
+            dlp.GetDeidentifyTemplateRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_deidentify_template_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_deidentify_template), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            dlp.DeidentifyTemplate()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_deidentify_template(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListDeidentifyTemplatesRequest, + dict, + ], +) +def test_list_deidentify_templates(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_deidentify_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), "__call__" + ) as call: + client.list_deidentify_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async( + transport: str = "grpc_asyncio", request_type=dlp.ListDeidentifyTemplatesRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDeidentifyTemplatesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_from_dict(): + await test_list_deidentify_templates_async(request_type=dict) + + +def test_list_deidentify_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), "__call__" + ) as call: + call.return_value = dlp.ListDeidentifyTemplatesResponse() + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDeidentifyTemplatesResponse() + ) + await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_deidentify_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deidentify_templates( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_deidentify_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
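+    # The flattened keyword arguments are only sugar for populating a request
+    # object, so combining them with an explicit request would be ambiguous;
+    # the client raises ValueError instead of guessing which value wins.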
+    with pytest.raises(ValueError):
+        client.list_deidentify_templates(
+            dlp.ListDeidentifyTemplatesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_deidentify_templates), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ListDeidentifyTemplatesResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            dlp.ListDeidentifyTemplatesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_deidentify_templates(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_deidentify_templates(
+            dlp.ListDeidentifyTemplatesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_deidentify_templates_pager(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_deidentify_templates), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[],
+                next_page_token="def",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_deidentify_templates(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, dlp.DeidentifyTemplate) for i in results)
+
+
+def test_list_deidentify_templates_pages(transport_name: str = "grpc"):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_deidentify_templates), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
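+        # With an iterable ``side_effect``, each successive call to the stub
+        # returns the next item; the trailing RuntimeError would be raised if
+        # the pager ever requested a page beyond the scripted responses.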
+        call.side_effect = (
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[],
+                next_page_token="def",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_deidentify_templates(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_async_pager():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_deidentify_templates),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                ],
+                next_page_token="abc",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[],
+                next_page_token="def",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                ],
+                next_page_token="ghi",
+            ),
+            dlp.ListDeidentifyTemplatesResponse(
+                deidentify_templates=[
+                    dlp.DeidentifyTemplate(),
+                    dlp.DeidentifyTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_deidentify_templates(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.DeidentifyTemplate) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_deidentify_templates),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token="abc", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token="def", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token="ghi", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_deidentify_templates(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteDeidentifyTemplateRequest, + dict, + ], +) +def test_delete_deidentify_template(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), "__call__" + ) as call: + client.delete_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteDeidentifyTemplateRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
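+        # Delete RPCs return google.protobuf.Empty on the wire, which the
+        # generated client surfaces to callers as ``None``.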
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async_from_dict(): + await test_delete_deidentify_template_async(request_type=dict) + + +def test_delete_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), "__call__" + ) as call: + call.return_value = None + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deidentify_template( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_deidentify_template( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateJobTriggerRequest, + dict, + ], +) +def test_create_job_trigger(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + response = client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job_trigger), "__call__" + ) as call: + client.create_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + +@pytest.mark.asyncio +async def test_create_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateJobTriggerRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + ) + response = await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_create_job_trigger_async_from_dict(): + await test_create_job_trigger_async(request_type=dict) + + +def test_create_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), "__call__" + ) as call: + call.return_value = dlp.JobTrigger() + client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
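+        # The client mirrors URI-bound request fields into the
+        # ``x-goog-request-params`` metadata entry (here ``parent=parent_value``),
+        # which is what the field-header assertion below verifies.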
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_trigger( + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name="name_value") + assert arg == mock_val + + +def test_create_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job_trigger( + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateJobTriggerRequest, + dict, + ], +) +def test_update_job_trigger(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + response = client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), "__call__" + ) as call: + client.update_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + + +@pytest.mark.asyncio +async def test_update_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.UpdateJobTriggerRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + ) + response = await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_update_job_trigger_async_from_dict(): + await test_update_job_trigger_async(request_type=dict) + + +def test_update_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), "__call__" + ) as call: + call.return_value = dlp.JobTrigger() + client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_job_trigger( + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
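+        # ``update_mask`` is a ``google.protobuf.field_mask_pb2.FieldMask``;
+        # its ``paths`` name the JobTrigger fields the server should
+        # overwrite, and it is forwarded on the request unchanged.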
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_job_trigger( + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].job_trigger + mock_val = dlp.JobTrigger(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.HybridInspectJobTriggerRequest, + dict, + ], +) +def test_hybrid_inspect_job_trigger(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + client.hybrid_inspect_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.HybridInspectJobTriggerRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + response = await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async_from_dict(): + await test_hybrid_inspect_job_trigger_async(request_type=dict) + + +def test_hybrid_inspect_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.HybridInspectJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_hybrid_inspect_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_job_trigger( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_hybrid_inspect_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_job_trigger( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetJobTriggerRequest, + dict, + ], +) +def test_get_job_trigger(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + response = client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: + client.get_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + +@pytest.mark.asyncio +async def test_get_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.GetJobTriggerRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + ) + response = await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +@pytest.mark.asyncio +async def test_get_job_trigger_async_from_dict(): + await test_get_job_trigger_async(request_type=dict) + + +def test_get_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: + call.return_value = dlp.JobTrigger() + client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_trigger( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job_trigger( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListJobTriggersRequest, + dict, + ], +) +def test_list_job_triggers(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse( + next_page_token="next_page_token_value", + ) + response = client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_job_triggers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + client.list_job_triggers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + + +@pytest.mark.asyncio +async def test_list_job_triggers_async( + transport: str = "grpc_asyncio", request_type=dlp.ListJobTriggersRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListJobTriggersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListJobTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_from_dict(): + await test_list_job_triggers_async(request_type=dict) + + +def test_list_job_triggers_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + call.return_value = dlp.ListJobTriggersResponse() + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_job_triggers_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListJobTriggersResponse() + ) + await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_job_triggers_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_triggers( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_job_triggers_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListJobTriggersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_triggers( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent="parent_value", + ) + + +def test_list_job_triggers_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + # Set the response to a series of pages. 
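+        # ``side_effect`` hands out one response per stub invocation; the
+        # trailing RuntimeError is a sentinel that fails loudly if the pager
+        # ever requests a page beyond the final, token-less one.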
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token="def", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_job_triggers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) for i in results) + + +def test_list_job_triggers_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token="def", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_triggers(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token="def", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_triggers( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.JobTrigger) for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
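+        # With ``new_callable=mock.AsyncMock`` above, each awaited stub call
+        # resolves to the next item in this ``side_effect`` sequence.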
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token="def", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_job_triggers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteJobTriggerRequest, + dict, + ], +) +def test_delete_job_trigger(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), "__call__" + ) as call: + client.delete_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteJobTriggerRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. 
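+    # DeleteJobTrigger returns ``google.protobuf.Empty``, which the client
+    # surfaces as ``None``; hence ``FakeUnaryUnaryCall(None)`` above.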
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async_from_dict(): + await test_delete_job_trigger_async(request_type=dict) + + +def test_delete_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), "__call__" + ) as call: + call.return_value = None + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job_trigger( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
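+        # The first assignment below is redundant scaffolding: it is
+        # immediately overwritten by the awaitable FakeUnaryUnaryCall wrapper,
+        # which is what the async client actually awaits.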
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job_trigger( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ActivateJobTriggerRequest, + dict, + ], +) +def test_activate_job_trigger(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + response = client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_activate_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), "__call__" + ) as call: + client.activate_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + +@pytest.mark.asyncio +async def test_activate_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.ActivateJobTriggerRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.activate_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + ) + response = await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +@pytest.mark.asyncio +async def test_activate_job_trigger_async_from_dict(): + await test_activate_job_trigger_async(request_type=dict) + + +def test_activate_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), "__call__" + ) as call: + call.return_value = dlp.DlpJob() + client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_activate_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateDlpJobRequest, + dict, + ], +) +def test_create_dlp_job(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + response = client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_create_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: + client.create_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + +@pytest.mark.asyncio +async def test_create_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateDlpJobRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + ) + response = await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +@pytest.mark.asyncio +async def test_create_dlp_job_async_from_dict(): + await test_create_dlp_job_async(request_type=dict) + + +def test_create_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: + call.return_value = dlp.DlpJob() + client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dlp_job( + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ) + + +def test_create_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
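+    # Supplying both at once is ambiguous about which should win, so the
+    # client raises ValueError before attempting any RPC.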
+ with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dlp_job( + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + assert args[0].risk_job == dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ) + + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListDlpJobsRequest, + dict, + ], +) +def test_list_dlp_jobs(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.ListDlpJobsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_dlp_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + client.list_dlp_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async( + transport: str = "grpc_asyncio", request_type=dlp.ListDlpJobsRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDlpJobsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_from_dict(): + await test_list_dlp_jobs_async(request_type=dict) + + +def test_list_dlp_jobs_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + call.return_value = dlp.ListDlpJobsResponse() + client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.ListDlpJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDlpJobsResponse() + ) + await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_dlp_jobs_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_dlp_jobs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_dlp_jobs_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDlpJobsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_dlp_jobs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent="parent_value", + ) + + +def test_list_dlp_jobs_pager(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + # Set the response to a series of pages. 
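+        # Four pages serving 3, 0, 1, and 2 jobs respectively (6 in total),
+        # matching the ``len(results) == 6`` check below.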
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token="abc", + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token="def", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token="ghi", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_dlp_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) for i in results) + + +def test_list_dlp_jobs_pages(transport_name: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token="abc", + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token="def", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token="ghi", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_dlp_jobs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pager(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token="abc", + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token="def", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token="ghi", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_dlp_jobs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DlpJob) for i in responses) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pages(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token="abc", + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token="def", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token="ghi", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_dlp_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetDlpJobRequest, + dict, + ], +) +def test_get_dlp_job(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + response = client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_get_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: + client.get_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + +@pytest.mark.asyncio +async def test_get_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.GetDlpJobRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. 
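+        # FakeUnaryUnaryCall wraps a plain response message so that awaiting
+        # the patched call resolves to it.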
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + ) + response = await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +@pytest.mark.asyncio +async def test_get_dlp_job_async_from_dict(): + await test_get_dlp_job_async(request_type=dict) + + +def test_get_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: + call.return_value = dlp.DlpJob() + client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dlp_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
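+        # The flattened ``name`` kwarg should have been folded into the
+        # request message, so inspect the first positional argument.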
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_dlp_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dlp_job( + dlp.GetDlpJobRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteDlpJobRequest, + dict, + ], +) +def test_delete_dlp_job(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: + client.delete_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteDlpJobRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async_from_dict(): + await test_delete_dlp_job_async(request_type=dict) + + +def test_delete_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: + call.return_value = None + client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_dlp_job_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dlp_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_dlp_job_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dlp_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CancelDlpJobRequest, + dict, + ], +) +def test_cancel_dlp_job(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: + client.cancel_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.CancelDlpJobRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async_from_dict(): + await test_cancel_dlp_job_async(request_type=dict) + + +def test_cancel_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: + call.return_value = None + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
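+ # Each entry in mock_calls unpacks as (name, args, kwargs); the routing
+ # header travels in the "metadata" keyword argument of the stub call.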
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateStoredInfoTypeRequest, + dict, + ], +) +def test_create_stored_info_type(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType( + name="name_value", + ) + response = client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +def test_create_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), "__call__" + ) as call: + client.create_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + +@pytest.mark.asyncio +async def test_create_stored_info_type_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateStoredInfoTypeRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.StoredInfoType( + name="name_value", + ) + ) + response = await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_create_stored_info_type_async_from_dict(): + await test_create_stored_info_type_async(request_type=dict) + + +def test_create_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.CreateStoredInfoTypeRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), "__call__" + ) as call: + call.return_value = dlp.StoredInfoType() + client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_stored_info_type( + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].config + mock_val = dlp.StoredInfoTypeConfig(display_name="display_name_value") + assert arg == mock_val + + +def test_create_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
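+ # The async stub must return an awaitable, so the value is wrapped in
+ # grpc_helpers_async.FakeUnaryUnaryCall rather than assigned directly.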
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_stored_info_type(
+ parent="parent_value",
+ config=dlp.StoredInfoTypeConfig(display_name="display_name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].config
+ mock_val = dlp.StoredInfoTypeConfig(display_name="display_name_value")
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_stored_info_type_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_stored_info_type(
+ dlp.CreateStoredInfoTypeRequest(),
+ parent="parent_value",
+ config=dlp.StoredInfoTypeConfig(display_name="display_name_value"),
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ dlp.UpdateStoredInfoTypeRequest,
+ dict,
+ ],
+)
+def test_update_stored_info_type(request_type, transport: str = "grpc"):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_stored_info_type), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.StoredInfoType(
+ name="name_value",
+ )
+ response = client.update_stored_info_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.UpdateStoredInfoTypeRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.StoredInfoType)
+ assert response.name == "name_value"
+
+
+def test_update_stored_info_type_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_stored_info_type), "__call__"
+ ) as call:
+ client.update_stored_info_type()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.UpdateStoredInfoTypeRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_async(
+ transport: str = "grpc_asyncio", request_type=dlp.UpdateStoredInfoTypeRequest
+):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.StoredInfoType( + name="name_value", + ) + ) + response = await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_update_stored_info_type_async_from_dict(): + await test_update_stored_info_type_async(request_type=dict) + + +def test_update_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), "__call__" + ) as call: + call.return_value = dlp.StoredInfoType() + client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_update_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_stored_info_type( + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
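+ # The flattened keyword arguments are folded into a single request
+ # object before the transport is invoked, so they are checked on args[0].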
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+ arg = args[0].config
+ mock_val = dlp.StoredInfoTypeConfig(display_name="display_name_value")
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+ assert arg == mock_val
+
+
+def test_update_stored_info_type_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_stored_info_type(
+ dlp.UpdateStoredInfoTypeRequest(),
+ name="name_value",
+ config=dlp.StoredInfoTypeConfig(display_name="display_name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_stored_info_type), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_stored_info_type(
+ name="name_value",
+ config=dlp.StoredInfoTypeConfig(display_name="display_name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+ arg = args[0].config
+ mock_val = dlp.StoredInfoTypeConfig(display_name="display_name_value")
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_stored_info_type(
+ dlp.UpdateStoredInfoTypeRequest(),
+ name="name_value",
+ config=dlp.StoredInfoTypeConfig(display_name="display_name_value"),
+ update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ dlp.GetStoredInfoTypeRequest,
+ dict,
+ ],
+)
+def test_get_stored_info_type(request_type, transport: str = "grpc"):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_stored_info_type), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.StoredInfoType( + name="name_value", + ) + response = client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +def test_get_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), "__call__" + ) as call: + client.get_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + +@pytest.mark.asyncio +async def test_get_stored_info_type_async( + transport: str = "grpc_asyncio", request_type=dlp.GetStoredInfoTypeRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.StoredInfoType( + name="name_value", + ) + ) + response = await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_stored_info_type_async_from_dict(): + await test_get_stored_info_type_async(request_type=dict) + + +def test_get_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), "__call__" + ) as call: + call.return_value = dlp.StoredInfoType() + client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value.
+ request = dlp.GetStoredInfoTypeRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_stored_info_type), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+ await client.get_stored_info_type(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_get_stored_info_type_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_stored_info_type), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.StoredInfoType()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_stored_info_type(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_get_stored_info_type_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_stored_info_type(
+ dlp.GetStoredInfoTypeRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_stored_info_type_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_stored_info_type), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_stored_info_type(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_stored_info_type_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListStoredInfoTypesRequest, + dict, + ], +) +def test_list_stored_info_types(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_stored_info_types_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), "__call__" + ) as call: + client.list_stored_info_types() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async( + transport: str = "grpc_asyncio", request_type=dlp.ListStoredInfoTypesRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListStoredInfoTypesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListStoredInfoTypesRequest() + + # Establish that the response is the type that we expect. 
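+ # The async surface wraps the raw ListStoredInfoTypesResponse in an
+ # AsyncPager, while still exposing fields such as next_page_token.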
+ assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_from_dict(): + await test_list_stored_info_types_async(request_type=dict) + + +def test_list_stored_info_types_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), "__call__" + ) as call: + call.return_value = dlp.ListStoredInfoTypesResponse() + client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_stored_info_types_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListStoredInfoTypesResponse() + ) + await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_stored_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_stored_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_stored_info_types( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_stored_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.list_stored_info_types(
+ dlp.ListStoredInfoTypesRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_stored_info_types), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ dlp.ListStoredInfoTypesResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_stored_info_types(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_stored_info_types(
+ dlp.ListStoredInfoTypesRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_stored_info_types_pager(transport_name: str = "grpc"):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_stored_info_types), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ ],
+ next_page_token="abc",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[],
+ next_page_token="def",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ ],
+ next_page_token="ghi",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_stored_info_types(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, dlp.StoredInfoType) for i in results)
+
+
+def test_list_stored_info_types_pages(transport_name: str = "grpc"):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_stored_info_types), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
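+ # With side_effect set to a sequence, the mock returns one element per
+ # invocation; the trailing RuntimeError fails the test if the pager
+ # requests more pages than were staged.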
+ call.side_effect = (
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ ],
+ next_page_token="abc",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[],
+ next_page_token="def",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ ],
+ next_page_token="ghi",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_stored_info_types(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async_pager():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_stored_info_types),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ ],
+ next_page_token="abc",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[],
+ next_page_token="def",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ ],
+ next_page_token="ghi",
+ ),
+ dlp.ListStoredInfoTypesResponse(
+ stored_info_types=[
+ dlp.StoredInfoType(),
+ dlp.StoredInfoType(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_stored_info_types(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, dlp.StoredInfoType) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async_pages():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_stored_info_types),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token="abc", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token="def", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token="ghi", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_stored_info_types(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteStoredInfoTypeRequest, + dict, + ], +) +def test_delete_stored_info_type(request_type, transport: str = "grpc"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), "__call__" + ) as call: + client.delete_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteStoredInfoTypeRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
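+ # The async variants assert only that at least one call was recorded;
+ # len(call.mock_calls) is truthy for any non-empty call list.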
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async_from_dict(): + await test_delete_stored_info_type_async(request_type=dict) + + +def test_delete_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), "__call__" + ) as call: + call.return_value = None + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_stored_info_type( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.delete_stored_info_type(
+ dlp.DeleteStoredInfoTypeRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_stored_info_type_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_stored_info_type), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_stored_info_type(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_stored_info_type_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_stored_info_type(
+ dlp.DeleteStoredInfoTypeRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ dlp.HybridInspectDlpJobRequest,
+ dict,
+ ],
+)
+def test_hybrid_inspect_dlp_job(request_type, transport: str = "grpc"):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.hybrid_inspect_dlp_job), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.HybridInspectResponse()
+ response = client.hybrid_inspect_dlp_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.HybridInspectDlpJobRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.HybridInspectResponse)
+
+
+def test_hybrid_inspect_dlp_job_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + client.hybrid_inspect_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.HybridInspectDlpJobRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + response = await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async_from_dict(): + await test_hybrid_inspect_dlp_job_async(request_type=dict) + + +def test_hybrid_inspect_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + call.return_value = dlp.HybridInspectResponse() + client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_hybrid_inspect_dlp_job_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.hybrid_inspect_dlp_job), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.HybridInspectResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.hybrid_inspect_dlp_job(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_hybrid_inspect_dlp_job_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.hybrid_inspect_dlp_job(
+ dlp.HybridInspectDlpJobRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_hybrid_inspect_dlp_job_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.hybrid_inspect_dlp_job), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ dlp.HybridInspectResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.hybrid_inspect_dlp_job(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_hybrid_inspect_dlp_job_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.hybrid_inspect_dlp_job(
+ dlp.HybridInspectDlpJobRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ dlp.FinishDlpJobRequest,
+ dict,
+ ],
+)
+def test_finish_dlp_job(request_type, transport: str = "grpc"):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.finish_dlp_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_finish_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: + client.finish_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + +@pytest.mark.asyncio +async def test_finish_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.FinishDlpJobRequest +): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_finish_dlp_job_async_from_dict(): + await test_finish_dlp_job_async(request_type=dict) + + +def test_finish_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: + call.return_value = None + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_finish_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.InspectContentRequest, + dict, + ], +) +def test_inspect_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.inspect_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_inspect_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_inspect_content" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_inspect_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.InspectContentResponse.to_json( + dlp.InspectContentResponse() + ) + + request = dlp.InspectContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.InspectContentResponse() + + client.inspect_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_inspect_content_rest_bad_request( + transport: str = "rest", request_type=dlp.InspectContentRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.inspect_content(request) + + +def test_inspect_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.RedactImageRequest, + dict, + ], +) +def test_redact_image_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.RedactImageResponse( + redacted_image=b"redacted_image_blob", + extracted_text="extracted_text_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.RedactImageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.redact_image(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b"redacted_image_blob" + assert response.extracted_text == "extracted_text_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_redact_image_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_redact_image" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_redact_image" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.RedactImageResponse.to_json( + dlp.RedactImageResponse() + ) + + request = dlp.RedactImageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.RedactImageResponse() + + client.redact_image( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_redact_image_rest_bad_request( + transport: str = "rest", request_type=dlp.RedactImageRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request 
that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.redact_image(request) + + +def test_redact_image_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeidentifyContentRequest, + dict, + ], +) +def test_deidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.deidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_deidentify_content_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_deidentify_content" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_deidentify_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DeidentifyContentResponse.to_json( + dlp.DeidentifyContentResponse() + ) + + request = dlp.DeidentifyContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DeidentifyContentResponse() + + client.deidentify_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_deidentify_content_rest_bad_request( + transport: str = "rest", request_type=dlp.DeidentifyContentRequest +): + client = DlpServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deidentify_content(request) + + +def test_deidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ReidentifyContentRequest, + dict, + ], +) +def test_reidentify_content_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ReidentifyContentResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ReidentifyContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.reidentify_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_rest_required_fields( + request_type=dlp.ReidentifyContentRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reidentify_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ReidentifyContentResponse() + # Mock the http request call within the method and fake a response. 
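+    # The check patches both the session and transcode(), then verifies that
+    # the only query parameter reaching the wire is the "$alt" system param.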
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.ReidentifyContentResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.reidentify_content(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_reidentify_content_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.reidentify_content._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_reidentify_content_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DlpServiceRestInterceptor(),
+    )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "post_reidentify_content"
+    ) as post, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "pre_reidentify_content"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.ReidentifyContentResponse.to_json(
+            dlp.ReidentifyContentResponse()
+        )
+
+        request = dlp.ReidentifyContentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.ReidentifyContentResponse()
+
+        client.reidentify_content(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_reidentify_content_rest_bad_request(
+    transport: str = "rest", request_type=dlp.ReidentifyContentRequest
+):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reidentify_content(request) + + +def test_reidentify_content_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListInfoTypesRequest, + dict, + ], +) +def test_list_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_info_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_info_types" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_info_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListInfoTypesResponse.to_json( + dlp.ListInfoTypesResponse() + ) + + request = dlp.ListInfoTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListInfoTypesResponse() + + client.list_info_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_info_types_rest_bad_request( + transport: str = "rest", request_type=dlp.ListInfoTypesRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_info_types(request) + + +def test_list_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/infoTypes" % client.transport._host, args[1] + ) + + +def test_list_info_types_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent="parent_value", + ) + + +def test_list_info_types_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateInspectTemplateRequest, + dict, + ], +) +def test_create_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_inspect_template(request) + + # Establish that the response is the type that we expect. 
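+    # The scalar fields asserted here round-trip through the MessageToJson
+    # payload designated above, so they should come back unchanged.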
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_create_inspect_template_rest_required_fields( + request_type=dlp.CreateInspectTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.InspectTemplate.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_inspect_template(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_inspect_template_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_inspect_template._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "inspectTemplate",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_inspect_template_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DlpServiceRestInterceptor(),
+    )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "post_create_inspect_template"
+    ) as post, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "pre_create_inspect_template"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.CreateInspectTemplateRequest.pb(
+            dlp.CreateInspectTemplateRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate())
+
+        request = dlp.CreateInspectTemplateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.InspectTemplate()
+
+        client.create_inspect_template(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_inspect_template_rest_bad_request(
+    transport: str = "rest", request_type=dlp.CreateInspectTemplateRequest
+):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "organizations/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_inspect_template(request) + + +def test_create_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, + args[1], + ) + + +def test_create_inspect_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + +def test_create_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateInspectTemplateRequest, + dict, + ], +) +def test_update_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_inspect_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_update_inspect_template_rest_required_fields( + request_type=dlp.UpdateInspectTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
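+            # ("v1/sample_method" is a placeholder URI; mocking transcode()
+            # below bypasses real http_options path matching.)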
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.InspectTemplate.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_inspect_template(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_inspect_template_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.update_inspect_template._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_inspect_template_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DlpServiceRestInterceptor(),
+    )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "post_update_inspect_template"
+    ) as post, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "pre_update_inspect_template"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.UpdateInspectTemplateRequest.pb(
+            dlp.UpdateInspectTemplateRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate())
+
+        request = dlp.UpdateInspectTemplateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.InspectTemplate()
+
+        client.update_inspect_template(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_inspect_template_rest_bad_request(
+    transport: str = "rest", request_type=dlp.UpdateInspectTemplateRequest
+):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "organizations/sample1/inspectTemplates/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_inspect_template(request) + + +def test_update_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/inspectTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, + args[1], + ) + + +def test_update_inspect_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetInspectTemplateRequest, + dict, + ], +) +def test_get_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_inspect_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_get_inspect_template_rest_required_fields( + request_type=dlp.GetInspectTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.InspectTemplate.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_inspect_template(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_inspect_template_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_inspect_template._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_inspect_template_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DlpServiceRestInterceptor(),
+    )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "post_get_inspect_template"
+    ) as post, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "pre_get_inspect_template"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.InspectTemplate.to_json(dlp.InspectTemplate())
+
+        request = dlp.GetInspectTemplateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.InspectTemplate()
+
+        client.get_inspect_template(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_inspect_template_rest_bad_request(
+    transport: str = "rest", request_type=dlp.GetInspectTemplateRequest
+):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "organizations/sample1/inspectTemplates/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_inspect_template(request) + + +def test_get_inspect_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.InspectTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/inspectTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.InspectTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, + args[1], + ) + + +def test_get_inspect_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), + name="name_value", + ) + + +def test_get_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListInspectTemplatesRequest, + dict, + ], +) +def test_list_inspect_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_inspect_templates(request) + + # Establish that the response is the type that we expect. 
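+    # List responses are wrapped in a pager, which fetches any further pages
+    # lazily by following next_page_token.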
+ assert isinstance(response, pagers.ListInspectTemplatesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_inspect_templates_rest_required_fields( + request_type=dlp.ListInspectTemplatesRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_inspect_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_inspect_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "location_id", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_inspect_templates(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_inspect_templates_rest_unset_required_fields():
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_inspect_templates._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "locationId",
+                "orderBy",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_inspect_templates_rest_interceptors(null_interceptor):
+    transport = transports.DlpServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.DlpServiceRestInterceptor(),
+    )
+    client = DlpServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "post_list_inspect_templates"
+    ) as post, mock.patch.object(
+        transports.DlpServiceRestInterceptor, "pre_list_inspect_templates"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = dlp.ListInspectTemplatesRequest.pb(
+            dlp.ListInspectTemplatesRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = dlp.ListInspectTemplatesResponse.to_json(
+            dlp.ListInspectTemplatesResponse()
+        )
+
+        request = dlp.ListInspectTemplatesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = dlp.ListInspectTemplatesResponse()
+
+        client.list_inspect_templates(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_inspect_templates_rest_bad_request(
+    transport: str = "rest", request_type=dlp.ListInspectTemplatesRequest
+):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "organizations/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_inspect_templates(request) + + +def test_list_inspect_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListInspectTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListInspectTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_inspect_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/inspectTemplates" % client.transport._host, + args[1], + ) + + +def test_list_inspect_templates_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), + parent="parent_value", + ) + + +def test_list_inspect_templates_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
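+        # The pages below are serialized to JSON and replayed through the
+        # mocked session via side_effect, one response per HTTP round trip.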
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token="abc", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], + next_page_token="def", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + ], + next_page_token="ghi", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListInspectTemplatesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_inspect_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) for i in results) + + pages = list(client.list_inspect_templates(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteInspectTemplateRequest, + dict, + ], +) +def test_delete_inspect_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/inspectTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_inspect_template(request) + + # Establish that the response is the type that we expect. 
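+ # The mocked response carries an empty JSON body, so the client has no
+ # message to deserialize and the delete call returns None.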
+ assert response is None + + +def test_delete_inspect_template_rest_required_fields( + request_type=dlp.DeleteInspectTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_inspect_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
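+ # The stubbed transcode result below pins the URI and HTTP method to dummy
+ # values so the assertions can focus on query-param serialization.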
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "delete",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.delete_inspect_template(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_delete_inspect_template_rest_unset_required_fields():
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.delete_inspect_template._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_inspect_template_rest_interceptors(null_interceptor):
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DlpServiceRestInterceptor(),
+ )
+ client = DlpServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "pre_delete_inspect_template"
+ ) as pre:
+ pre.assert_not_called()
+ pb_message = dlp.DeleteInspectTemplateRequest.pb(
+ dlp.DeleteInspectTemplateRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+
+ request = dlp.DeleteInspectTemplateRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+
+ client.delete_inspect_template(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+
+
+def test_delete_inspect_template_rest_bad_request(
+ transport: str = "rest", request_type=dlp.DeleteInspectTemplateRequest
+):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"name": "organizations/sample1/inspectTemplates/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.delete_inspect_template(request)
+
+
+def test_delete_inspect_template_rest_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/inspectTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_inspect_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/inspectTemplates/*}" % client.transport._host, + args[1], + ) + + +def test_delete_inspect_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), + name="name_value", + ) + + +def test_delete_inspect_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateDeidentifyTemplateRequest, + dict, + ], +) +def test_create_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deidentify_template(request) + + # Establish that the response is the type that we expect. 
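+ # The scalar fields set on return_value above must survive the
+ # proto -> JSON -> proto round trip performed by the mocked response.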
+ assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_create_deidentify_template_rest_required_fields( + request_type=dlp.CreateDeidentifyTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
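+ # For this POST-style stub the request message is supplied both as the
+ # body and as the query params of the transcode result.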
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "post",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = dlp.DeidentifyTemplate.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.create_deidentify_template(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_create_deidentify_template_rest_unset_required_fields():
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.create_deidentify_template._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(())
+ & set(
+ (
+ "parent",
+ "deidentifyTemplate",
+ )
+ )
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_deidentify_template_rest_interceptors(null_interceptor):
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DlpServiceRestInterceptor(),
+ )
+ client = DlpServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "post_create_deidentify_template"
+ ) as post, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "pre_create_deidentify_template"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = dlp.CreateDeidentifyTemplateRequest.pb(
+ dlp.CreateDeidentifyTemplateRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = dlp.DeidentifyTemplate.to_json(
+ dlp.DeidentifyTemplate()
+ )
+
+ request = dlp.CreateDeidentifyTemplateRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = dlp.DeidentifyTemplate()
+
+ client.create_deidentify_template(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_create_deidentify_template_rest_bad_request(
+ transport: str = "rest", request_type=dlp.CreateDeidentifyTemplateRequest
+):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"parent": "organizations/sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deidentify_template(request) + + +def test_create_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/deidentifyTemplates" + % client.transport._host, + args[1], + ) + + +def test_create_deidentify_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + + +def test_create_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateDeidentifyTemplateRequest, + dict, + ], +) +def test_update_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_update_deidentify_template_rest_required_fields( + request_type=dlp.UpdateDeidentifyTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
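+ # Same pattern for PATCH: the stubbed transcode result carries the update
+ # request as both body and query params.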
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "patch",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = dlp.DeidentifyTemplate.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.update_deidentify_template(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_update_deidentify_template_rest_unset_required_fields():
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.update_deidentify_template._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_deidentify_template_rest_interceptors(null_interceptor):
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DlpServiceRestInterceptor(),
+ )
+ client = DlpServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "post_update_deidentify_template"
+ ) as post, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "pre_update_deidentify_template"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = dlp.UpdateDeidentifyTemplateRequest.pb(
+ dlp.UpdateDeidentifyTemplateRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = dlp.DeidentifyTemplate.to_json(
+ dlp.DeidentifyTemplate()
+ )
+
+ request = dlp.UpdateDeidentifyTemplateRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = dlp.DeidentifyTemplate()
+
+ client.update_deidentify_template(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_update_deidentify_template_rest_bad_request(
+ transport: str = "rest", request_type=dlp.UpdateDeidentifyTemplateRequest
+):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_deidentify_template(request) + + +def test_update_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/deidentifyTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deidentify_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetDeidentifyTemplateRequest, + dict, + ], +) +def test_get_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deidentify_template(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + + +def test_get_deidentify_template_rest_required_fields( + request_type=dlp.GetDeidentifyTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "get",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = dlp.DeidentifyTemplate.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.get_deidentify_template(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_get_deidentify_template_rest_unset_required_fields():
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.get_deidentify_template._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_deidentify_template_rest_interceptors(null_interceptor):
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DlpServiceRestInterceptor(),
+ )
+ client = DlpServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "post_get_deidentify_template"
+ ) as post, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "pre_get_deidentify_template"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = dlp.GetDeidentifyTemplateRequest.pb(
+ dlp.GetDeidentifyTemplateRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = dlp.DeidentifyTemplate.to_json(
+ dlp.DeidentifyTemplate()
+ )
+
+ request = dlp.GetDeidentifyTemplateRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = dlp.DeidentifyTemplate()
+
+ client.get_deidentify_template(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_get_deidentify_template_rest_bad_request(
+ transport: str = "rest", request_type=dlp.GetDeidentifyTemplateRequest
+):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deidentify_template(request) + + +def test_get_deidentify_template_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DeidentifyTemplate() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DeidentifyTemplate.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/deidentifyTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deidentify_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), + name="name_value", + ) + + +def test_get_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListDeidentifyTemplatesRequest, + dict, + ], +) +def test_list_deidentify_templates_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deidentify_templates(request) + + # Establish that the response is the type that we expect. 
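+ # list_* calls return a pager object rather than the raw response; paging
+ # behavior itself is exercised in the *_rest_pager test below.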
+ assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_deidentify_templates_rest_required_fields( + request_type=dlp.ListDeidentifyTemplatesRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deidentify_templates._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deidentify_templates._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "location_id", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "get",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.list_deidentify_templates(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_list_deidentify_templates_rest_unset_required_fields():
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.list_deidentify_templates._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(
+ (
+ "locationId",
+ "orderBy",
+ "pageSize",
+ "pageToken",
+ )
+ )
+ & set(("parent",))
+ )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_deidentify_templates_rest_interceptors(null_interceptor):
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DlpServiceRestInterceptor(),
+ )
+ client = DlpServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "post_list_deidentify_templates"
+ ) as post, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates"
+ ) as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ pb_message = dlp.ListDeidentifyTemplatesRequest.pb(
+ dlp.ListDeidentifyTemplatesRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+ req.return_value._content = dlp.ListDeidentifyTemplatesResponse.to_json(
+ dlp.ListDeidentifyTemplatesResponse()
+ )
+
+ request = dlp.ListDeidentifyTemplatesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = dlp.ListDeidentifyTemplatesResponse()
+
+ client.list_deidentify_templates(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
+
+def test_list_deidentify_templates_rest_bad_request(
+ transport: str = "rest", request_type=dlp.ListDeidentifyTemplatesRequest
+):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"parent": "organizations/sample1"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deidentify_templates(request) + + +def test_list_deidentify_templates_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDeidentifyTemplatesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDeidentifyTemplatesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deidentify_templates(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/deidentifyTemplates" + % client.transport._host, + args[1], + ) + + +def test_list_deidentify_templates_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), + parent="parent_value", + ) + + +def test_list_deidentify_templates_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token="abc", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token="def", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token="ghi", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + dlp.ListDeidentifyTemplatesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_deidentify_templates(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) for i in results) + + pages = list(client.list_deidentify_templates(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteDeidentifyTemplateRequest, + dict, + ], +) +def test_delete_deidentify_template_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deidentify_template(request) + + # Establish that the response is the type that we expect. 
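+ # As with inspect templates, the empty response body means the delete
+ # call yields None.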
+ assert response is None + + +def test_delete_deidentify_template_rest_required_fields( + request_type=dlp.DeleteDeidentifyTemplateRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deidentify_template._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "delete",
+ "query_params": pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+
+ response = client.delete_deidentify_template(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_delete_deidentify_template_rest_unset_required_fields():
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.delete_deidentify_template._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_deidentify_template_rest_interceptors(null_interceptor):
+ transport = transports.DlpServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None
+ if null_interceptor
+ else transports.DlpServiceRestInterceptor(),
+ )
+ client = DlpServiceClient(transport=transport)
+ with mock.patch.object(
+ type(client.transport._session), "request"
+ ) as req, mock.patch.object(
+ path_template, "transcode"
+ ) as transcode, mock.patch.object(
+ transports.DlpServiceRestInterceptor, "pre_delete_deidentify_template"
+ ) as pre:
+ pre.assert_not_called()
+ pb_message = dlp.DeleteDeidentifyTemplateRequest.pb(
+ dlp.DeleteDeidentifyTemplateRequest()
+ )
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = Response()
+ req.return_value.status_code = 200
+ req.return_value.request = PreparedRequest()
+
+ request = dlp.DeleteDeidentifyTemplateRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+
+ client.delete_deidentify_template(
+ request,
+ metadata=[
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ],
+ )
+
+ pre.assert_called_once()
+
+
+def test_delete_deidentify_template_rest_bad_request(
+ transport: str = "rest", request_type=dlp.DeleteDeidentifyTemplateRequest
+):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {"name": "organizations/sample1/deidentifyTemplates/sample2"}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises(
+ core_exceptions.BadRequest
+ ):
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 400
+ response_value.request = Request()
+ req.return_value = response_value
+ client.delete_deidentify_template(request)
+
+
+def test_delete_deidentify_template_rest_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/deidentifyTemplates/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deidentify_template(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/deidentifyTemplates/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deidentify_template_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name="name_value", + ) + + +def test_delete_deidentify_template_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateJobTriggerRequest, + dict, + ], +) +def test_create_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job_trigger(request) + + # Establish that the response is the type that we expect. 
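+ # Enum fields (JobTrigger.Status) round-trip through the JSON mock just
+ # like the scalar fields asserted below.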
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_rest_required_fields( + request_type=dlp.CreateJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "jobTrigger", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_create_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.CreateJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.create_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.CreateJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
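+    # The transport translates an HTTP 400 from the mocked session into
+    # core_exceptions.BadRequest, which pytest.raises expects below.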
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job_trigger(request) + + +def test_create_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1] + ) + + +def test_create_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + + +def test_create_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateJobTriggerRequest, + dict, + ], +) +def test_update_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_rest_required_fields( + request_type=dlp.UpdateJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
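+            # Update methods transcode to HTTP PATCH, which is why the
+            # stubbed transcode result below carries the "patch" verb.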
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_update_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_update_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.UpdateJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.update_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.UpdateJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job_trigger(request) + + +def test_update_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
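+    # The flattened name/job_trigger/update_mask kwargs are assembled into an
+    # UpdateJobTriggerRequest internally; sample_request supplies a resource
+    # name that satisfies the method's http rule.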
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/jobTriggers/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1] + ) + + +def test_update_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.HybridInspectJobTriggerRequest, + dict, + ], +) +def test_hybrid_inspect_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobTriggers/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_rest_required_fields( + request_type=dlp.HybridInspectJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).hybrid_inspect_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.hybrid_inspect_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_hybrid_inspect_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.hybrid_inspect_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectJobTriggerRequest.pb( + dlp.HybridInspectJobTriggerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json( + dlp.HybridInspectResponse() + ) + + request = dlp.HybridInspectJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.HybridInspectJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobTriggers/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_job_trigger(request) + + +def test_hybrid_inspect_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/jobTriggers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.hybrid_inspect_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/jobTriggers/*}:hybridInspect" + % client.transport._host, + args[1], + ) + + +def test_hybrid_inspect_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name="name_value", + ) + + +def test_hybrid_inspect_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetJobTriggerRequest, + dict, + ], +) +def test_get_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_rest_required_fields(request_type=dlp.GetJobTriggerRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_get_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.JobTrigger.to_json(dlp.JobTrigger()) + + request = dlp.GetJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.JobTrigger() + + client.get_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.GetJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job_trigger(request) + + +def test_get_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
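+    # path_template.validate() below confirms that the URL the transport
+    # built matches the declared {name=projects/*/jobTriggers/*} rule.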
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.JobTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/jobTriggers/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.JobTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1] + ) + + +def test_get_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name="name_value", + ) + + +def test_get_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListJobTriggersRequest, + dict, + ], +) +def test_list_job_triggers_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_job_triggers(request) + + # Establish that the response is the type that we expect. 
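+    # List methods wrap the raw ListJobTriggersResponse in a pager that
+    # lazily fetches further pages on iteration.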
+ assert isinstance(response, pagers.ListJobTriggersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_job_triggers_rest_required_fields( + request_type=dlp.ListJobTriggersRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_job_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_job_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "location_id", + "order_by", + "page_size", + "page_token", + "type_", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_job_triggers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_job_triggers_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_job_triggers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "locationId", + "orderBy", + "pageSize", + "pageToken", + "type", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_job_triggers_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_job_triggers" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_job_triggers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListJobTriggersResponse.to_json( + dlp.ListJobTriggersResponse() + ) + + request = dlp.ListJobTriggersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListJobTriggersResponse() + + client.list_job_triggers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_job_triggers_rest_bad_request( + transport: str = "rest", request_type=dlp.ListJobTriggersRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_job_triggers(request) + + +def test_list_job_triggers_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListJobTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListJobTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_job_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/jobTriggers" % client.transport._host, args[1] + ) + + +def test_list_job_triggers_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent="parent_value", + ) + + +def test_list_job_triggers_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
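+        # The pager issues one HTTP request per page, following
+        # next_page_token until it is empty, so req.side_effect queues the
+        # four page responses twice: once for list(pager), once for .pages.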
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse( + job_triggers=[], + next_page_token="def", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + ], + next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[ + dlp.JobTrigger(), + dlp.JobTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListJobTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_job_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) for i in results) + + pages = list(client.list_job_triggers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteJobTriggerRequest, + dict, + ], +) +def test_delete_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_job_trigger(request) + + # Establish that the response is the type that we expect. 
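+    # DeleteJobTrigger returns google.protobuf.Empty, which the client
+    # surfaces as None.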
+ assert response is None + + +def test_delete_job_trigger_rest_required_fields( + request_type=dlp.DeleteJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_delete_job_trigger" + ) as pre: + pre.assert_not_called() + pb_message = dlp.DeleteJobTriggerRequest.pb(dlp.DeleteJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.DeleteJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_job_trigger(request) + + +def test_delete_job_trigger_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/jobTriggers/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_job_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/jobTriggers/*}" % client.transport._host, args[1] + ) + + +def test_delete_job_trigger_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), + name="name_value", + ) + + +def test_delete_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ActivateJobTriggerRequest, + dict, + ], +) +def test_activate_job_trigger_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.activate_job_trigger(request) + + # Establish that the response is the type that we expect. 
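+    # Activating a trigger starts an immediate run, so the call returns the
+    # resulting DlpJob rather than the trigger itself.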
+ assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_activate_job_trigger_rest_required_fields( + request_type=dlp.ActivateJobTriggerRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).activate_job_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.activate_job_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_activate_job_trigger_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.activate_job_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_activate_job_trigger_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_activate_job_trigger" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_activate_job_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.ActivateJobTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.activate_job_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_activate_job_trigger_rest_bad_request( + transport: str = "rest", request_type=dlp.ActivateJobTriggerRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/jobTriggers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.activate_job_trigger(request) + + +def test_activate_job_trigger_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateDlpJobRequest, + dict, + ], +) +def test_create_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_dlp_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_create_dlp_job_rest_required_fields(request_type=dlp.CreateDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. 
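+    # Every REST call appends $alt=json;enum-encoding=int, requesting JSON
+    # payloads with enums encoded as integers; expected_params checks this.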
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_dlp_job" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_create_dlp_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.CreateDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.create_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.CreateDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_dlp_job(request) + + +def test_create_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1] + ) + + +def test_create_dlp_job_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + +def test_create_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListDlpJobsRequest, + dict, + ], +) +def test_list_dlp_jobs_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
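+ # A non-empty next_page_token marks this as one page of a larger result
+ # set, so the client wraps it in a ListDlpJobsPager below.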
+ return_value = dlp.ListDlpJobsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ pb_return_value = dlp.ListDlpJobsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(pb_return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.list_dlp_jobs(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListDlpJobsPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_dlp_jobs_rest_required_fields(request_type=dlp.ListDlpJobsRequest):
+ transport_class = transports.DlpServiceRestTransport
+
+ request_init = {}
+ request_init["parent"] = ""
+ request = request_type(**request_init)
+ pb_request = request_type.pb(request)
+ jsonified_request = json.loads(
+ json_format.MessageToJson(
+ pb_request,
+ including_default_value_fields=False,
+ use_integers_for_enums=False,
+ )
+ )
+
+ # verify fields with default values are dropped
+
+ unset_fields = transport_class(
+ credentials=ga_credentials.AnonymousCredentials()
+ ).list_dlp_jobs._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with default values are now present
+
+ jsonified_request["parent"] = "parent_value"
+
+ unset_fields = transport_class(
+ credentials=ga_credentials.AnonymousCredentials()
+ ).list_dlp_jobs._get_unset_required_fields(jsonified_request)
+ # Check that path parameters and body parameters are not mixed in
+ # with the optional query parameters.
+ assert not set(unset_fields) - set(
+ (
+ "filter",
+ "location_id",
+ "order_by",
+ "page_size",
+ "page_token",
+ "type_",
+ )
+ )
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+ assert "parent" in jsonified_request
+ assert jsonified_request["parent"] == "parent_value"
+
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = dlp.ListDlpJobsResponse()
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, "transcode") as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
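+ # transcode() is stubbed with a minimal result so the test controls
+ # exactly which uri, method, and query_params the transport sees.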
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_dlp_jobs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_dlp_jobs_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_dlp_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "locationId", + "orderBy", + "pageSize", + "pageToken", + "type", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dlp_jobs_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_dlp_jobs" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListDlpJobsResponse.to_json( + dlp.ListDlpJobsResponse() + ) + + request = dlp.ListDlpJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListDlpJobsResponse() + + client.list_dlp_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_dlp_jobs_rest_bad_request( + transport: str = "rest", request_type=dlp.ListDlpJobsRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_dlp_jobs(request) + + +def test_list_dlp_jobs_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListDlpJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListDlpJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_dlp_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*}/dlpJobs" % client.transport._host, args[1] + ) + + +def test_list_dlp_jobs_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), + parent="parent_value", + ) + + +def test_list_dlp_jobs_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
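+ # Four pages are queued up below (3 + 0 + 1 + 2 jobs); the missing
+ # next_page_token on the final page is what stops the pager.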
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + dlp.DlpJob(), + ], + next_page_token="abc", + ), + dlp.ListDlpJobsResponse( + jobs=[], + next_page_token="def", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + ], + next_page_token="ghi", + ), + dlp.ListDlpJobsResponse( + jobs=[ + dlp.DlpJob(), + dlp.DlpJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListDlpJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_dlp_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) for i in results) + + pages = list(client.list_dlp_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetDlpJobRequest, + dict, + ], +) +def test_get_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob( + name="name_value", + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_dlp_job(request) + + # Establish that the response is the type that we expect. 
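+ # The field-by-field checks below confirm the JSON payload round-tripped
+ # back into a typed dlp.DlpJob message.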
+ assert isinstance(response, dlp.DlpJob) + assert response.name == "name_value" + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_get_dlp_job_rest_required_fields(request_type=dlp.GetDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.DlpJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
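+ # For this GET rule the stub omits "body" entirely, so the transport
+ # has nothing to send but the query string.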
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_dlp_job" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_get_dlp_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.DlpJob.to_json(dlp.DlpJob()) + + request = dlp.GetDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.DlpJob() + + client.get_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.GetDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_dlp_job(request) + + +def test_get_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
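+ # The flattened-call test drives the method with keyword arguments
+ # instead of a request object, then validates the resulting URI against
+ # the http rule's path template.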
+ return_value = dlp.DlpJob() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/dlpJobs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.DlpJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1] + ) + + +def test_get_dlp_job_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), + name="name_value", + ) + + +def test_get_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteDlpJobRequest, + dict, + ], +) +def test_delete_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_dlp_job(request) + + # Establish that the response is the type that we expect. 
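+ # DeleteDlpJob maps to google.protobuf.Empty, which the client surfaces
+ # as None; an empty body plus HTTP 200 is the whole success contract.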
+ assert response is None + + +def test_delete_dlp_job_rest_required_fields(request_type=dlp.DeleteDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_delete_dlp_job" + ) as pre: + pre.assert_not_called() + pb_message = dlp.DeleteDlpJobRequest.pb(dlp.DeleteDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.DeleteDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_dlp_job(request) + + +def test_delete_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/dlpJobs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/dlpJobs/*}" % client.transport._host, args[1] + ) + + +def test_delete_dlp_job_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name="name_value", + ) + + +def test_delete_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CancelDlpJobRequest, + dict, + ], +) +def test_cancel_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_cancel_dlp_job_rest_required_fields(request_type=dlp.CancelDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
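+ # CancelDlpJob POSTs the request message as the body yet returns an
+ # empty payload, hence json_return_value is the empty string.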
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_cancel_dlp_job" + ) as pre: + pre.assert_not_called() + pb_message = dlp.CancelDlpJobRequest.pb(dlp.CancelDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.CancelDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.cancel_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_cancel_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.CancelDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/dlpJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_dlp_job(request) + + +def test_cancel_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.CreateStoredInfoTypeRequest, + dict, + ], +) +def test_create_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
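+ # Patching on type(client.transport._session) swaps the method out on
+ # the session's class, so the already-constructed session instance picks
+ # up the mock without being rebuilt.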
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +def test_create_stored_info_type_rest_required_fields( + request_type=dlp.CreateStoredInfoTypeRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_stored_info_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "config", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_stored_info_type" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_create_stored_info_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.CreateStoredInfoTypeRequest.pb( + dlp.CreateStoredInfoTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.CreateStoredInfoTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.create_stored_info_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_stored_info_type_rest_bad_request( + transport: str = "rest", request_type=dlp.CreateStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_stored_info_type(request) + + +def test_create_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, + args[1], + ) + + +def test_create_stored_info_type_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + + +def test_create_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.UpdateStoredInfoTypeRequest, + dict, + ], +) +def test_update_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_stored_info_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +def test_update_stored_info_type_rest_required_fields( + request_type=dlp.UpdateStoredInfoTypeRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
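+ # UpdateStoredInfoType is exposed over PATCH; the stub mirrors that verb
+ # so req.call_args reflects what the real http rule would send.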
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_stored_info_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_update_stored_info_type" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_update_stored_info_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.UpdateStoredInfoTypeRequest.pb( + dlp.UpdateStoredInfoTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.UpdateStoredInfoTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.update_stored_info_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_stored_info_type_rest_bad_request( + transport: str = "rest", request_type=dlp.UpdateStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_stored_info_type(request) + + +def test_update_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/storedInfoTypes/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, + args[1], + ) + + +def test_update_stored_info_type_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.GetStoredInfoTypeRequest, + dict, + ], +) +def test_get_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
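+ # The stored-info-type tests run under an organizations/* parent,
+ # unlike the job tests above, which use projects/* resources.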
+ return_value = dlp.StoredInfoType( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_stored_info_type(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == "name_value" + + +def test_get_stored_info_type_rest_required_fields( + request_type=dlp.GetStoredInfoTypeRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_stored_info_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_stored_info_type" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_get_stored_info_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.StoredInfoType.to_json(dlp.StoredInfoType()) + + request = dlp.GetStoredInfoTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.StoredInfoType() + + client.get_stored_info_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_stored_info_type_rest_bad_request( + transport: str = "rest", request_type=dlp.GetStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
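+    # A mocked response with status_code 400 is translated by google.api_core
+    # into core_exceptions.BadRequest, which is the error asserted here.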
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_stored_info_type(request) + + +def test_get_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.StoredInfoType() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/storedInfoTypes/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.StoredInfoType.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, + args[1], + ) + + +def test_get_stored_info_type_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), + name="name_value", + ) + + +def test_get_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.ListStoredInfoTypesRequest, + dict, + ], +) +def test_list_stored_info_types_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_stored_info_types(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListStoredInfoTypesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_stored_info_types_rest_required_fields( + request_type=dlp.ListStoredInfoTypesRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_stored_info_types._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_stored_info_types._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "location_id", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
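+            # Independent of the request fields, the REST transport appends
+            # the system parameter $alt=json;enum-encoding=int (JSON wire
+            # format with integer enums); the expected_params assertion below
+            # checks exactly that.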
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_stored_info_types(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_stored_info_types_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_stored_info_types._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "locationId", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_stored_info_types_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_stored_info_types" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_list_stored_info_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.ListStoredInfoTypesResponse.to_json( + dlp.ListStoredInfoTypesResponse() + ) + + request = dlp.ListStoredInfoTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.ListStoredInfoTypesResponse() + + client.list_stored_info_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_stored_info_types_rest_bad_request( + transport: str = "rest", request_type=dlp.ListStoredInfoTypesRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_stored_info_types(request) + + +def test_list_stored_info_types_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.ListStoredInfoTypesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.ListStoredInfoTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_stored_info_types(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=organizations/*}/storedInfoTypes" % client.transport._host, + args[1], + ) + + +def test_list_stored_info_types_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), + parent="parent_value", + ) + + +def test_list_stored_info_types_rest_pager(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token="abc", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token="def", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token="ghi", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dlp.ListStoredInfoTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1"} + + pager = client.list_stored_info_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) for i in results) + + pages = list(client.list_stored_info_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.DeleteStoredInfoTypeRequest, + dict, + ], +) +def test_delete_stored_info_type_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_stored_info_type(request) + + # Establish that the response is the type that we expect. 
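+    # DeleteStoredInfoType maps to google.protobuf.Empty on the wire, so the
+    # mocked body above is an empty string and the client surfaces None.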
+ assert response is None + + +def test_delete_stored_info_type_rest_required_fields( + request_type=dlp.DeleteStoredInfoTypeRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_stored_info_type._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_stored_info_type(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_stored_info_type_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_stored_info_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_stored_info_type_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_delete_stored_info_type" + ) as pre: + pre.assert_not_called() + pb_message = dlp.DeleteStoredInfoTypeRequest.pb( + dlp.DeleteStoredInfoTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.DeleteStoredInfoTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_stored_info_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_stored_info_type_rest_bad_request( + transport: str = "rest", request_type=dlp.DeleteStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/storedInfoTypes/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_stored_info_type(request) + + +def test_delete_stored_info_type_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/storedInfoTypes/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_stored_info_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=organizations/*/storedInfoTypes/*}" % client.transport._host, + args[1], + ) + + +def test_delete_stored_info_type_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name="name_value", + ) + + +def test_delete_stored_info_type_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.HybridInspectDlpJobRequest, + dict, + ], +) +def test_hybrid_inspect_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_rest_required_fields( + request_type=dlp.HybridInspectDlpJobRequest, +): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).hybrid_inspect_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
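+            # Unlike the GET-mapped methods above, hybridInspect transcodes
+            # to a POST, so the faked transcode result below also carries the
+            # request as a body.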
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.hybrid_inspect_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_hybrid_inspect_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.hybrid_inspect_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job" + ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = dlp.HybridInspectResponse.to_json( + dlp.HybridInspectResponse() + ) + + request = dlp.HybridInspectDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_hybrid_inspect_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.HybridInspectDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.hybrid_inspect_dlp_job(request) + + +def test_hybrid_inspect_dlp_job_rest_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dlp.HybridInspectResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = dlp.HybridInspectResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.hybrid_inspect_dlp_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect" + % client.transport._host, + args[1], + ) + + +def test_hybrid_inspect_dlp_job_rest_flattened_error(transport: str = "rest"): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), + name="name_value", + ) + + +def test_hybrid_inspect_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dlp.FinishDlpJobRequest, + dict, + ], +) +def test_finish_dlp_job_rest(request_type): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.finish_dlp_job(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_finish_dlp_job_rest_required_fields(request_type=dlp.FinishDlpJobRequest): + transport_class = transports.DlpServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).finish_dlp_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.finish_dlp_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_finish_dlp_job_rest_unset_required_fields(): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.finish_dlp_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_finish_dlp_job_rest_interceptors(null_interceptor): + transport = transports.DlpServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DlpServiceRestInterceptor(), + ) + client = DlpServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DlpServiceRestInterceptor, "pre_finish_dlp_job" + ) as pre: + pre.assert_not_called() + pb_message = dlp.FinishDlpJobRequest.pb(dlp.FinishDlpJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = dlp.FinishDlpJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.finish_dlp_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_finish_dlp_job_rest_bad_request( + transport: str = "rest", request_type=dlp.FinishDlpJobRequest +): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dlpJobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.finish_dlp_job(request) + + +def test_finish_dlp_job_rest_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DlpServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DlpServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DlpServiceGrpcTransport, + ) + + +def test_dlp_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_dlp_service_base_transport(): + # Instantiate the base transport. 
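+    # Patching __init__ to return None lets the abstract base transport be
+    # constructed without real setup; each RPC method on it should then raise
+    # NotImplementedError until a concrete transport overrides it.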
+ with mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "inspect_content", + "redact_image", + "deidentify_content", + "reidentify_content", + "list_info_types", + "create_inspect_template", + "update_inspect_template", + "get_inspect_template", + "list_inspect_templates", + "delete_inspect_template", + "create_deidentify_template", + "update_deidentify_template", + "get_deidentify_template", + "list_deidentify_templates", + "delete_deidentify_template", + "create_job_trigger", + "update_job_trigger", + "hybrid_inspect_job_trigger", + "get_job_trigger", + "list_job_triggers", + "delete_job_trigger", + "activate_job_trigger", + "create_dlp_job", + "list_dlp_jobs", + "get_dlp_job", + "delete_dlp_job", + "cancel_dlp_job", + "create_stored_info_type", + "update_stored_info_type", + "get_stored_info_type", + "list_stored_info_types", + "delete_stored_info_type", + "hybrid_inspect_dlp_job", + "finish_dlp_job", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dlp_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_dlp_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport() + adc.assert_called_once() + + +def test_dlp_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
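+    # google.auth.default() is the Application Default Credentials entry
+    # point (environment variable, gcloud user credentials, or metadata
+    # server); mocking it verifies the fallback without touching any of those
+    # sources.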
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + ], +) +def test_dlp_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + transports.DlpServiceRestTransport, + ], +) +def test_dlp_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DlpServiceGrpcTransport, grpc_helpers), + (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport], +) +def test_dlp_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_dlp_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DlpServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_dlp_service_host_no_port(transport_name): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="dlp.googleapis.com"), + transport=transport_name, + ) + assert client.transport._host == ( + "dlp.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dlp.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_dlp_service_host_with_port(transport_name): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dlp.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dlp.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dlp.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_dlp_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DlpServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DlpServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.inspect_content._session + session2 = client2.transport.inspect_content._session + assert session1 != session2 + session1 = client1.transport.redact_image._session + session2 = client2.transport.redact_image._session + assert session1 != session2 + session1 = client1.transport.deidentify_content._session + session2 = client2.transport.deidentify_content._session + assert session1 != session2 + session1 = client1.transport.reidentify_content._session + session2 = client2.transport.reidentify_content._session + assert session1 != session2 + session1 = 
client1.transport.list_info_types._session + session2 = client2.transport.list_info_types._session + assert session1 != session2 + session1 = client1.transport.create_inspect_template._session + session2 = client2.transport.create_inspect_template._session + assert session1 != session2 + session1 = client1.transport.update_inspect_template._session + session2 = client2.transport.update_inspect_template._session + assert session1 != session2 + session1 = client1.transport.get_inspect_template._session + session2 = client2.transport.get_inspect_template._session + assert session1 != session2 + session1 = client1.transport.list_inspect_templates._session + session2 = client2.transport.list_inspect_templates._session + assert session1 != session2 + session1 = client1.transport.delete_inspect_template._session + session2 = client2.transport.delete_inspect_template._session + assert session1 != session2 + session1 = client1.transport.create_deidentify_template._session + session2 = client2.transport.create_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.update_deidentify_template._session + session2 = client2.transport.update_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.get_deidentify_template._session + session2 = client2.transport.get_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.list_deidentify_templates._session + session2 = client2.transport.list_deidentify_templates._session + assert session1 != session2 + session1 = client1.transport.delete_deidentify_template._session + session2 = client2.transport.delete_deidentify_template._session + assert session1 != session2 + session1 = client1.transport.create_job_trigger._session + session2 = client2.transport.create_job_trigger._session + assert session1 != session2 + session1 = client1.transport.update_job_trigger._session + session2 = client2.transport.update_job_trigger._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_job_trigger._session + session2 = client2.transport.hybrid_inspect_job_trigger._session + assert session1 != session2 + session1 = client1.transport.get_job_trigger._session + session2 = client2.transport.get_job_trigger._session + assert session1 != session2 + session1 = client1.transport.list_job_triggers._session + session2 = client2.transport.list_job_triggers._session + assert session1 != session2 + session1 = client1.transport.delete_job_trigger._session + session2 = client2.transport.delete_job_trigger._session + assert session1 != session2 + session1 = client1.transport.activate_job_trigger._session + session2 = client2.transport.activate_job_trigger._session + assert session1 != session2 + session1 = client1.transport.create_dlp_job._session + session2 = client2.transport.create_dlp_job._session + assert session1 != session2 + session1 = client1.transport.list_dlp_jobs._session + session2 = client2.transport.list_dlp_jobs._session + assert session1 != session2 + session1 = client1.transport.get_dlp_job._session + session2 = client2.transport.get_dlp_job._session + assert session1 != session2 + session1 = client1.transport.delete_dlp_job._session + session2 = client2.transport.delete_dlp_job._session + assert session1 != session2 + session1 = client1.transport.cancel_dlp_job._session + session2 = client2.transport.cancel_dlp_job._session + assert session1 != session2 + session1 = client1.transport.create_stored_info_type._session + session2 = 
client2.transport.create_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.update_stored_info_type._session + session2 = client2.transport.update_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.get_stored_info_type._session + session2 = client2.transport.get_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.list_stored_info_types._session + session2 = client2.transport.list_stored_info_types._session + assert session1 != session2 + session1 = client1.transport.delete_stored_info_type._session + session2 = client2.transport.delete_stored_info_type._session + assert session1 != session2 + session1 = client1.transport.hybrid_inspect_dlp_job._session + session2 = client2.transport.hybrid_inspect_dlp_job._session + assert session1 != session2 + session1 = client1.transport.finish_dlp_job._session + session2 = client2.transport.finish_dlp_job._session + assert session1 != session2 + + +def test_dlp_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_dlp_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport], +) +def test_dlp_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport], +) +def test_dlp_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_deidentify_template_path(): + organization = "squid" + deidentify_template = "clam" + expected = ( + "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format( + organization=organization, + deidentify_template=deidentify_template, + ) + ) + actual = DlpServiceClient.deidentify_template_path( + organization, deidentify_template + ) + assert expected == actual + + +def test_parse_deidentify_template_path(): + expected = { + "organization": "whelk", + "deidentify_template": "octopus", + } + path = DlpServiceClient.deidentify_template_path(**expected) + + # Check that the path construction is reversible. 
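+ # The parse method should invert deidentify_template_path(), recovering the + # exact keyword arguments that were formatted into the resource name.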
+ actual = DlpServiceClient.parse_deidentify_template_path(path) + assert expected == actual + + +def test_dlp_content_path(): + project = "oyster" + expected = "projects/{project}/dlpContent".format( + project=project, + ) + actual = DlpServiceClient.dlp_content_path(project) + assert expected == actual + + +def test_parse_dlp_content_path(): + expected = { + "project": "nudibranch", + } + path = DlpServiceClient.dlp_content_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_content_path(path) + assert expected == actual + + +def test_dlp_job_path(): + project = "cuttlefish" + dlp_job = "mussel" + expected = "projects/{project}/dlpJobs/{dlp_job}".format( + project=project, + dlp_job=dlp_job, + ) + actual = DlpServiceClient.dlp_job_path(project, dlp_job) + assert expected == actual + + +def test_parse_dlp_job_path(): + expected = { + "project": "winkle", + "dlp_job": "nautilus", + } + path = DlpServiceClient.dlp_job_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_job_path(path) + assert expected == actual + + +def test_finding_path(): + project = "scallop" + location = "abalone" + finding = "squid" + expected = "projects/{project}/locations/{location}/findings/{finding}".format( + project=project, + location=location, + finding=finding, + ) + actual = DlpServiceClient.finding_path(project, location, finding) + assert expected == actual + + +def test_parse_finding_path(): + expected = { + "project": "clam", + "location": "whelk", + "finding": "octopus", + } + path = DlpServiceClient.finding_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_finding_path(path) + assert expected == actual + + +def test_inspect_template_path(): + organization = "oyster" + inspect_template = "nudibranch" + expected = ( + "organizations/{organization}/inspectTemplates/{inspect_template}".format( + organization=organization, + inspect_template=inspect_template, + ) + ) + actual = DlpServiceClient.inspect_template_path(organization, inspect_template) + assert expected == actual + + +def test_parse_inspect_template_path(): + expected = { + "organization": "cuttlefish", + "inspect_template": "mussel", + } + path = DlpServiceClient.inspect_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_inspect_template_path(path) + assert expected == actual + + +def test_job_trigger_path(): + project = "winkle" + job_trigger = "nautilus" + expected = "projects/{project}/jobTriggers/{job_trigger}".format( + project=project, + job_trigger=job_trigger, + ) + actual = DlpServiceClient.job_trigger_path(project, job_trigger) + assert expected == actual + + +def test_parse_job_trigger_path(): + expected = { + "project": "scallop", + "job_trigger": "abalone", + } + path = DlpServiceClient.job_trigger_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_job_trigger_path(path) + assert expected == actual + + +def test_stored_info_type_path(): + organization = "squid" + stored_info_type = "clam" + expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format( + organization=organization, + stored_info_type=stored_info_type, + ) + actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) + assert expected == actual + + +def test_parse_stored_info_type_path(): + expected = { + "organization": "whelk", + "stored_info_type": "octopus", + } + path = DlpServiceClient.stored_info_type_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_stored_info_type_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DlpServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DlpServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DlpServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DlpServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DlpServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DlpServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = DlpServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DlpServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DlpServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DlpServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DlpServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DlpServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DlpServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
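+ # Entering the client as a context manager should not close the transport; + # close() fires only when the with-block exits.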
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-documentai/CHANGELOG.md b/packages/google-cloud-documentai/CHANGELOG.md index 101ca3f3f8c3..ecc1ce304dec 100644 --- a/packages/google-cloud-documentai/CHANGELOG.md +++ b/packages/google-cloud-documentai/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [2.20.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.19.0...google-cloud-documentai-v2.20.0) (2023-09-25) + + +### Features + +* Added `ListDocuments()` method for Document AI Workbench training documents ([2fb9078](https://github.com/googleapis/google-cloud-python/commit/2fb9078ef80ecbcc39c9708d49b7707e4708205a)) +* Added SummaryOptions to ProcessOptions for the Summarizer processor ([2fb9078](https://github.com/googleapis/google-cloud-python/commit/2fb9078ef80ecbcc39c9708d49b7707e4708205a)) +* make page_range field public ([2fb9078](https://github.com/googleapis/google-cloud-python/commit/2fb9078ef80ecbcc39c9708d49b7707e4708205a)) + + +### Bug Fixes + +* `OcrConfig.compute_style_info` is deprecated. Use `PremiumFeatures.compute_style_info` instead. ([2fb9078](https://github.com/googleapis/google-cloud-python/commit/2fb9078ef80ecbcc39c9708d49b7707e4708205a)) + ## [2.19.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v2.18.1...google-cloud-documentai-v2.19.0) (2023-09-21) diff --git a/packages/google-cloud-documentai/CONTRIBUTING.rst b/packages/google-cloud-documentai/CONTRIBUTING.rst index 2b320e2af7b3..45855b998443 100644 --- a/packages/google-cloud-documentai/CONTRIBUTING.rst +++ b/packages/google-cloud-documentai/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system- -- -k + $ nox -s system-3.11 -- -k .. note:: - System tests are only configured to run under Python. + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local diff --git a/packages/google-cloud-documentai/docs/documentai_v1beta3/document_service.rst b/packages/google-cloud-documentai/docs/documentai_v1beta3/document_service.rst index 85f24b292a51..4ac25775f8b6 100644 --- a/packages/google-cloud-documentai/docs/documentai_v1beta3/document_service.rst +++ b/packages/google-cloud-documentai/docs/documentai_v1beta3/document_service.rst @@ -4,3 +4,7 @@ DocumentService .. automodule:: google.cloud.documentai_v1beta3.services.document_service :members: :inherited-members: + +.. automodule:: google.cloud.documentai_v1beta3.services.document_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index 897a8fc0d7c8..8ae9ef586b9c 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index 897a8fc0d7c8..8ae9ef586b9c 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_io.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_io.py index 8f7659380a5a..493827a72442 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_io.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_io.py @@ -48,7 +48,7 @@ class RawDocument(proto.Message): The display name of the document, it supports all Unicode characters except the following: ``*``, ``?``, ``[``, ``]``, ``%``, ``{``, ``}``,\ ``'``, ``\"``, ``,`` ``~``, ``=`` and - ``:`` are reserved. If not specified, a default ID will be + ``:`` are reserved. If not specified, a default ID is generated. """ @@ -260,8 +260,16 @@ class OcrConfig(proto.Message): Includes symbol level OCR information if set to true. compute_style_info (bool): - Turn on font id model and returns font style information. - Use PremiumFeatures.compute_style_info instead. + Turn on font identification model and return font style + information. Deprecated, use + [PremiumFeatures.compute_style_info][google.cloud.documentai.v1.OcrConfig.PremiumFeatures.compute_style_info] + instead. + disable_character_boxes_detection (bool): + Turn off character box detector in OCR + engine. Character box detection is enabled by + default in OCR 2.0+ processors. + premium_features (google.cloud.documentai_v1.types.OcrConfig.PremiumFeatures): + Configurations for premium OCR features. """ class Hints(proto.Message): @@ -285,6 +293,34 @@ class Hints(proto.Message): number=1, ) + class PremiumFeatures(proto.Message): + r"""Configurations for premium OCR features. 
+ + Attributes: + enable_selection_mark_detection (bool): + Turn on selection mark detector in OCR + engine. Only available in OCR 2.0+ processors. + compute_style_info (bool): + Turn on font identification model and return + font style information. + enable_math_ocr (bool): + Turn on the model that can extract LaTeX math + formulas. + """ + + enable_selection_mark_detection: bool = proto.Field( + proto.BOOL, + number=3, + ) + compute_style_info: bool = proto.Field( + proto.BOOL, + number=4, + ) + enable_math_ocr: bool = proto.Field( + proto.BOOL, + number=5, + ) + hints: Hints = proto.Field( proto.MESSAGE, number=2, @@ -310,6 +346,15 @@ class Hints(proto.Message): proto.BOOL, number=8, ) + disable_character_boxes_detection: bool = proto.Field( + proto.BOOL, + number=10, + ) + premium_features: PremiumFeatures = proto.Field( + proto.MESSAGE, + number=11, + message=PremiumFeatures, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_processor_service.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_processor_service.py index 12fe1b1c7ed6..4a3c8dbeb9ff 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_processor_service.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/types/document_processor_service.py @@ -90,12 +90,63 @@ class ProcessOptions(proto.Message): r"""Options for Process API + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + individual_page_selector (google.cloud.documentai_v1.types.ProcessOptions.IndividualPageSelector): + Which pages to process (1-indexed). + + This field is a member of `oneof`_ ``page_range``. + from_start (int): + Only process certain pages from the start. + Process all if the document has fewer pages. + + This field is a member of `oneof`_ ``page_range``. + from_end (int): + Only process certain pages from the end, same + as above. + + This field is a member of `oneof`_ ``page_range``. ocr_config (google.cloud.documentai_v1.types.OcrConfig): Only applicable to ``OCR_PROCESSOR``. Returns error if set on other processor types. """ + class IndividualPageSelector(proto.Message): + r"""A list of individual page numbers. + + Attributes: + pages (MutableSequence[int]): + Optional. Indices of the pages (starting from + 1). 
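+ For example, ``pages=[1, 3]`` selects only the first and third pages.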
+ """ + + pages: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=1, + ) + + individual_page_selector: IndividualPageSelector = proto.Field( + proto.MESSAGE, + number=5, + oneof="page_range", + message=IndividualPageSelector, + ) + from_start: int = proto.Field( + proto.INT32, + number=6, + oneof="page_range", + ) + from_end: int = proto.Field( + proto.INT32, + number=7, + oneof="page_range", + ) ocr_config: document_io.OcrConfig = proto.Field( proto.MESSAGE, number=1, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py index 897a8fc0d7c8..8ae9ef586b9c 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/__init__.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/__init__.py index 64985136bc85..dd7585668b97 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/__init__.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/__init__.py @@ -90,12 +90,20 @@ UndeployProcessorVersionRequest, UndeployProcessorVersionResponse, ) -from .types.document_schema import DocumentSchema, EntityTypeMetadata, PropertyMetadata +from .types.document_schema import ( + DocumentSchema, + EntityTypeMetadata, + FieldExtractionMetadata, + PropertyMetadata, + SummaryOptions, +) from .types.document_service import ( BatchDeleteDocumentsMetadata, BatchDeleteDocumentsRequest, BatchDeleteDocumentsResponse, DatasetSplitType, + DocumentLabelingState, + DocumentMetadata, DocumentPageRange, GetDatasetSchemaRequest, GetDocumentRequest, @@ -103,6 +111,8 @@ ImportDocumentsMetadata, ImportDocumentsRequest, ImportDocumentsResponse, + ListDocumentsRequest, + ListDocumentsResponse, UpdateDatasetOperationMetadata, UpdateDatasetRequest, UpdateDatasetSchemaRequest, @@ -110,7 +120,7 @@ from .types.evaluation import Evaluation, EvaluationReference from .types.geometry import BoundingPoly, NormalizedVertex, Vertex from .types.operation_metadata import CommonOperationMetadata -from .types.processor import Processor, ProcessorVersion +from .types.processor import Processor, ProcessorVersion, ProcessorVersionAlias from .types.processor_type import ProcessorType __all__ = ( @@ -143,6 +153,8 @@ "DisableProcessorResponse", "Document", "DocumentId", + "DocumentLabelingState", + "DocumentMetadata", "DocumentOutputConfig", "DocumentPageRange", "DocumentProcessorServiceClient", @@ -159,6 +171,7 @@ "EvaluationReference", "FetchProcessorTypesRequest", "FetchProcessorTypesResponse", + "FieldExtractionMetadata", "GcsDocument", "GcsDocuments", "GcsPrefix", @@ -176,6 +189,8 @@ "ImportProcessorVersionMetadata", "ImportProcessorVersionRequest", "ImportProcessorVersionResponse", + "ListDocumentsRequest", + "ListDocumentsResponse", "ListEvaluationsRequest", "ListEvaluationsResponse", "ListProcessorTypesRequest", @@ -192,6 +207,7 @@ "Processor", "ProcessorType", "ProcessorVersion", + "ProcessorVersionAlias", "PropertyMetadata", "RawDocument", "ReviewDocumentOperationMetadata", @@ -201,6 +217,7 @@ 
"SetDefaultProcessorVersionMetadata", "SetDefaultProcessorVersionRequest", "SetDefaultProcessorVersionResponse", + "SummaryOptions", "TrainProcessorVersionMetadata", "TrainProcessorVersionRequest", "TrainProcessorVersionResponse", diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_metadata.json b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_metadata.json index 7853ba97a3b6..9d32da0a443b 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_metadata.json +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_metadata.json @@ -394,6 +394,11 @@ "import_documents" ] }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, "UpdateDataset": { "methods": [ "update_dataset" @@ -429,6 +434,11 @@ "import_documents" ] }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, "UpdateDataset": { "methods": [ "update_dataset" @@ -464,6 +474,11 @@ "import_documents" ] }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, "UpdateDataset": { "methods": [ "update_dataset" diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index 897a8fc0d7c8..8ae9ef586b9c 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/async_client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/async_client.py index a0c715827ead..9a57bb3aa367 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/async_client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/async_client.py @@ -2918,21 +2918,20 @@ async def sample_import_processor_version(): request (Optional[Union[google.cloud.documentai_v1beta3.types.ImportProcessorVersionRequest, dict]]): The request object. The request message for the [ImportProcessorVersion][google.cloud.documentai.v1beta3.DocumentProcessorService.ImportProcessorVersion] - method. Requirements: + method. - - The Document AI `Service - Agent `__ - of the destination project must have `Document AI - Editor - role `__ - on the source project. + The Document AI `Service + Agent `__ + of the destination project must have `Document AI Editor + role `__ + on the source project. The destination project is specified as part of the [parent][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.parent] field. The source project is specified as part of the - [source][ImportProcessorVersionRequest.processor_version_source + [source][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.processor_version_source] or - ImportProcessorVersionRequest.external_processor_version_source] + [external_processor_version_source][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.external_processor_version_source] field. parent (:class:`str`): Required. 
The destination processor name to create the diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py index dbf6d367a0a6..17711be9b4a8 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py @@ -3217,21 +3217,20 @@ def sample_import_processor_version(): request (Union[google.cloud.documentai_v1beta3.types.ImportProcessorVersionRequest, dict]): The request object. The request message for the [ImportProcessorVersion][google.cloud.documentai.v1beta3.DocumentProcessorService.ImportProcessorVersion] - method. Requirements: + method. - - The Document AI `Service - Agent `__ - of the destination project must have `Document AI - Editor - role `__ - on the source project. + The Document AI `Service + Agent `__ + of the destination project must have `Document AI Editor + role `__ + on the source project. The destination project is specified as part of the [parent][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.parent] field. The source project is specified as part of the - [source][ImportProcessorVersionRequest.processor_version_source + [source][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.processor_version_source] or - ImportProcessorVersionRequest.external_processor_version_source] + [external_processor_version_source][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.external_processor_version_source] field. parent (str): Required. The destination processor name to create the diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py index ef24fde61238..9c72f032dd23 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py @@ -2414,21 +2414,20 @@ def __call__( request (~.document_processor_service.ImportProcessorVersionRequest): The request object. The request message for the [ImportProcessorVersion][google.cloud.documentai.v1beta3.DocumentProcessorService.ImportProcessorVersion] - method. Requirements: + method. - - The Document AI `Service - Agent `__ - of the destination project must have `Document AI - Editor - role `__ - on the source project. + The Document AI `Service + Agent `__ + of the destination project must have `Document AI Editor + role `__ + on the source project. The destination project is specified as part of the [parent][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.parent] field. The source project is specified as part of the - [source][ImportProcessorVersionRequest.processor_version_source + [source][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.processor_version_source] or - ImportProcessorVersionRequest.external_processor_version_source] + [external_processor_version_source][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.external_processor_version_source] field. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/async_client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/async_client.py index ee5c56fa6de3..200961fededd 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/async_client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/async_client.py @@ -48,6 +48,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.cloud.documentai_v1beta3.services.document_service import pagers from google.cloud.documentai_v1beta3.types import ( document, document_schema, @@ -593,6 +594,120 @@ async def sample_get_document(): # Done; return the response. return response + async def list_documents( + self, + request: Optional[Union[document_service.ListDocumentsRequest, dict]] = None, + *, + dataset: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDocumentsAsyncPager: + r"""Returns a list of documents present in the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import documentai_v1beta3 + + async def sample_list_documents(): + # Create a client + client = documentai_v1beta3.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = documentai_v1beta3.ListDocumentsRequest( + dataset="dataset_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.documentai_v1beta3.types.ListDocumentsRequest, dict]]): + The request object. + dataset (:class:`str`): + Required. The resource name of the + dataset to be listed. Format: + + projects/{project}/locations/{location}/processors/{processor}/dataset + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.documentai_v1beta3.services.document_service.pagers.ListDocumentsAsyncPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = document_service.ListDocumentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dataset is not None: + request.dataset = dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_documents, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("dataset", request.dataset),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDocumentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + async def batch_delete_documents( self, request: Optional[ diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py index 25f850303730..93cd6cd6b57e 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py @@ -52,6 +52,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.cloud.documentai_v1beta3.services.document_service import pagers from google.cloud.documentai_v1beta3.types import ( document, document_schema, @@ -860,6 +861,120 @@ def sample_get_document(): # Done; return the response. return response + def list_documents( + self, + request: Optional[Union[document_service.ListDocumentsRequest, dict]] = None, + *, + dataset: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDocumentsPager: + r"""Returns a list of documents present in the dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import documentai_v1beta3 + + def sample_list_documents(): + # Create a client + client = documentai_v1beta3.DocumentServiceClient() + + # Initialize request argument(s) + request = documentai_v1beta3.ListDocumentsRequest( + dataset="dataset_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.documentai_v1beta3.types.ListDocumentsRequest, dict]): + The request object. + dataset (str): + Required. The resource name of the + dataset to be listed. 
Format: + + projects/{project}/locations/{location}/processors/{processor}/dataset + + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.documentai_v1beta3.services.document_service.pagers.ListDocumentsPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a document_service.ListDocumentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, document_service.ListDocumentsRequest): + request = document_service.ListDocumentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dataset is not None: + request.dataset = dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("dataset", request.dataset),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDocumentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + def batch_delete_documents( self, request: Optional[ diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/pagers.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/pagers.py new file mode 100644 index 000000000000..f91242404854 --- /dev/null +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.documentai_v1beta3.types import document_service + + +class ListDocumentsPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.documentai_v1beta3.types.ListDocumentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``document_metadata`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``document_metadata`` field on the + corresponding responses. + + All the usual :class:`google.cloud.documentai_v1beta3.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., document_service.ListDocumentsResponse], + request: document_service.ListDocumentsRequest, + response: document_service.ListDocumentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.documentai_v1beta3.types.ListDocumentsRequest): + The initial request object. + response (google.cloud.documentai_v1beta3.types.ListDocumentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = document_service.ListDocumentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[document_service.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[document_service.DocumentMetadata]: + for page in self.pages: + yield from page.document_metadata + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDocumentsAsyncPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.cloud.documentai_v1beta3.types.ListDocumentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``document_metadata`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``document_metadata`` field on the + corresponding responses. + + All the usual :class:`google.cloud.documentai_v1beta3.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[document_service.ListDocumentsResponse]], + request: document_service.ListDocumentsRequest, + response: document_service.ListDocumentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
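+ + This async variant mirrors ``ListDocumentsPager``; each page fetch is awaited.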
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.documentai_v1beta3.types.ListDocumentsRequest): + The initial request object. + response (google.cloud.documentai_v1beta3.types.ListDocumentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = document_service.ListDocumentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[document_service.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[document_service.DocumentMetadata]: + async def async_generator(): + async for page in self.pages: + for response in page.document_metadata: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/base.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/base.py index fb2d99970509..04e054ff1e7e 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/base.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/base.py @@ -139,6 +139,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_documents: gapic_v1.method.wrap_method( + self.list_documents, + default_timeout=None, + client_info=client_info, + ), self.batch_delete_documents: gapic_v1.method.wrap_method( self.batch_delete_documents, default_timeout=None, @@ -200,6 +205,18 @@ def get_document( ]: raise NotImplementedError() + @property + def list_documents( + self, + ) -> Callable[ + [document_service.ListDocumentsRequest], + Union[ + document_service.ListDocumentsResponse, + Awaitable[document_service.ListDocumentsResponse], + ], + ]: + raise NotImplementedError() + @property def batch_delete_documents( self, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/grpc.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/grpc.py index f916e8c50ac4..b33bbdc21fe0 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/grpc.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/grpc.py @@ -327,6 +327,34 @@ def get_document( ) return self._stubs["get_document"] + @property + def list_documents( + self, + ) -> Callable[ + [document_service.ListDocumentsRequest], document_service.ListDocumentsResponse + ]: + r"""Return a callable for the list documents method over gRPC. + + Returns a list of documents present in the dataset. + + Returns: + Callable[[~.ListDocumentsRequest], + ~.ListDocumentsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_documents" not in self._stubs: + self._stubs["list_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.documentai.v1beta3.DocumentService/ListDocuments", + request_serializer=document_service.ListDocumentsRequest.serialize, + response_deserializer=document_service.ListDocumentsResponse.deserialize, + ) + return self._stubs["list_documents"] + @property def batch_delete_documents( self, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/grpc_asyncio.py index 5aaca005ed24..8ecc480bcc64 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/grpc_asyncio.py @@ -337,6 +337,35 @@ def get_document( ) return self._stubs["get_document"] + @property + def list_documents( + self, + ) -> Callable[ + [document_service.ListDocumentsRequest], + Awaitable[document_service.ListDocumentsResponse], + ]: + r"""Return a callable for the list documents method over gRPC. + + Returns a list of documents present in the dataset. + + Returns: + Callable[[~.ListDocumentsRequest], + Awaitable[~.ListDocumentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_documents" not in self._stubs: + self._stubs["list_documents"] = self.grpc_channel.unary_unary( + "/google.cloud.documentai.v1beta3.DocumentService/ListDocuments", + request_serializer=document_service.ListDocumentsRequest.serialize, + response_deserializer=document_service.ListDocumentsResponse.deserialize, + ) + return self._stubs["list_documents"] + @property def batch_delete_documents( self, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/rest.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/rest.py index aa2719ec71f0..337947072d2f 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/rest.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/rest.py @@ -104,6 +104,14 @@ def post_import_documents(self, response): logging.log(f"Received response: {response}") return response + def pre_list_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_documents(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_dataset(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -218,6 +226,29 @@ def post_import_documents( """ return response + def pre_list_documents( + self, + request: document_service.ListDocumentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[document_service.ListDocumentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_list_documents( + self, response: document_service.ListDocumentsResponse + ) -> document_service.ListDocumentsResponse: + """Post-rpc interceptor for list_documents + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_update_dataset( self, request: document_service.UpdateDatasetRequest, @@ -906,6 +937,101 @@ def __call__( resp = self._interceptor.post_import_documents(resp) return resp + class _ListDocuments(DocumentServiceRestStub): + def __hash__(self): + return hash("ListDocuments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.ListDocumentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.ListDocumentsResponse: + r"""Call the list documents method over HTTP. + + Args: + request (~.document_service.ListDocumentsRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.document_service.ListDocumentsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta3/{dataset=projects/*/locations/*/processors/*/dataset}:listDocuments", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_list_documents(request, metadata) + pb_request = document_service.ListDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.ListDocumentsResponse() + pb_resp = document_service.ListDocumentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_documents(resp) + return resp + class _UpdateDataset(DocumentServiceRestStub): def __hash__(self): return hash("UpdateDataset") @@ -1135,6 +1261,16 @@ def import_documents( # In C++ this would require a dynamic_cast return self._ImportDocuments(self._session, self._host, self._interceptor) # type: ignore + @property + def list_documents( + self, + ) -> Callable[ + [document_service.ListDocumentsRequest], document_service.ListDocumentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDocuments(self._session, self._host, self._interceptor) # type: ignore + @property def update_dataset( self, diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/__init__.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/__init__.py index 849ba2382a19..efe88aa57617 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/__init__.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/__init__.py @@ -80,12 +80,20 @@ UndeployProcessorVersionRequest, UndeployProcessorVersionResponse, ) -from .document_schema import DocumentSchema, EntityTypeMetadata, PropertyMetadata +from .document_schema import ( + DocumentSchema, + EntityTypeMetadata, + FieldExtractionMetadata, + PropertyMetadata, + SummaryOptions, +) from .document_service import ( BatchDeleteDocumentsMetadata, BatchDeleteDocumentsRequest, BatchDeleteDocumentsResponse, DatasetSplitType, + DocumentLabelingState, + DocumentMetadata, DocumentPageRange, GetDatasetSchemaRequest, GetDocumentRequest, @@ -93,6 +101,8 @@ ImportDocumentsMetadata, ImportDocumentsRequest, ImportDocumentsResponse, + ListDocumentsRequest, + ListDocumentsResponse, UpdateDatasetOperationMetadata, UpdateDatasetRequest, UpdateDatasetSchemaRequest, @@ -100,7 +110,7 @@ from .evaluation import Evaluation, EvaluationReference from .geometry import BoundingPoly, NormalizedVertex, Vertex from .operation_metadata import CommonOperationMetadata -from .processor import Processor, ProcessorVersion +from .processor import Processor, ProcessorVersion, ProcessorVersionAlias from .processor_type import ProcessorType __all__ = ( @@ -173,10 +183,13 @@ "UndeployProcessorVersionResponse", "DocumentSchema", "EntityTypeMetadata", + "FieldExtractionMetadata", "PropertyMetadata", + "SummaryOptions", "BatchDeleteDocumentsMetadata", "BatchDeleteDocumentsRequest", "BatchDeleteDocumentsResponse", + "DocumentMetadata", "DocumentPageRange", "GetDatasetSchemaRequest", "GetDocumentRequest", @@ -184,10 +197,13 @@ "ImportDocumentsMetadata", "ImportDocumentsRequest", "ImportDocumentsResponse", + "ListDocumentsRequest", + "ListDocumentsResponse", "UpdateDatasetOperationMetadata", "UpdateDatasetRequest", "UpdateDatasetSchemaRequest", "DatasetSplitType", + "DocumentLabelingState", "Evaluation", "EvaluationReference", "BoundingPoly", @@ -196,5 +212,6 @@ "CommonOperationMetadata", "Processor", "ProcessorVersion", + "ProcessorVersionAlias", "ProcessorType", ) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document.py index 35ed74717b3c..8fb22a98f97d 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document.py @@ -1902,7 +1902,8 @@ class RevisionCase(proto.Enum): Values: REVISION_CASE_UNSPECIFIED (0): - Unspecified case, fallback to read the LATEST_HUMAN_REVIEW. + Unspecified case, fall back to read the + ``LATEST_HUMAN_REVIEW``. LATEST_HUMAN_REVIEW (1): The latest revision made by a human. 
        LATEST_TIMESTAMP (2):
diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_io.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_io.py
index 73cfc30d986a..25ca2116723e 100644
--- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_io.py
+++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_io.py
@@ -44,6 +44,12 @@ class RawDocument(proto.Message):
            An IANA MIME type (RFC6838) indicating the nature and format of
            the
            [content][google.cloud.documentai.v1beta3.RawDocument.content].
+        display_name (str):
+            The display name of the document. It supports all Unicode
+            characters except the following reserved characters: ``*``,
+            ``?``, ``[``, ``]``, ``%``, ``{``, ``}``, ``'``, ``\"``,
+            ``,``, ``~``, ``=`` and ``:``. If not specified, a default
+            ID is generated.
    """

    content: bytes = proto.Field(
@@ -54,6 +60,10 @@ class RawDocument(proto.Message):
        proto.STRING,
        number=2,
    )
+    display_name: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )


 class GcsDocument(proto.Message):
@@ -257,7 +267,7 @@ class OcrConfig(proto.Message):
        disable_character_boxes_detection (bool):
            Turn off character box detector in OCR
            engine. Character box detection is enabled by
-            default in OCR 2.0+ processors.
+            default in OCR 2.0 (and later) processors.
        premium_features (google.cloud.documentai_v1beta3.types.OcrConfig.PremiumFeatures):
            Configurations for premium OCR features.
    """
@@ -289,7 +299,8 @@ class PremiumFeatures(proto.Message):
        Attributes:
            enable_selection_mark_detection (bool):
                Turn on selection mark detector in OCR
-                engine. Only available in OCR 2.0+ processors.
+                engine. Only available in OCR 2.0 (and later)
+                processors.
            compute_style_info (bool):
                Turn on font identification model and return
                font style information.
diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_processor_service.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_processor_service.py
index 17f51ac9bec9..637808172eba 100644
--- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_processor_service.py
+++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_processor_service.py
@@ -106,8 +106,8 @@ class ProcessOptions(proto.Message):

            This field is a member of `oneof`_ ``page_range``.
        from_start (int):
-            Only process certain pages from the start,
-            process all if the document has less pages.
+            Only process certain pages from the start.
+            Process all if the document has fewer pages.

            This field is a member of `oneof`_ ``page_range``.
        from_end (int):
@@ -118,6 +118,13 @@
        ocr_config (google.cloud.documentai_v1beta3.types.OcrConfig):
            Only applicable to ``OCR_PROCESSOR``. Returns
            error if set on other processor types.
+        schema_override (google.cloud.documentai_v1beta3.types.DocumentSchema):
+            Optional. Override the schema of the
+            [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion].
+            Will return an Invalid Argument error if this field is set
+            when the underlying
+            [ProcessorVersion][google.cloud.documentai.v1beta3.ProcessorVersion]
+            doesn't support schema override.
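+
+    An illustrative sketch of setting this field (hypothetical display
+    name; assumes the target processor version supports schema
+    override)::
+
+        process_options = documentai_v1beta3.ProcessOptions(
+            schema_override=documentai_v1beta3.DocumentSchema(
+                display_name="my-override-schema",
+            ),
+        )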
""" class IndividualPageSelector(proto.Message): @@ -155,6 +162,11 @@ class IndividualPageSelector(proto.Message): number=1, message=document_io.OcrConfig, ) + schema_override: gcd_document_schema.DocumentSchema = proto.Field( + proto.MESSAGE, + number=8, + message=gcd_document_schema.DocumentSchema, + ) class ProcessRequest(proto.Message): @@ -1268,8 +1280,8 @@ class CustomDocumentExtractionOptions(proto.Message): """ class TrainingMethod(proto.Enum): - r"""Training Method for CDE. TRAINING_METHOD_UNSPECIFIED will fallback - to MODEL_BASED. + r"""Training Method for CDE. ``TRAINING_METHOD_UNSPECIFIED`` will fall + back to ``MODEL_BASED``. Values: TRAINING_METHOD_UNSPECIFIED (0): @@ -1747,19 +1759,20 @@ def raw_page(self): class ImportProcessorVersionRequest(proto.Message): r"""The request message for the [ImportProcessorVersion][google.cloud.documentai.v1beta3.DocumentProcessorService.ImportProcessorVersion] - method. Requirements: + method. - - The Document AI `Service - Agent `__ of - the destination project must have `Document AI Editor - role `__ - on the source project. + The Document AI `Service + Agent `__ of the + destination project must have `Document AI Editor + role `__ + on the source project. The destination project is specified as part of the [parent][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.parent] field. The source project is specified as part of the - [source][ImportProcessorVersionRequest.processor_version_source or - ImportProcessorVersionRequest.external_processor_version_source] + [source][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.processor_version_source] + or + [external_processor_version_source][google.cloud.documentai.v1beta3.ImportProcessorVersionRequest.external_processor_version_source] field. This message has `oneof`_ fields (mutually exclusive fields). @@ -1778,9 +1791,9 @@ class ImportProcessorVersionRequest(proto.Message): This field is a member of `oneof`_ ``source``. external_processor_version_source (google.cloud.documentai_v1beta3.types.ImportProcessorVersionRequest.ExternalProcessorVersionSource): - The source processor version to import from, - and can be from different environment and region - than the destination processor. + The source processor version to import from. + It can be from a different environment and + region than the destination processor. This field is a member of `oneof`_ ``source``. parent (str): diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py index 161929ac72b4..2b177b922f1c 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_schema.py @@ -22,6 +22,8 @@ __protobuf__ = proto.module( package="google.cloud.documentai.v1beta3", manifest={ + "SummaryOptions", + "FieldExtractionMetadata", "PropertyMetadata", "EntityTypeMetadata", "DocumentSchema", @@ -29,6 +31,76 @@ ) +class SummaryOptions(proto.Message): + r"""Metadata for document summarization. + + Attributes: + length (google.cloud.documentai_v1beta3.types.SummaryOptions.Length): + How long the summary should be. + format_ (google.cloud.documentai_v1beta3.types.SummaryOptions.Format): + The format the summary should be in. + """ + + class Length(proto.Enum): + r"""The Length enum. + + Values: + LENGTH_UNSPECIFIED (0): + Default. 
+        BRIEF (1):
+            A brief summary of one or two sentences.
+        MODERATE (2):
+            A paragraph-length summary.
+        COMPREHENSIVE (3):
+            The longest option available.
+        """
+        LENGTH_UNSPECIFIED = 0
+        BRIEF = 1
+        MODERATE = 2
+        COMPREHENSIVE = 3
+
+    class Format(proto.Enum):
+        r"""The Format enum.
+
+        Values:
+            FORMAT_UNSPECIFIED (0):
+                Default.
+            PARAGRAPH (1):
+                Format the output in paragraphs.
+            BULLETS (2):
+                Format the output in bullets.
+        """
+        FORMAT_UNSPECIFIED = 0
+        PARAGRAPH = 1
+        BULLETS = 2
+
+    length: Length = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Length,
+    )
+    format_: Format = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=Format,
+    )
+
+
+class FieldExtractionMetadata(proto.Message):
+    r"""Metadata for how this field value is extracted.
+
+    Attributes:
+        summary_options (google.cloud.documentai_v1beta3.types.SummaryOptions):
+            Summary options config.
+    """
+
+    summary_options: "SummaryOptions" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="SummaryOptions",
+    )
+
+
 class PropertyMetadata(proto.Message):
    r"""Metadata about a property.
@@ -36,12 +108,19 @@ class PropertyMetadata(proto.Message):
        inactive (bool):
            Whether the property should be considered as
            "inactive".
+        field_extraction_metadata (google.cloud.documentai_v1beta3.types.FieldExtractionMetadata):
+            Field extraction metadata on the property.
    """

    inactive: bool = proto.Field(
        proto.BOOL,
        number=3,
    )
+    field_extraction_metadata: "FieldExtractionMetadata" = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message="FieldExtractionMetadata",
+    )


 class EntityTypeMetadata(proto.Message):
diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py
index 4dd9ee5b8013..f3f6445be1b1 100644
--- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py
+++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_service.py
@@ -29,6 +29,7 @@
    package="google.cloud.documentai.v1beta3",
    manifest={
        "DatasetSplitType",
+        "DocumentLabelingState",
        "UpdateDatasetRequest",
        "UpdateDatasetOperationMetadata",
        "ImportDocumentsRequest",
@@ -36,12 +37,15 @@
        "ImportDocumentsMetadata",
        "GetDocumentRequest",
        "GetDocumentResponse",
+        "ListDocumentsRequest",
+        "ListDocumentsResponse",
        "BatchDeleteDocumentsRequest",
        "BatchDeleteDocumentsResponse",
        "BatchDeleteDocumentsMetadata",
        "GetDatasetSchemaRequest",
        "UpdateDatasetSchemaRequest",
        "DocumentPageRange",
+        "DocumentMetadata",
    },
 )

@@ -53,7 +57,6 @@ class DatasetSplitType(proto.Enum):
    Values:
        DATASET_SPLIT_TYPE_UNSPECIFIED (0):
            Default value if the enum is not set.
-            go/protodosdonts#do-include-an-unspecified-value-in-an-enum
        DATASET_SPLIT_TRAIN (1):
            Identifies the train documents.
        DATASET_SPLIT_TEST (2):
@@ -67,6 +70,25 @@
    DATASET_SPLIT_UNASSIGNED = 3


+class DocumentLabelingState(proto.Enum):
+    r"""Describes the labeling status of a document.
+
+    Values:
+        DOCUMENT_LABELING_STATE_UNSPECIFIED (0):
+            Default value if the enum is not set.
+        DOCUMENT_LABELED (1):
+            Document has been labeled.
+        DOCUMENT_UNLABELED (2):
+            Document has not been labeled.
+        DOCUMENT_AUTO_LABELED (3):
+            Document has been auto-labeled.
+ """ + DOCUMENT_LABELING_STATE_UNSPECIFIED = 0 + DOCUMENT_LABELED = 1 + DOCUMENT_UNLABELED = 2 + DOCUMENT_AUTO_LABELED = 3 + + class UpdateDatasetRequest(proto.Message): r""" @@ -95,7 +117,7 @@ class UpdateDatasetOperationMetadata(proto.Message): Attributes: common_metadata (google.cloud.documentai_v1beta3.types.CommonOperationMetadata): - The basic metadata of the long running + The basic metadata of the long-running operation. """ @@ -201,7 +223,7 @@ class ImportDocumentsMetadata(proto.Message): Attributes: common_metadata (google.cloud.documentai_v1beta3.types.CommonOperationMetadata): - The basic metadata of the long running + The basic metadata of the long-running operation. individual_import_statuses (MutableSequence[google.cloud.documentai_v1beta3.types.ImportDocumentsMetadata.IndividualImportStatus]): The list of response details of each @@ -243,9 +265,10 @@ class IndividualImportStatus(proto.Message): ) class ImportConfigValidationResult(proto.Message): - r"""The validation status of each import config. Status is set to errors - if there is no documents to import in the import_config, or OK if - the operation will try to proceed at least one document. + r"""The validation status of each import config. Status is set to an + error if there are no documents to import in the ``import_config``, + or ``OK`` if the operation will try to proceed with at least one + document. Attributes: input_gcs_source (str): @@ -346,6 +369,130 @@ class GetDocumentResponse(proto.Message): ) +class ListDocumentsRequest(proto.Message): + r""" + + Attributes: + dataset (str): + Required. The resource name of the dataset to + be listed. Format: + + projects/{project}/locations/{location}/processors/{processor}/dataset + page_size (int): + The maximum number of documents to return. + The service may return fewer than this value. If + unspecified, at most 20 documents will be + returned. The maximum value is 100; values above + 100 will be coerced to 100. + page_token (str): + A page token, received from a previous ``ListDocuments`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListDocuments`` must match the call that provided the page + token. + filter (str): + Optional. Query to filter the documents based on + https://google.aip.dev/160. + + Currently support query strings are: + + - ``SplitType=DATASET_SPLIT_TEST|DATASET_SPLIT_TRAIN|DATASET_SPLIT_UNASSIGNED`` + - ``LabelingState=DOCUMENT_LABELED|DOCUMENT_UNLABELED|DOCUMENT_AUTO_LABELED`` + - ``DisplayName=\"file_name.pdf\"`` + - ``EntityType=abc/def`` + - ``TagName=\"auto-labeling-running\"|\"sampled\"`` + + Note: + + - Only ``AND``, ``=`` and ``!=`` are supported. e.g. + ``DisplayName=file_name AND EntityType!=abc`` IS + supported. + - Wildcard ``*`` is supported only in ``DisplayName`` + filter + - No duplicate filter keys are allowed, e.g. + ``EntityType=a AND EntityType=b`` is NOT supported. + - String match is case sensitive (for filter + ``DisplayName`` & ``EntityType``). + return_total_size (bool): + Optional. Controls if the ListDocuments request requires a + total size of matched documents. See + ListDocumentsResponse.total_size. + + Enabling this flag may adversely impact performance. + + Defaults to false. + skip (int): + Optional. Number of results to skip beginning from the + ``page_token`` if provided. + https://google.aip.dev/158#skipping-results. It must be a + non-negative integer. Negative values wil be rejected. Note + that this is not the number of pages to skip. 
If this value
+            causes the cursor to move past the end of results,
+            ``ListDocumentsResponse.document_metadata`` and
+            ``ListDocumentsResponse.next_page_token`` will be empty.
+    """
+
+    dataset: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    return_total_size: bool = proto.Field(
+        proto.BOOL,
+        number=6,
+    )
+    skip: int = proto.Field(
+        proto.INT32,
+        number=8,
+    )
+
+
+class ListDocumentsResponse(proto.Message):
+    r"""
+
+    Attributes:
+        document_metadata (MutableSequence[google.cloud.documentai_v1beta3.types.DocumentMetadata]):
+            Document metadata corresponding to the listed
+            documents.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+        total_size (int):
+            Total count of documents queried.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    document_metadata: MutableSequence["DocumentMetadata"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="DocumentMetadata",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    total_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+
+
 class BatchDeleteDocumentsRequest(proto.Message):
    r"""
@@ -383,7 +530,7 @@ class BatchDeleteDocumentsMetadata(proto.Message):

    Attributes:
        common_metadata (google.cloud.documentai_v1beta3.types.CommonOperationMetadata):
-            The basic metadata of the long running
+            The basic metadata of the long-running
            operation.
        individual_batch_delete_statuses (MutableSequence[google.cloud.documentai_v1beta3.types.BatchDeleteDocumentsMetadata.IndividualBatchDeleteStatus]):
            The list of response details of each
@@ -510,4 +657,46 @@ class DocumentPageRange(proto.Message):
    )


+class DocumentMetadata(proto.Message):
+    r"""Metadata about a document.
+
+    Attributes:
+        document_id (google.cloud.documentai_v1beta3.types.DocumentId):
+            Document identifier.
+        page_count (int):
+            Number of pages in the document.
+        dataset_type (google.cloud.documentai_v1beta3.types.DatasetSplitType):
+            Type of the dataset split to which the
+            document belongs.
+        labeling_state (google.cloud.documentai_v1beta3.types.DocumentLabelingState):
+            Labeling state of the document.
+        display_name (str):
+            The display name of the document.
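+
+    An illustrative listing sketch (hypothetical resource name; the
+    filter string follows the ``ListDocumentsRequest.filter`` syntax
+    documented above)::
+
+        client = documentai_v1beta3.DocumentServiceClient()
+        request = documentai_v1beta3.ListDocumentsRequest(
+            dataset="projects/my-proj/locations/us/processors/my-proc/dataset",
+            filter="LabelingState=DOCUMENT_LABELED AND SplitType=DATASET_SPLIT_TRAIN",
+            return_total_size=True,
+        )
+        for document_metadata in client.list_documents(request=request):
+            print(document_metadata.display_name, document_metadata.page_count)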
+ """ + + document_id: gcd_dataset.DocumentId = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_dataset.DocumentId, + ) + page_count: int = proto.Field( + proto.INT32, + number=2, + ) + dataset_type: "DatasetSplitType" = proto.Field( + proto.ENUM, + number=3, + enum="DatasetSplitType", + ) + labeling_state: "DocumentLabelingState" = proto.Field( + proto.ENUM, + number=5, + enum="DocumentLabelingState", + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/processor.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/processor.py index cccf96a174a1..f9610c349925 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/processor.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/processor.py @@ -27,6 +27,7 @@ package="google.cloud.documentai.v1beta3", manifest={ "ProcessorVersion", + "ProcessorVersionAlias", "Processor", }, ) @@ -175,6 +176,29 @@ class DeprecationInfo(proto.Message): ) +class ProcessorVersionAlias(proto.Message): + r"""Contains the alias and the aliased resource name of processor + version. + + Attributes: + alias (str): + The alias in the form of ``processor_version`` resource + name. + processor_version (str): + The resource name of aliased processor + version. + """ + + alias: str = proto.Field( + proto.STRING, + number=1, + ) + processor_version: str = proto.Field( + proto.STRING, + number=2, + ) + + class Processor(proto.Message): r"""The first-class citizen for Document AI. Each processor defines how to extract structural information from a document. @@ -194,6 +218,8 @@ class Processor(proto.Message): Output only. The state of the processor. default_processor_version (str): The default processor version. + processor_version_aliases (MutableSequence[google.cloud.documentai_v1beta3.types.ProcessorVersionAlias]): + Output only. The processor version aliases. process_endpoint (str): Output only. Immutable. The http endpoint that can be called to invoke processing. @@ -271,6 +297,13 @@ class State(proto.Enum): proto.STRING, number=9, ) + processor_version_aliases: MutableSequence[ + "ProcessorVersionAlias" + ] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="ProcessorVersionAlias", + ) process_endpoint: str = proto.Field( proto.STRING, number=6, diff --git a/packages/google-cloud-documentai/noxfile.py b/packages/google-cloud-documentai/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-documentai/noxfile.py +++ b/packages/google-cloud-documentai/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. 
-    if os.path.exists(system_test_path):
-        session.run(
-            "py.test",
-            "--verbose",
-            f"--junitxml=system_{session.python}_sponge_log.xml",
-            system_test_path,
-            *session.posargs,
-        )
-    if os.path.exists(system_test_folder_path):
-        session.run(
-            "py.test",
-            "--verbose",
-            f"--junitxml=system_{session.python}_sponge_log.xml",
-            system_test_folder_path,
-            *session.posargs,
-        )
diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta3_generated_document_service_list_documents_async.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta3_generated_document_service_list_documents_async.py
new file mode 100644
index 000000000000..c1cecf8cbcb4
--- /dev/null
+++ b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta3_generated_document_service_list_documents_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDocuments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-documentai
+
+
+# [START documentai_v1beta3_generated_DocumentService_ListDocuments_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import documentai_v1beta3
+
+
+async def sample_list_documents():
+    # Create a client
+    client = documentai_v1beta3.DocumentServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = documentai_v1beta3.ListDocumentsRequest(
+        dataset="dataset_value",
+    )
+
+    # Make the request (the async client method is a coroutine and must
+    # be awaited before iterating the resulting pager)
+    page_result = await client.list_documents(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END documentai_v1beta3_generated_DocumentService_ListDocuments_async]
diff --git a/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta3_generated_document_service_list_documents_sync.py b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta3_generated_document_service_list_documents_sync.py
new file mode 100644
index 000000000000..2ff442f74696
--- /dev/null
+++ b/packages/google-cloud-documentai/samples/generated_samples/documentai_v1beta3_generated_document_service_list_documents_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-documentai + + +# [START documentai_v1beta3_generated_DocumentService_ListDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import documentai_v1beta3 + + +def sample_list_documents(): + # Create a client + client = documentai_v1beta3.DocumentServiceClient() + + # Initialize request argument(s) + request = documentai_v1beta3.ListDocumentsRequest( + dataset="dataset_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END documentai_v1beta3_generated_DocumentService_ListDocuments_sync] diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 12d7ad027832..26af44f1c288 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.19.0" + "version": "2.20.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json index b92e5ed066c0..0934795ec337 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "2.19.0" + "version": "2.20.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index 16c3bacb6e0f..ee639d5e1b34 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 
+8,7 @@
   ],
   "language": "PYTHON",
   "name": "google-cloud-documentai",
-    "version": "2.19.0"
+    "version": "2.20.0"
   },
   "snippets": [
     {
@@ -4350,6 +4350,167 @@
       ],
       "title": "documentai_v1beta3_generated_document_service_import_documents_sync.py"
     },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "async": true,
+        "client": {
+          "fullName": "google.cloud.documentai_v1beta3.DocumentServiceAsyncClient",
+          "shortName": "DocumentServiceAsyncClient"
+        },
+        "fullName": "google.cloud.documentai_v1beta3.DocumentServiceAsyncClient.list_documents",
+        "method": {
+          "fullName": "google.cloud.documentai.v1beta3.DocumentService.ListDocuments",
+          "service": {
+            "fullName": "google.cloud.documentai.v1beta3.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "ListDocuments"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.documentai_v1beta3.types.ListDocumentsRequest"
+          },
+          {
+            "name": "dataset",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.documentai_v1beta3.services.document_service.pagers.ListDocumentsAsyncPager",
+        "shortName": "list_documents"
+      },
+      "description": "Sample for ListDocuments",
+      "file": "documentai_v1beta3_generated_document_service_list_documents_async.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "documentai_v1beta3_generated_DocumentService_ListDocuments_async",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type": "REQUEST_INITIALIZATION"
+        },
+        {
+          "end": 48,
+          "start": 46,
+          "type": "REQUEST_EXECUTION"
+        },
+        {
+          "end": 53,
+          "start": 49,
+          "type": "RESPONSE_HANDLING"
+        }
+      ],
+      "title": "documentai_v1beta3_generated_document_service_list_documents_async.py"
+    },
+    {
+      "canonical": true,
+      "clientMethod": {
+        "client": {
+          "fullName": "google.cloud.documentai_v1beta3.DocumentServiceClient",
+          "shortName": "DocumentServiceClient"
+        },
+        "fullName": "google.cloud.documentai_v1beta3.DocumentServiceClient.list_documents",
+        "method": {
+          "fullName": "google.cloud.documentai.v1beta3.DocumentService.ListDocuments",
+          "service": {
+            "fullName": "google.cloud.documentai.v1beta3.DocumentService",
+            "shortName": "DocumentService"
+          },
+          "shortName": "ListDocuments"
+        },
+        "parameters": [
+          {
+            "name": "request",
+            "type": "google.cloud.documentai_v1beta3.types.ListDocumentsRequest"
+          },
+          {
+            "name": "dataset",
+            "type": "str"
+          },
+          {
+            "name": "retry",
+            "type": "google.api_core.retry.Retry"
+          },
+          {
+            "name": "timeout",
+            "type": "float"
+          },
+          {
+            "name": "metadata",
+            "type": "Sequence[Tuple[str, str]]"
+          }
+        ],
+        "resultType": "google.cloud.documentai_v1beta3.services.document_service.pagers.ListDocumentsPager",
+        "shortName": "list_documents"
+      },
+      "description": "Sample for ListDocuments",
+      "file": "documentai_v1beta3_generated_document_service_list_documents_sync.py",
+      "language": "PYTHON",
+      "origin": "API_DEFINITION",
+      "regionTag": "documentai_v1beta3_generated_DocumentService_ListDocuments_sync",
+      "segments": [
+        {
+          "end": 52,
+          "start": 27,
+          "type": "FULL"
+        },
+        {
+          "end": 52,
+          "start": 27,
+          "type": "SHORT"
+        },
+        {
+          "end": 40,
+          "start": 38,
+          "type": "CLIENT_INITIALIZATION"
+        },
+        {
+          "end": 45,
+          "start": 41,
+          "type":
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "documentai_v1beta3_generated_document_service_list_documents_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-documentai/scripts/fixup_documentai_v1beta3_keywords.py b/packages/google-cloud-documentai/scripts/fixup_documentai_v1beta3_keywords.py index 432d626bc4d5..40bd65af9ae3 100644 --- a/packages/google-cloud-documentai/scripts/fixup_documentai_v1beta3_keywords.py +++ b/packages/google-cloud-documentai/scripts/fixup_documentai_v1beta3_keywords.py @@ -57,6 +57,7 @@ class documentaiCallTransformer(cst.CSTTransformer): 'get_processor_version': ('name', ), 'import_documents': ('dataset', 'batch_documents_import_configs', ), 'import_processor_version': ('parent', 'processor_version_source', 'external_processor_version_source', ), + 'list_documents': ('dataset', 'page_size', 'page_token', 'filter', 'return_total_size', 'skip', ), 'list_evaluations': ('parent', 'page_size', 'page_token', ), 'list_processors': ('parent', 'page_size', 'page_token', ), 'list_processor_types': ('parent', 'page_size', 'page_token', ), diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py index 9187c565e35f..48571b0c2731 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py @@ -10738,6 +10738,9 @@ def test_create_processor_rest(request_type): "display_name": "display_name_value", "state": 1, "default_processor_version": "default_processor_version_value", + "processor_version_aliases": [ + {"alias": "alias_value", "processor_version": "processor_version_value"} + ], "process_endpoint": "process_endpoint_value", "create_time": {"seconds": 751, "nanos": 543}, "kms_key_name": "kms_key_name_value", @@ -10947,6 +10950,9 @@ def test_create_processor_rest_bad_request( "display_name": "display_name_value", "state": 1, "default_processor_version": "default_processor_version_value", + "processor_version_aliases": [ + {"alias": "alias_value", "processor_version": "processor_version_value"} + ], "process_endpoint": "process_endpoint_value", "create_time": {"seconds": 751, "nanos": 543}, "kms_key_name": "kms_key_name_value", diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py index d59cded4c52b..3496b9be09e7 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py @@ -57,6 +57,7 @@ from google.cloud.documentai_v1beta3.services.document_service import ( DocumentServiceAsyncClient, DocumentServiceClient, + pagers, transports, ) from google.cloud.documentai_v1beta3.types import ( @@ -1465,6 +1466,432 @@ async def test_get_document_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + response = client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.ListDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_documents_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + client.list_documents() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.ListDocumentsRequest() + + +@pytest.mark.asyncio +async def test_list_documents_async( + transport: str = "grpc_asyncio", request_type=document_service.ListDocumentsRequest +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + ) + response = await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.ListDocumentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +@pytest.mark.asyncio +async def test_list_documents_async_from_dict(): + await test_list_documents_async(request_type=dict) + + +def test_list_documents_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.ListDocumentsRequest() + + request.dataset = "dataset_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
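+    # The mocked call lets the test assert below that the ``dataset``
+    # field is forwarded as an ``x-goog-request-params`` routing header,
+    # matching the HTTP binding's ``{dataset=...}`` path parameter.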
+ with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = document_service.ListDocumentsResponse() + client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset=dataset_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_documents_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.ListDocumentsRequest() + + request.dataset = "dataset_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.ListDocumentsResponse() + ) + await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dataset=dataset_value", + ) in kw["metadata"] + + +def test_list_documents_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.ListDocumentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_documents( + dataset="dataset_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].dataset + mock_val = "dataset_value" + assert arg == mock_val + + +def test_list_documents_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_documents( + document_service.ListDocumentsRequest(), + dataset="dataset_value", + ) + + +@pytest.mark.asyncio +async def test_list_documents_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.ListDocumentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.ListDocumentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_documents( + dataset="dataset_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].dataset + mock_val = "dataset_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_documents_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_documents( + document_service.ListDocumentsRequest(), + dataset="dataset_value", + ) + + +def test_list_documents_pager(transport_name: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + document_metadata=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("dataset", ""),)), + ) + pager = client.list_documents(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document_service.DocumentMetadata) for i in results) + + +def test_list_documents_pages(transport_name: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + document_metadata=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + ), + RuntimeError, + ) + pages = list(client.list_documents(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_documents_async_pager(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
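+        # The four fake responses below model pages of 3, 0, 1, and 2
+        # items; the trailing RuntimeError guards against any extra
+        # page request beyond the final page.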
+ call.side_effect = ( + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + document_metadata=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_documents( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, document_service.DocumentMetadata) for i in responses) + + +@pytest.mark.asyncio +async def test_list_documents_async_pages(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + document_metadata=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_documents(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -3094,6 +3521,342 @@ def test_get_document_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "dataset": "projects/sample1/locations/sample2/processors/sample3/dataset" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + total_size=1086, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_documents(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + assert response.total_size == 1086 + + +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["dataset"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataset"] = "dataset_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataset" in jsonified_request + assert jsonified_request["dataset"] == "dataset_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document_service.ListDocumentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
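+            # ("v1/sample_method" below is a stand-in URI used only by
+            # the mocked transcode(); it is not a real Document AI route.)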
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = document_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_documents_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataset",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_documents_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_list_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_list_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) + + request = document_service.ListDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document_service.ListDocumentsResponse() + + client.list_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "dataset": "projects/sample1/locations/sample2/processors/sample3/dataset" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
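+    # A bare 400 response is enough here: the transport raises any status
+    # >= 400 through core_exceptions.from_http_response, which maps 400
+    # to BadRequest.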
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_documents(request) + + +def test_list_documents_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document_service.ListDocumentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "dataset": "projects/sample1/locations/sample2/processors/sample3/dataset" + } + + # get truthy value for each flattened field + mock_args = dict( + dataset="dataset_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = document_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta3/{dataset=projects/*/locations/*/processors/*/dataset}:listDocuments" + % client.transport._host, + args[1], + ) + + +def test_list_documents_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_documents( + document_service.ListDocumentsRequest(), + dataset="dataset_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + document_metadata=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + document_metadata=[ + document_service.DocumentMetadata(), + document_service.DocumentMetadata(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "dataset": "projects/sample1/locations/sample2/processors/sample3/dataset" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document_service.DocumentMetadata) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -3683,7 +4446,12 @@ def test_update_dataset_schema_rest(request_type): "name": "name_value", "value_type": "value_type_value", "occurrence_type": 1, - "property_metadata": {"inactive": True}, + "property_metadata": { + "inactive": True, + "field_extraction_metadata": { + "summary_options": {"length": 1, "format_": 1} + }, + }, } ], "entity_type_metadata": {"inactive": True}, @@ -3892,7 +4660,12 @@ def test_update_dataset_schema_rest_bad_request( "name": "name_value", "value_type": "value_type_value", "occurrence_type": 1, - "property_metadata": {"inactive": True}, + "property_metadata": { + "inactive": True, + "field_extraction_metadata": { + "summary_options": {"length": 1, "format_": 1} + }, + }, } ], "entity_type_metadata": {"inactive": True}, @@ -4130,6 +4903,7 @@ def test_document_service_base_transport(): "update_dataset", "import_documents", "get_document", + "list_documents", "batch_delete_documents", "get_dataset_schema", "update_dataset_schema", @@ -4427,6 +5201,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.get_document._session session2 = client2.transport.get_document._session assert session1 != session2 + session1 = client1.transport.list_documents._session + session2 = client2.transport.list_documents._session + assert session1 != session2 session1 = client1.transport.batch_delete_documents._session session2 = client2.transport.batch_delete_documents._session assert session1 != session2 diff --git a/packages/google-cloud-gsuiteaddons/.OwlBot.yaml b/packages/google-cloud-gsuiteaddons/.OwlBot.yaml new file mode 100644 index 000000000000..d82d33c3bf88 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/.OwlBot.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/gsuiteaddons/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-gsuiteaddons/$1 + +begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 diff --git a/packages/google-cloud-gsuiteaddons/.coveragerc b/packages/google-cloud-gsuiteaddons/.coveragerc new file mode 100644 index 000000000000..00b9f3912784 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/gsuiteaddons/__init__.py + google/cloud/gsuiteaddons/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-gsuiteaddons/.flake8 b/packages/google-cloud-gsuiteaddons/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-gsuiteaddons/.gitignore b/packages/google-cloud-gsuiteaddons/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-gsuiteaddons/.repo-metadata.json b/packages/google-cloud-gsuiteaddons/.repo-metadata.json new file mode 100644 index 000000000000..621669dc373e --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "gsuiteaddons", + "name_pretty": "Google Workspace Add-ons API", + "product_documentation": "https://developers.google.com/workspace/add-ons/overview", + "client_documentation": "https://cloud.google.com/python/docs/reference/gsuiteaddons/latest", + "issue_tracker": "", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-gsuiteaddons", + "api_id": "gsuiteaddons.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "gsuiteaddons", + "api_description": "Add-ons are customized applications that integrate with Google Workspace applications." +} diff --git a/packages/google-cloud-gsuiteaddons/CHANGELOG.md b/packages/google-cloud-gsuiteaddons/CHANGELOG.md new file mode 100644 index 000000000000..d7e92a6e9c28 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/CHANGELOG.md @@ -0,0 +1,71 @@ +# Changelog + +## [0.3.3](https://github.com/googleapis/python-gsuiteaddons/compare/v0.3.2...v0.3.3) (2023-09-21) + + +### Documentation + +* Minor formatting ([975e10e](https://github.com/googleapis/python-gsuiteaddons/commit/975e10ec8e76826ba6ce4ace7aa5c4cd59affc71)) + +## [0.3.2](https://github.com/googleapis/python-gsuiteaddons/compare/v0.3.1...v0.3.2) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#36](https://github.com/googleapis/python-gsuiteaddons/issues/36)) ([2c2bcde](https://github.com/googleapis/python-gsuiteaddons/commit/2c2bcde646d3b0e1550ea46e1008e7b7964f5f91)) + +## [0.3.1](https://github.com/googleapis/python-gsuiteaddons/compare/v0.3.0...v0.3.1) (2023-03-24) + + +### Documentation + +* Fix formatting of request arg in docstring ([#28](https://github.com/googleapis/python-gsuiteaddons/issues/28)) ([825245f](https://github.com/googleapis/python-gsuiteaddons/commit/825245f81594feea5ee41ab64f4177e02d8f903d)) + +## [0.3.0](https://github.com/googleapis/python-gsuiteaddons/compare/v0.2.1...v0.3.0) (2023-02-19) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#22](https://github.com/googleapis/python-gsuiteaddons/issues/22)) ([3e96be1](https://github.com/googleapis/python-gsuiteaddons/commit/3e96be1efbd9b39f40db4ceb46bf7c228ab2de73)) + +## [0.2.1](https://github.com/googleapis/python-gsuiteaddons/compare/v0.2.0...v0.2.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([1fc871a](https://github.com/googleapis/python-gsuiteaddons/commit/1fc871ae26d3d1988e0d12063300f1b95c87c1f3)) + + +### Documentation + +* Add documentation for enums ([1fc871a](https://github.com/googleapis/python-gsuiteaddons/commit/1fc871ae26d3d1988e0d12063300f1b95c87c1f3)) + +## [0.2.0](https://github.com/googleapis/python-gsuiteaddons/compare/v0.1.1...v0.2.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#13](https://github.com/googleapis/python-gsuiteaddons/issues/13)) ([d30436f](https://github.com/googleapis/python-gsuiteaddons/commit/d30436fa933cbb007e86b9b0514bdb73d97bc7a4)) + +## 
[0.1.1](https://github.com/googleapis/python-gsuiteaddons/compare/v0.1.0...v0.1.1) (2022-12-08) + + +### Bug Fixes + +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([e07fce5](https://github.com/googleapis/python-gsuiteaddons/commit/e07fce502d57a37fd901a97cce753ecbcf773143)) +* Drop usage of pkg_resources ([e07fce5](https://github.com/googleapis/python-gsuiteaddons/commit/e07fce502d57a37fd901a97cce753ecbcf773143)) +* Fix timeout default values ([e07fce5](https://github.com/googleapis/python-gsuiteaddons/commit/e07fce502d57a37fd901a97cce753ecbcf773143)) + + +### Documentation + +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([e07fce5](https://github.com/googleapis/python-gsuiteaddons/commit/e07fce502d57a37fd901a97cce753ecbcf773143)) + +## 0.1.0 (2022-11-14) + + +### Features + +* Generate v1 ([57d14c1](https://github.com/googleapis/python-gsuiteaddons/commit/57d14c10830674d1bcd314ee39d5eedfcc60159c)) + +## Changelog diff --git a/packages/google-cloud-gsuiteaddons/CODE_OF_CONDUCT.md b/packages/google-cloud-gsuiteaddons/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. 
Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-gsuiteaddons/CONTRIBUTING.rst b/packages/google-cloud-gsuiteaddons/CONTRIBUTING.rst new file mode 100644 index 000000000000..e182cd5c1cad --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. 
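+For orientation, a minimal pass over the steps above might look like the following sketch; the branch name and the ``-k`` filter are illustrative, and the ``nox`` sessions are the ones described later in this guide:: + +    $ git checkout -b my-feature +    $ nox -s blacken +    $ nox -s lint +    $ nox -s unit-3.11 -- -k <name of test> +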
+ +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://nox.thea.codes/en/latest/>`__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + +    $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit.
If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.11 -- -k <name of system test> + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-gsuiteaddons + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. 
_Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-gsuiteaddons/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-gsuiteaddons/LICENSE b/packages/google-cloud-gsuiteaddons/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship.
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-gsuiteaddons/MANIFEST.in b/packages/google-cloud-gsuiteaddons/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
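+# (Note: the directives below use standard setuptools MANIFEST.in syntax: +# ``recursive-include`` copies matching files into the sdist, while +# ``global-exclude`` and ``prune`` keep byte-code and the readme-gen scripts out.)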
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-gsuiteaddons/README.rst b/packages/google-cloud-gsuiteaddons/README.rst new file mode 100644 index 000000000000..45930310e45f --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/README.rst @@ -0,0 +1,108 @@ +Python Client for Google Workspace Add-ons API +============================================== + +|preview| |pypi| |versions| + +`Google Workspace Add-ons API`_: Add-ons are customized applications that integrate with Google Workspace applications. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-gsuiteaddons.svg + :target: https://pypi.org/project/google-cloud-gsuiteaddons/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-gsuiteaddons.svg + :target: https://pypi.org/project/google-cloud-gsuiteaddons/ +.. _Google Workspace Add-ons API: https://developers.google.com/workspace/add-ons/overview +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/gsuiteaddons/latest +.. _Product Documentation: https://developers.google.com/workspace/add-ons/overview + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Workspace Add-ons API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Workspace Add-ons API.: https://developers.google.com/workspace/add-ons/overview +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. 
_maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-gsuiteaddons + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-gsuiteaddons + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Workspace Add-ons API + to see other available methods on the client. +- Read the `Google Workspace Add-ons API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Google Workspace Add-ons API Product documentation: https://developers.google.com/workspace/add-ons/overview +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-gsuiteaddons/SECURITY.md b/packages/google-cloud-gsuiteaddons/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. diff --git a/packages/google-cloud-gsuiteaddons/docs/CHANGELOG.md b/packages/google-cloud-gsuiteaddons/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-gsuiteaddons/docs/README.rst b/packages/google-cloud-gsuiteaddons/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-gsuiteaddons/docs/_static/custom.css b/packages/google-cloud-gsuiteaddons/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-gsuiteaddons/docs/_templates/layout.html b/packages/google-cloud-gsuiteaddons/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +  <div class="body_container">
+ {{ sidebar() }} + {%- block document %} + <div class="body_background">
+ {%- if render_sidebar %} + <div class="body {{ css_class }}">
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + <div class="related top">&nbsp;{{- rellink_markup () }}</div> + {%- endif %} + {% endblock %} + +
+ <div id="python2-eol">
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. + </div>
+ {% block body %} {% endblock %} + </div>
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + <div class="related bottom">&nbsp;{{- rellink_markup () }}</div> + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} + </div>
+ {%- endif %} + </div>
+ {%- endblock %} + </div>
+ </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-gsuiteaddons/docs/conf.py b/packages/google-cloud-gsuiteaddons/docs/conf.py new file mode 100644 index 000000000000..567a00f88344 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-gsuiteaddons documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-gsuiteaddons" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-gsuiteaddons", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-gsuiteaddons-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-gsuiteaddons.tex", + "google-cloud-gsuiteaddons Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links.
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-gsuiteaddons", + "google-cloud-gsuiteaddons Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-gsuiteaddons", + "google-cloud-gsuiteaddons Documentation", + author, + "google-cloud-gsuiteaddons", + "google-cloud-gsuiteaddons Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/g_suite_add_ons.rst b/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/g_suite_add_ons.rst new file mode 100644 index 000000000000..00410fdc3bd5 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/g_suite_add_ons.rst @@ -0,0 +1,10 @@ +GSuiteAddOns +------------------------------ + +.. automodule:: google.cloud.gsuiteaddons_v1.services.g_suite_add_ons + :members: + :inherited-members: + +.. automodule:: google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/services.rst b/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/services.rst new file mode 100644 index 000000000000..32932149da89 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Gsuiteaddons v1 API +============================================= +.. 
toctree:: + :maxdepth: 2 + + g_suite_add_ons diff --git a/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/types.rst b/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/types.rst new file mode 100644 index 000000000000..63712713542d --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/gsuiteaddons_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Gsuiteaddons v1 API +========================================== + +.. automodule:: google.cloud.gsuiteaddons_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-gsuiteaddons/docs/index.rst b/packages/google-cloud-gsuiteaddons/docs/index.rst new file mode 100644 index 000000000000..8df99c83bccf --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + gsuiteaddons_v1/services + gsuiteaddons_v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-gsuiteaddons`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-gsuiteaddons/docs/multiprocessing.rst b/packages/google-cloud-gsuiteaddons/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/__init__.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/__init__.py new file mode 100644 index 000000000000..678332926058 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
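+# A minimal usage sketch (illustrative only, not part of this module) of the +# guidance in docs/multiprocessing.rst above: construct the client *after* +# the process fork, once inside each worker. The parent values are placeholders. +# +#     import multiprocessing +#     from google.cloud import gsuiteaddons_v1 +# +#     def count_deployments(parent: str) -> int: +#         # Client created post-fork, inside the worker process. +#         client = gsuiteaddons_v1.GSuiteAddOnsClient() +#         return sum(1 for _ in client.list_deployments(parent=parent)) +# +#     if __name__ == "__main__": +#         with multiprocessing.Pool(2) as pool: +#             print(pool.map(count_deployments, ["projects/p1", "projects/p2"]))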
+# +from google.cloud.gsuiteaddons import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.async_client import ( + GSuiteAddOnsAsyncClient, +) +from google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.client import ( + GSuiteAddOnsClient, +) +from google.cloud.gsuiteaddons_v1.types.gsuiteaddons import ( + AddOns, + Authorization, + CreateDeploymentRequest, + DeleteDeploymentRequest, + Deployment, + GetAuthorizationRequest, + GetDeploymentRequest, + GetInstallStatusRequest, + InstallDeploymentRequest, + InstallStatus, + ListDeploymentsRequest, + ListDeploymentsResponse, + ReplaceDeploymentRequest, + UninstallDeploymentRequest, +) + +__all__ = ( + "GSuiteAddOnsClient", + "GSuiteAddOnsAsyncClient", + "AddOns", + "Authorization", + "CreateDeploymentRequest", + "DeleteDeploymentRequest", + "Deployment", + "GetAuthorizationRequest", + "GetDeploymentRequest", + "GetInstallStatusRequest", + "InstallDeploymentRequest", + "InstallStatus", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ReplaceDeploymentRequest", + "UninstallDeploymentRequest", +) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py new file mode 100644 index 000000000000..b4f0ccd75f45 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.3.3" # {x-release-please-version} diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/py.typed b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/py.typed new file mode 100644 index 000000000000..34ad6427d267 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-gsuiteaddons package uses inline types. diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/__init__.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/__init__.py new file mode 100644 index 000000000000..07b3cc871ed5 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.gsuiteaddons_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.g_suite_add_ons import GSuiteAddOnsAsyncClient, GSuiteAddOnsClient +from .types.gsuiteaddons import ( + AddOns, + Authorization, + CreateDeploymentRequest, + DeleteDeploymentRequest, + Deployment, + GetAuthorizationRequest, + GetDeploymentRequest, + GetInstallStatusRequest, + InstallDeploymentRequest, + InstallStatus, + ListDeploymentsRequest, + ListDeploymentsResponse, + ReplaceDeploymentRequest, + UninstallDeploymentRequest, +) + +__all__ = ( + "GSuiteAddOnsAsyncClient", + "AddOns", + "Authorization", + "CreateDeploymentRequest", + "DeleteDeploymentRequest", + "Deployment", + "GSuiteAddOnsClient", + "GetAuthorizationRequest", + "GetDeploymentRequest", + "GetInstallStatusRequest", + "InstallDeploymentRequest", + "InstallStatus", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ReplaceDeploymentRequest", + "UninstallDeploymentRequest", +) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_metadata.json b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_metadata.json new file mode 100644 index 000000000000..d5eb47890b5a --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_metadata.json @@ -0,0 +1,163 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.gsuiteaddons_v1", + "protoPackage": "google.cloud.gsuiteaddons.v1", + "schema": "1.0", + "services": { + "GSuiteAddOns": { + "clients": { + "grpc": { + "libraryClient": "GSuiteAddOnsClient", + "rpcs": { + "CreateDeployment": { + "methods": [ + "create_deployment" + ] + }, + "DeleteDeployment": { + "methods": [ + "delete_deployment" + ] + }, + "GetAuthorization": { + "methods": [ + "get_authorization" + ] + }, + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "GetInstallStatus": { + "methods": [ + "get_install_status" + ] + }, + "InstallDeployment": { + "methods": [ + "install_deployment" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + }, + "ReplaceDeployment": { + "methods": [ + "replace_deployment" + ] + }, + "UninstallDeployment": { + "methods": [ + "uninstall_deployment" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GSuiteAddOnsAsyncClient", + "rpcs": { + "CreateDeployment": { + "methods": [ + "create_deployment" + ] + }, + "DeleteDeployment": { + "methods": [ + "delete_deployment" + ] + }, + "GetAuthorization": { + "methods": [ + "get_authorization" + ] + }, + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "GetInstallStatus": { + "methods": [ + "get_install_status" + ] + }, + "InstallDeployment": { + "methods": [ + "install_deployment" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + }, + "ReplaceDeployment": { + "methods": [ + "replace_deployment" + ] + }, + "UninstallDeployment": { + "methods": [ + "uninstall_deployment" + ] + } + } + }, + "rest": { + "libraryClient": "GSuiteAddOnsClient", + "rpcs": { + "CreateDeployment": { + "methods": [ + "create_deployment" + ] + }, + "DeleteDeployment": { + "methods": [ + "delete_deployment" + ] + }, + "GetAuthorization": { + "methods": [ + "get_authorization" + ] + }, + "GetDeployment": { + "methods": [ + "get_deployment" + ] + }, + "GetInstallStatus": { + "methods": [ + "get_install_status" + ] + }, + "InstallDeployment": { + "methods": [ + 
"install_deployment" + ] + }, + "ListDeployments": { + "methods": [ + "list_deployments" + ] + }, + "ReplaceDeployment": { + "methods": [ + "replace_deployment" + ] + }, + "UninstallDeployment": { + "methods": [ + "uninstall_deployment" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py new file mode 100644 index 000000000000..b4f0ccd75f45 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.3.3" # {x-release-please-version} diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/py.typed b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/py.typed new file mode 100644 index 000000000000..34ad6427d267 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-gsuiteaddons package uses inline types. diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/__init__.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/__init__.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/__init__.py new file mode 100644 index 000000000000..66889384cfa7 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import GSuiteAddOnsAsyncClient +from .client import GSuiteAddOnsClient + +__all__ = ( + "GSuiteAddOnsClient", + "GSuiteAddOnsAsyncClient", +) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py new file mode 100644 index 000000000000..eb2444992fbb --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py @@ -0,0 +1,1210 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gsuiteaddons_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import wrappers_pb2 # type: ignore + +from google.cloud.gsuiteaddons_v1.services.g_suite_add_ons import pagers +from google.cloud.gsuiteaddons_v1.types import gsuiteaddons + +from .client import GSuiteAddOnsClient +from .transports.base import DEFAULT_CLIENT_INFO, GSuiteAddOnsTransport +from .transports.grpc_asyncio import GSuiteAddOnsGrpcAsyncIOTransport + + +class GSuiteAddOnsAsyncClient: + """A service for managing Google Workspace Add-ons deployments. + + A Google Workspace Add-on is a third-party embedded component + that can be installed in Google Workspace Applications like + Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides + editors. Google Workspace Add-ons can display UI cards, receive + contextual information from the host application, and perform + actions in the host application (See: + + https://developers.google.com/gsuite/add-ons/overview for more + information). 
+ + A Google Workspace Add-on deployment resource specifies metadata + about the add-on, including a specification of the entry points + in the host application that trigger add-on executions (see: + + https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests). + Add-on deployments defined via the Google Workspace Add-ons API + define their entrypoints using HTTPS URLs (See: + + https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes), + + A Google Workspace Add-on deployment can be installed in + developer mode, which allows an add-on developer to test the + experience an end-user would see when installing and running the + add-on in their G Suite applications. When running in developer + mode, more detailed error messages are exposed in the add-on UI + to aid in debugging. + + A Google Workspace Add-on deployment can be published to Google + Workspace Marketplace, which allows other Google Workspace users + to discover and install the add-on. See: + + https://developers.google.com/gsuite/add-ons/how-tos/publish-add-on-overview + for details. + """ + + _client: GSuiteAddOnsClient + + DEFAULT_ENDPOINT = GSuiteAddOnsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GSuiteAddOnsClient.DEFAULT_MTLS_ENDPOINT + + authorization_path = staticmethod(GSuiteAddOnsClient.authorization_path) + parse_authorization_path = staticmethod(GSuiteAddOnsClient.parse_authorization_path) + deployment_path = staticmethod(GSuiteAddOnsClient.deployment_path) + parse_deployment_path = staticmethod(GSuiteAddOnsClient.parse_deployment_path) + install_status_path = staticmethod(GSuiteAddOnsClient.install_status_path) + parse_install_status_path = staticmethod( + GSuiteAddOnsClient.parse_install_status_path + ) + common_billing_account_path = staticmethod( + GSuiteAddOnsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GSuiteAddOnsClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GSuiteAddOnsClient.common_folder_path) + parse_common_folder_path = staticmethod(GSuiteAddOnsClient.parse_common_folder_path) + common_organization_path = staticmethod(GSuiteAddOnsClient.common_organization_path) + parse_common_organization_path = staticmethod( + GSuiteAddOnsClient.parse_common_organization_path + ) + common_project_path = staticmethod(GSuiteAddOnsClient.common_project_path) + parse_common_project_path = staticmethod( + GSuiteAddOnsClient.parse_common_project_path + ) + common_location_path = staticmethod(GSuiteAddOnsClient.common_location_path) + parse_common_location_path = staticmethod( + GSuiteAddOnsClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GSuiteAddOnsAsyncClient: The constructed client. + """ + return GSuiteAddOnsClient.from_service_account_info.__func__(GSuiteAddOnsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+
+        Returns:
+            GSuiteAddOnsAsyncClient: The constructed client.
+        """
+        return GSuiteAddOnsClient.from_service_account_file.__func__(GSuiteAddOnsAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return GSuiteAddOnsClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> GSuiteAddOnsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            GSuiteAddOnsTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(GSuiteAddOnsClient).get_transport_class, type(GSuiteAddOnsClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, GSuiteAddOnsTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the GSuiteAddOns async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.GSuiteAddOnsTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GSuiteAddOnsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_authorization( + self, + request: Optional[Union[gsuiteaddons.GetAuthorizationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Authorization: + r"""Gets the authorization information for deployments in + a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + async def sample_get_authorization(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.GetAuthorizationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_authorization(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gsuiteaddons_v1.types.GetAuthorizationRequest, dict]]): + The request object. Request message to get Google + Workspace Add-ons authorization + information. + name (:class:`str`): + Required. Name of the project for which to get the + Google Workspace Add-ons authorization information. + + Example: ``projects/my_project/authorization``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.Authorization: + The authorization information used + when invoking deployment endpoints. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gsuiteaddons.GetAuthorizationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
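+        # Note: get_authorization is wired up below with a 120s default
+        # timeout; callers can override it per call, e.g.:
+        #     response = await client.get_authorization(request=request, timeout=30.0)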
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_authorization, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_deployment( + self, + request: Optional[Union[gsuiteaddons.CreateDeploymentRequest, dict]] = None, + *, + parent: Optional[str] = None, + deployment: Optional[gsuiteaddons.Deployment] = None, + deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Deployment: + r"""Creates a deployment with the specified name and + configuration. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + async def sample_create_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.CreateDeploymentRequest( + parent="parent_value", + deployment_id="deployment_id_value", + ) + + # Make the request + response = await client.create_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gsuiteaddons_v1.types.CreateDeploymentRequest, dict]]): + The request object. Request message to create a + deployment. + parent (:class:`str`): + Required. Name of the project in which to create the + deployment. + + Example: ``projects/my_project``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment (:class:`google.cloud.gsuiteaddons_v1.types.Deployment`): + Required. The deployment to create + (deployment.name cannot be set). + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_id (:class:`str`): + Required. The id to use for this deployment. The full + name of the created resource will be + ``projects//deployments/``. + + This corresponds to the ``deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.Deployment: + A Google Workspace Add-on deployment + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
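+        # For example, passing both a request object and a flattened field:
+        #     await client.create_deployment(request=request, parent="projects/my_project")
+        # raises ValueError, because `parent` would duplicate a request field.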
+ has_flattened_params = any([parent, deployment, deployment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gsuiteaddons.CreateDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment is not None: + request.deployment = deployment + if deployment_id is not None: + request.deployment_id = deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deployment, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def replace_deployment( + self, + request: Optional[Union[gsuiteaddons.ReplaceDeploymentRequest, dict]] = None, + *, + deployment: Optional[gsuiteaddons.Deployment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Deployment: + r"""Creates or replaces a deployment with the specified + name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + async def sample_replace_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.ReplaceDeploymentRequest( + ) + + # Make the request + response = await client.replace_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gsuiteaddons_v1.types.ReplaceDeploymentRequest, dict]]): + The request object. Request message to create or replace + a deployment. + deployment (:class:`google.cloud.gsuiteaddons_v1.types.Deployment`): + Required. The deployment to create or + replace. + + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.Deployment: + A Google Workspace Add-on deployment + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([deployment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gsuiteaddons.ReplaceDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deployment is not None: + request.deployment = deployment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.replace_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment.name", request.deployment.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deployment( + self, + request: Optional[Union[gsuiteaddons.GetDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Deployment: + r"""Gets the deployment with the specified name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + async def sample_get_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gsuiteaddons_v1.types.GetDeploymentRequest, dict]]): + The request object. Request message to get a deployment. + name (:class:`str`): + Required. The full resource name of the deployment to + get. + + Example: + ``projects/my_project/deployments/my_deployment``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.Deployment: + A Google Workspace Add-on deployment + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+            )
+
+        request = gsuiteaddons.GetDeploymentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_deployment,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_deployments(
+        self,
+        request: Optional[Union[gsuiteaddons.ListDeploymentsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListDeploymentsAsyncPager:
+        r"""Lists all deployments in a particular project.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import gsuiteaddons_v1
+
+            async def sample_list_deployments():
+                # Create a client
+                client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient()
+
+                # Initialize request argument(s)
+                request = gsuiteaddons_v1.ListDeploymentsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_deployments(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.gsuiteaddons_v1.types.ListDeploymentsRequest, dict]]):
+                The request object. Request message to list deployments
+                for a project.
+            parent (:class:`str`):
+                Required. Name of the project in which to list
+                deployments.
+
+                Example: ``projects/my_project``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.pagers.ListDeploymentsAsyncPager:
+                Response message to list deployments.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+ ) + + request = gsuiteaddons.ListDeploymentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deployments, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeploymentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deployment( + self, + request: Optional[Union[gsuiteaddons.DeleteDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the deployment with the given name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + async def sample_delete_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + await client.delete_deployment(request=request) + + Args: + request (Optional[Union[google.cloud.gsuiteaddons_v1.types.DeleteDeploymentRequest, dict]]): + The request object. Request message to delete a + deployment. + name (:class:`str`): + Required. The full resource name of the deployment to + delete. + + Example: + ``projects/my_project/deployments/my_deployment``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gsuiteaddons.DeleteDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deployment, + default_timeout=10.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def install_deployment( + self, + request: Optional[Union[gsuiteaddons.InstallDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Installs a deployment in developer mode. + See: + + https://developers.google.com/gsuite/add-ons/how-tos/testing-gsuite-addons. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + async def sample_install_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.InstallDeploymentRequest( + name="name_value", + ) + + # Make the request + await client.install_deployment(request=request) + + Args: + request (Optional[Union[google.cloud.gsuiteaddons_v1.types.InstallDeploymentRequest, dict]]): + The request object. Request message to install a + developer mode deployment. + name (:class:`str`): + Required. The full resource name of the deployment to + install. + + Example: + ``projects/my_project/deployments/my_deployment``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gsuiteaddons.InstallDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.install_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
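+        # The request's `name` also serves as the routing header value below.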
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def uninstall_deployment(
+        self,
+        request: Optional[Union[gsuiteaddons.UninstallDeploymentRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Uninstalls a developer mode deployment.
+        See:
+
+        https://developers.google.com/gsuite/add-ons/how-tos/testing-gsuite-addons.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import gsuiteaddons_v1
+
+            async def sample_uninstall_deployment():
+                # Create a client
+                client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient()
+
+                # Initialize request argument(s)
+                request = gsuiteaddons_v1.UninstallDeploymentRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                await client.uninstall_deployment(request=request)
+
+        Args:
+            request (Optional[Union[google.cloud.gsuiteaddons_v1.types.UninstallDeploymentRequest, dict]]):
+                The request object. Request message to uninstall a
+                developer mode deployment.
+            name (:class:`str`):
+                Required. The full resource name of the deployment to
+                uninstall.
+
+                Example:
+                ``projects/my_project/deployments/my_deployment``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = gsuiteaddons.UninstallDeploymentRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.uninstall_deployment,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
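+        # uninstall_deployment has no response payload; the awaited call
+        # completes silently on success.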
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_install_status( + self, + request: Optional[Union[gsuiteaddons.GetInstallStatusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.InstallStatus: + r"""Fetches the install status of a developer mode + deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + async def sample_get_install_status(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.GetInstallStatusRequest( + name="name_value", + ) + + # Make the request + response = await client.get_install_status(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.gsuiteaddons_v1.types.GetInstallStatusRequest, dict]]): + The request object. Request message to get the install + status of a developer mode deployment. + name (:class:`str`): + Required. The full resource name of the deployment. + + Example: + ``projects/my_project/deployments/my_deployment/installStatus``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.InstallStatus: + Developer mode install status of a + deployment + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gsuiteaddons.GetInstallStatusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_install_status, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
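+        # The returned InstallStatus indicates whether this developer-mode
+        # deployment is currently installed.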
+ return response + + async def __aenter__(self) -> "GSuiteAddOnsAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GSuiteAddOnsAsyncClient",) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py new file mode 100644 index 000000000000..7817f834f196 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py @@ -0,0 +1,1474 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.gsuiteaddons_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import wrappers_pb2 # type: ignore + +from google.cloud.gsuiteaddons_v1.services.g_suite_add_ons import pagers +from google.cloud.gsuiteaddons_v1.types import gsuiteaddons + +from .transports.base import DEFAULT_CLIENT_INFO, GSuiteAddOnsTransport +from .transports.grpc import GSuiteAddOnsGrpcTransport +from .transports.grpc_asyncio import GSuiteAddOnsGrpcAsyncIOTransport +from .transports.rest import GSuiteAddOnsRestTransport + + +class GSuiteAddOnsClientMeta(type): + """Metaclass for the GSuiteAddOns client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[GSuiteAddOnsTransport]] + _transport_registry["grpc"] = GSuiteAddOnsGrpcTransport + _transport_registry["grpc_asyncio"] = GSuiteAddOnsGrpcAsyncIOTransport + _transport_registry["rest"] = GSuiteAddOnsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GSuiteAddOnsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GSuiteAddOnsClient(metaclass=GSuiteAddOnsClientMeta): + """A service for managing Google Workspace Add-ons deployments. + + A Google Workspace Add-on is a third-party embedded component + that can be installed in Google Workspace Applications like + Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides + editors. Google Workspace Add-ons can display UI cards, receive + contextual information from the host application, and perform + actions in the host application (See: + + https://developers.google.com/gsuite/add-ons/overview for more + information). + + A Google Workspace Add-on deployment resource specifies metadata + about the add-on, including a specification of the entry points + in the host application that trigger add-on executions (see: + + https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests). + Add-on deployments defined via the Google Workspace Add-ons API + define their entrypoints using HTTPS URLs (See: + + https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes), + + A Google Workspace Add-on deployment can be installed in + developer mode, which allows an add-on developer to test the + experience an end-user would see when installing and running the + add-on in their G Suite applications. When running in developer + mode, more detailed error messages are exposed in the add-on UI + to aid in debugging. + + A Google Workspace Add-on deployment can be published to Google + Workspace Marketplace, which allows other Google Workspace users + to discover and install the add-on. See: + + https://developers.google.com/gsuite/add-ons/how-tos/publish-add-on-overview + for details. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "gsuiteaddons.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GSuiteAddOnsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GSuiteAddOnsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> GSuiteAddOnsTransport: + """Returns the transport used by the client instance. + + Returns: + GSuiteAddOnsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def authorization_path( + project: str, + ) -> str: + """Returns a fully-qualified authorization string.""" + return "projects/{project}/authorization".format( + project=project, + ) + + @staticmethod + def parse_authorization_path(path: str) -> Dict[str, str]: + """Parses a authorization path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/authorization$", path) + return m.groupdict() if m else {} + + @staticmethod + def deployment_path( + project: str, + deployment: str, + ) -> str: + """Returns a fully-qualified deployment string.""" + return "projects/{project}/deployments/{deployment}".format( + project=project, + deployment=deployment, + ) + + @staticmethod + def parse_deployment_path(path: str) -> Dict[str, str]: + """Parses a deployment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/deployments/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def install_status_path( + project: str, + deployment: str, + ) -> str: + """Returns a fully-qualified install_status string.""" + return "projects/{project}/deployments/{deployment}/installStatus".format( + project=project, + deployment=deployment, + ) + + @staticmethod + def parse_install_status_path(path: str) -> Dict[str, str]: + """Parses a install_status path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/deployments/(?P.+?)/installStatus$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return 
"organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, GSuiteAddOnsTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the g suite add ons client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, GSuiteAddOnsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
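+        # For example, GSuiteAddOnsClient(transport="rest") selects the REST
+        # transport from the registry, while passing a pre-built
+        # GSuiteAddOnsGrpcTransport instance uses that object as-is.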
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, GSuiteAddOnsTransport): + # transport is a GSuiteAddOnsTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_authorization( + self, + request: Optional[Union[gsuiteaddons.GetAuthorizationRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Authorization: + r"""Gets the authorization information for deployments in + a given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + def sample_get_authorization(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.GetAuthorizationRequest( + name="name_value", + ) + + # Make the request + response = client.get_authorization(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gsuiteaddons_v1.types.GetAuthorizationRequest, dict]): + The request object. Request message to get Google + Workspace Add-ons authorization + information. + name (str): + Required. Name of the project for which to get the + Google Workspace Add-ons authorization information. + + Example: ``projects/my_project/authorization``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.Authorization: + The authorization information used + when invoking deployment endpoints. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
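+        # For example, either of the following calls is valid, but combining
+        # the two raises ValueError:
+        #
+        #   client.get_authorization(name="projects/my_project/authorization")
+        #   client.get_authorization(
+        #       request={"name": "projects/my_project/authorization"}
+        #   )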
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a gsuiteaddons.GetAuthorizationRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, gsuiteaddons.GetAuthorizationRequest):
+            request = gsuiteaddons.GetAuthorizationRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_authorization]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_deployment(
+        self,
+        request: Optional[Union[gsuiteaddons.CreateDeploymentRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        deployment: Optional[gsuiteaddons.Deployment] = None,
+        deployment_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> gsuiteaddons.Deployment:
+        r"""Creates a deployment with the specified name and
+        configuration.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import gsuiteaddons_v1
+
+            def sample_create_deployment():
+                # Create a client
+                client = gsuiteaddons_v1.GSuiteAddOnsClient()
+
+                # Initialize request argument(s)
+                request = gsuiteaddons_v1.CreateDeploymentRequest(
+                    parent="parent_value",
+                    deployment_id="deployment_id_value",
+                )
+
+                # Make the request
+                response = client.create_deployment(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.gsuiteaddons_v1.types.CreateDeploymentRequest, dict]):
+                The request object. Request message to create a
+                deployment.
+            parent (str):
+                Required. Name of the project in which to create the
+                deployment.
+
+                Example: ``projects/my_project``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            deployment (google.cloud.gsuiteaddons_v1.types.Deployment):
+                Required. The deployment to create
+                (deployment.name cannot be set).
+
+                This corresponds to the ``deployment`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            deployment_id (str):
+                Required. The id to use for this deployment. The full
+                name of the created resource will be
+                ``projects/<project_number>/deployments/<deployment_id>``.
+
+                This corresponds to the ``deployment_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.Deployment: + A Google Workspace Add-on deployment + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deployment, deployment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gsuiteaddons.CreateDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsuiteaddons.CreateDeploymentRequest): + request = gsuiteaddons.CreateDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment is not None: + request.deployment = deployment + if deployment_id is not None: + request.deployment_id = deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def replace_deployment( + self, + request: Optional[Union[gsuiteaddons.ReplaceDeploymentRequest, dict]] = None, + *, + deployment: Optional[gsuiteaddons.Deployment] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Deployment: + r"""Creates or replaces a deployment with the specified + name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + def sample_replace_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.ReplaceDeploymentRequest( + ) + + # Make the request + response = client.replace_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gsuiteaddons_v1.types.ReplaceDeploymentRequest, dict]): + The request object. Request message to create or replace + a deployment. + deployment (google.cloud.gsuiteaddons_v1.types.Deployment): + Required. The deployment to create or + replace. 
+ + This corresponds to the ``deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.Deployment: + A Google Workspace Add-on deployment + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([deployment]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gsuiteaddons.ReplaceDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsuiteaddons.ReplaceDeploymentRequest): + request = gsuiteaddons.ReplaceDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deployment is not None: + request.deployment = deployment + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.replace_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment.name", request.deployment.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_deployment( + self, + request: Optional[Union[gsuiteaddons.GetDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Deployment: + r"""Gets the deployment with the specified name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + def sample_get_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.GetDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gsuiteaddons_v1.types.GetDeploymentRequest, dict]): + The request object. Request message to get a deployment. + name (str): + Required. The full resource name of the deployment to + get. + + Example: + ``projects/my_project/deployments/my_deployment``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.gsuiteaddons_v1.types.Deployment:
+                A Google Workspace Add-on deployment
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a gsuiteaddons.GetDeploymentRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, gsuiteaddons.GetDeploymentRequest):
+            request = gsuiteaddons.GetDeploymentRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_deployment]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_deployments(
+        self,
+        request: Optional[Union[gsuiteaddons.ListDeploymentsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListDeploymentsPager:
+        r"""Lists all deployments in a particular project.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import gsuiteaddons_v1
+
+            def sample_list_deployments():
+                # Create a client
+                client = gsuiteaddons_v1.GSuiteAddOnsClient()
+
+                # Initialize request argument(s)
+                request = gsuiteaddons_v1.ListDeploymentsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_deployments(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.gsuiteaddons_v1.types.ListDeploymentsRequest, dict]):
+                The request object. Request message to list deployments
+                for a project.
+            parent (str):
+                Required. Name of the project in which to list
+                deployments.
+
+                Example: ``projects/my_project``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.pagers.ListDeploymentsPager: + Response message to list deployments. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gsuiteaddons.ListDeploymentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsuiteaddons.ListDeploymentsRequest): + request = gsuiteaddons.ListDeploymentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deployments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeploymentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_deployment( + self, + request: Optional[Union[gsuiteaddons.DeleteDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the deployment with the given name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + def sample_delete_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.DeleteDeploymentRequest( + name="name_value", + ) + + # Make the request + client.delete_deployment(request=request) + + Args: + request (Union[google.cloud.gsuiteaddons_v1.types.DeleteDeploymentRequest, dict]): + The request object. Request message to delete a + deployment. + name (str): + Required. The full resource name of the deployment to + delete. + + Example: + ``projects/my_project/deployments/my_deployment``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gsuiteaddons.DeleteDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsuiteaddons.DeleteDeploymentRequest): + request = gsuiteaddons.DeleteDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def install_deployment( + self, + request: Optional[Union[gsuiteaddons.InstallDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Installs a deployment in developer mode. + See: + + https://developers.google.com/gsuite/add-ons/how-tos/testing-gsuite-addons. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + def sample_install_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.InstallDeploymentRequest( + name="name_value", + ) + + # Make the request + client.install_deployment(request=request) + + Args: + request (Union[google.cloud.gsuiteaddons_v1.types.InstallDeploymentRequest, dict]): + The request object. Request message to install a + developer mode deployment. + name (str): + Required. The full resource name of the deployment to + install. + + Example: + ``projects/my_project/deployments/my_deployment``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gsuiteaddons.InstallDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsuiteaddons.InstallDeploymentRequest): + request = gsuiteaddons.InstallDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.install_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def uninstall_deployment( + self, + request: Optional[Union[gsuiteaddons.UninstallDeploymentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Uninstalls a developer mode deployment. + See: + + https://developers.google.com/gsuite/add-ons/how-tos/testing-gsuite-addons. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + def sample_uninstall_deployment(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.UninstallDeploymentRequest( + name="name_value", + ) + + # Make the request + client.uninstall_deployment(request=request) + + Args: + request (Union[google.cloud.gsuiteaddons_v1.types.UninstallDeploymentRequest, dict]): + The request object. Request message to uninstall a + developer mode deployment. + name (str): + Required. The full resource name of the deployment to + install. + + Example: + ``projects/my_project/deployments/my_deployment``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gsuiteaddons.UninstallDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsuiteaddons.UninstallDeploymentRequest): + request = gsuiteaddons.UninstallDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.uninstall_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_install_status( + self, + request: Optional[Union[gsuiteaddons.GetInstallStatusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.InstallStatus: + r"""Fetches the install status of a developer mode + deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import gsuiteaddons_v1 + + def sample_get_install_status(): + # Create a client + client = gsuiteaddons_v1.GSuiteAddOnsClient() + + # Initialize request argument(s) + request = gsuiteaddons_v1.GetInstallStatusRequest( + name="name_value", + ) + + # Make the request + response = client.get_install_status(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.gsuiteaddons_v1.types.GetInstallStatusRequest, dict]): + The request object. Request message to get the install + status of a developer mode deployment. + name (str): + Required. The full resource name of the deployment. + + Example: + ``projects/my_project/deployments/my_deployment/installStatus``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.gsuiteaddons_v1.types.InstallStatus: + Developer mode install status of a + deployment + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
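+        # A rough usage sketch (the ``installed`` BoolValue field on
+        # InstallStatus is assumed here, per the wrappers_pb2 import above):
+        #
+        #   status = client.get_install_status(
+        #       name="projects/my_project/deployments/my_deployment/installStatus"
+        #   )
+        #   if status.installed.value:
+        #       print("Developer-mode deployment is installed.")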
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gsuiteaddons.GetInstallStatusRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gsuiteaddons.GetInstallStatusRequest): + request = gsuiteaddons.GetInstallStatusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_install_status] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GSuiteAddOnsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GSuiteAddOnsClient",) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/pagers.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/pagers.py new file mode 100644 index 000000000000..bdb66c39c4b0 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.gsuiteaddons_v1.types import gsuiteaddons + + +class ListDeploymentsPager: + """A pager for iterating through ``list_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gsuiteaddons_v1.types.ListDeploymentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deployments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeployments`` requests and continue to iterate + through the ``deployments`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.gsuiteaddons_v1.types.ListDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., gsuiteaddons.ListDeploymentsResponse], + request: gsuiteaddons.ListDeploymentsRequest, + response: gsuiteaddons.ListDeploymentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gsuiteaddons_v1.types.ListDeploymentsRequest): + The initial request object. + response (google.cloud.gsuiteaddons_v1.types.ListDeploymentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = gsuiteaddons.ListDeploymentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[gsuiteaddons.ListDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[gsuiteaddons.Deployment]: + for page in self.pages: + yield from page.deployments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentsAsyncPager: + """A pager for iterating through ``list_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.gsuiteaddons_v1.types.ListDeploymentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deployments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeployments`` requests and continue to iterate + through the ``deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.gsuiteaddons_v1.types.ListDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[gsuiteaddons.ListDeploymentsResponse]], + request: gsuiteaddons.ListDeploymentsRequest, + response: gsuiteaddons.ListDeploymentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.gsuiteaddons_v1.types.ListDeploymentsRequest): + The initial request object. + response (google.cloud.gsuiteaddons_v1.types.ListDeploymentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = gsuiteaddons.ListDeploymentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[gsuiteaddons.ListDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[gsuiteaddons.Deployment]: + async def async_generator(): + async for page in self.pages: + for response in page.deployments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/__init__.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/__init__.py new file mode 100644 index 000000000000..f0c41729e307 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GSuiteAddOnsTransport +from .grpc import GSuiteAddOnsGrpcTransport +from .grpc_asyncio import GSuiteAddOnsGrpcAsyncIOTransport +from .rest import GSuiteAddOnsRestInterceptor, GSuiteAddOnsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GSuiteAddOnsTransport]] +_transport_registry["grpc"] = GSuiteAddOnsGrpcTransport +_transport_registry["grpc_asyncio"] = GSuiteAddOnsGrpcAsyncIOTransport +_transport_registry["rest"] = GSuiteAddOnsRestTransport + +__all__ = ( + "GSuiteAddOnsTransport", + "GSuiteAddOnsGrpcTransport", + "GSuiteAddOnsGrpcAsyncIOTransport", + "GSuiteAddOnsRestTransport", + "GSuiteAddOnsRestInterceptor", +) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/base.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/base.py new file mode 100644 index 000000000000..77edfe6bf2fa --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/base.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.gsuiteaddons_v1 import gapic_version as package_version +from google.cloud.gsuiteaddons_v1.types import gsuiteaddons + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GSuiteAddOnsTransport(abc.ABC): + """Abstract transport class for GSuiteAddOns.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "gsuiteaddons.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
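+        # Resolution order, as implemented below: explicit ``credentials``
+        # win; otherwise ``credentials_file`` is loaded via
+        # google.auth.load_credentials_from_file; otherwise
+        # google.auth.default() supplies Application Default Credentials.
+        # Passing both credentials and credentials_file raises
+        # DuplicateCredentialArgs.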
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_authorization: gapic_v1.method.wrap_method( + self.get_authorization, + default_timeout=120.0, + client_info=client_info, + ), + self.create_deployment: gapic_v1.method.wrap_method( + self.create_deployment, + default_timeout=10.0, + client_info=client_info, + ), + self.replace_deployment: gapic_v1.method.wrap_method( + self.replace_deployment, + default_timeout=None, + client_info=client_info, + ), + self.get_deployment: gapic_v1.method.wrap_method( + self.get_deployment, + default_timeout=None, + client_info=client_info, + ), + self.list_deployments: gapic_v1.method.wrap_method( + self.list_deployments, + default_timeout=None, + client_info=client_info, + ), + self.delete_deployment: gapic_v1.method.wrap_method( + self.delete_deployment, + default_timeout=10.0, + client_info=client_info, + ), + self.install_deployment: gapic_v1.method.wrap_method( + self.install_deployment, + default_timeout=None, + client_info=client_info, + ), + self.uninstall_deployment: gapic_v1.method.wrap_method( + self.uninstall_deployment, + default_timeout=None, + client_info=client_info, + ), + self.get_install_status: gapic_v1.method.wrap_method( + self.get_install_status, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def get_authorization( + self, + ) -> Callable[ + [gsuiteaddons.GetAuthorizationRequest], + Union[gsuiteaddons.Authorization, Awaitable[gsuiteaddons.Authorization]], + ]: + raise NotImplementedError() + + @property + def create_deployment( + self, + ) -> Callable[ + [gsuiteaddons.CreateDeploymentRequest], + Union[gsuiteaddons.Deployment, Awaitable[gsuiteaddons.Deployment]], + ]: + raise NotImplementedError() + + @property + def replace_deployment( + self, + ) -> Callable[ + [gsuiteaddons.ReplaceDeploymentRequest], + Union[gsuiteaddons.Deployment, Awaitable[gsuiteaddons.Deployment]], + ]: + raise NotImplementedError() + + @property + def get_deployment( + self, + ) -> Callable[ + [gsuiteaddons.GetDeploymentRequest], + Union[gsuiteaddons.Deployment, Awaitable[gsuiteaddons.Deployment]], + ]: + raise NotImplementedError() + + @property + def list_deployments( + self, + ) -> Callable[ + [gsuiteaddons.ListDeploymentsRequest], + Union[ + gsuiteaddons.ListDeploymentsResponse, + Awaitable[gsuiteaddons.ListDeploymentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_deployment( + self, + ) -> Callable[ + [gsuiteaddons.DeleteDeploymentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def install_deployment( + self, + ) -> Callable[ + [gsuiteaddons.InstallDeploymentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def uninstall_deployment( + self, + ) -> Callable[ + [gsuiteaddons.UninstallDeploymentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_install_status( + self, + ) -> Callable[ + [gsuiteaddons.GetInstallStatusRequest], + Union[gsuiteaddons.InstallStatus, Awaitable[gsuiteaddons.InstallStatus]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("GSuiteAddOnsTransport",) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc.py new file mode 100644 index 000000000000..ac71461d72e1 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc.py @@ -0,0 +1,520 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
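The abstract base above centralizes credential resolution (falling back to
``google.auth.default()``) and gives each RPC a per-method default timeout via
``_prep_wrapped_messages``. A minimal consumption sketch, assuming the generated
``GSuiteAddOnsClient`` exported by ``google.cloud.gsuiteaddons_v1``; the
``transport`` argument selects one of the concrete subclasses added below:

.. code-block:: python

    from google.cloud import gsuiteaddons_v1

    # No explicit credentials: the transport base falls back to
    # Application Default Credentials via google.auth.default().
    client = gsuiteaddons_v1.GSuiteAddOnsClient(transport="grpc")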
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.gsuiteaddons_v1.types import gsuiteaddons
+
+from .base import DEFAULT_CLIENT_INFO, GSuiteAddOnsTransport
+
+
+class GSuiteAddOnsGrpcTransport(GSuiteAddOnsTransport):
+    """gRPC backend transport for GSuiteAddOns.
+
+    A service for managing Google Workspace Add-ons deployments.
+
+    A Google Workspace Add-on is a third-party embedded component
+    that can be installed in Google Workspace Applications like
+    Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides
+    editors. Google Workspace Add-ons can display UI cards, receive
+    contextual information from the host application, and perform
+    actions in the host application (See:
+
+    https://developers.google.com/gsuite/add-ons/overview for more
+    information).
+
+    A Google Workspace Add-on deployment resource specifies metadata
+    about the add-on, including a specification of the entry points
+    in the host application that trigger add-on executions (see:
+
+    https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests).
+    Add-on deployments defined via the Google Workspace Add-ons API
+    define their entrypoints using HTTPS URLs (See:
+
+    https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes).
+
+    A Google Workspace Add-on deployment can be installed in
+    developer mode, which allows an add-on developer to test the
+    experience an end-user would see when installing and running the
+    add-on in their G Suite applications. When running in developer
+    mode, more detailed error messages are exposed in the add-on UI
+    to aid in debugging.
+
+    A Google Workspace Add-on deployment can be published to Google
+    Workspace Marketplace, which allows other Google Workspace users
+    to discover and install the add-on. See:
+
+    https://developers.google.com/gsuite/add-ons/how-tos/publish-add-on-overview
+    for details.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "gsuiteaddons.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "gsuiteaddons.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def get_authorization(
+        self,
+    ) -> Callable[[gsuiteaddons.GetAuthorizationRequest], gsuiteaddons.Authorization]:
+        r"""Return a callable for the get authorization method over gRPC.
+
+        Gets the authorization information for deployments in
+        a given project.
+ + Returns: + Callable[[~.GetAuthorizationRequest], + ~.Authorization]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_authorization" not in self._stubs: + self._stubs["get_authorization"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/GetAuthorization", + request_serializer=gsuiteaddons.GetAuthorizationRequest.serialize, + response_deserializer=gsuiteaddons.Authorization.deserialize, + ) + return self._stubs["get_authorization"] + + @property + def create_deployment( + self, + ) -> Callable[[gsuiteaddons.CreateDeploymentRequest], gsuiteaddons.Deployment]: + r"""Return a callable for the create deployment method over gRPC. + + Creates a deployment with the specified name and + configuration. + + Returns: + Callable[[~.CreateDeploymentRequest], + ~.Deployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deployment" not in self._stubs: + self._stubs["create_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/CreateDeployment", + request_serializer=gsuiteaddons.CreateDeploymentRequest.serialize, + response_deserializer=gsuiteaddons.Deployment.deserialize, + ) + return self._stubs["create_deployment"] + + @property + def replace_deployment( + self, + ) -> Callable[[gsuiteaddons.ReplaceDeploymentRequest], gsuiteaddons.Deployment]: + r"""Return a callable for the replace deployment method over gRPC. + + Creates or replaces a deployment with the specified + name. + + Returns: + Callable[[~.ReplaceDeploymentRequest], + ~.Deployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "replace_deployment" not in self._stubs: + self._stubs["replace_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/ReplaceDeployment", + request_serializer=gsuiteaddons.ReplaceDeploymentRequest.serialize, + response_deserializer=gsuiteaddons.Deployment.deserialize, + ) + return self._stubs["replace_deployment"] + + @property + def get_deployment( + self, + ) -> Callable[[gsuiteaddons.GetDeploymentRequest], gsuiteaddons.Deployment]: + r"""Return a callable for the get deployment method over gRPC. + + Gets the deployment with the specified name. + + Returns: + Callable[[~.GetDeploymentRequest], + ~.Deployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_deployment" not in self._stubs: + self._stubs["get_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/GetDeployment", + request_serializer=gsuiteaddons.GetDeploymentRequest.serialize, + response_deserializer=gsuiteaddons.Deployment.deserialize, + ) + return self._stubs["get_deployment"] + + @property + def list_deployments( + self, + ) -> Callable[ + [gsuiteaddons.ListDeploymentsRequest], gsuiteaddons.ListDeploymentsResponse + ]: + r"""Return a callable for the list deployments method over gRPC. + + Lists all deployments in a particular project. + + Returns: + Callable[[~.ListDeploymentsRequest], + ~.ListDeploymentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployments" not in self._stubs: + self._stubs["list_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/ListDeployments", + request_serializer=gsuiteaddons.ListDeploymentsRequest.serialize, + response_deserializer=gsuiteaddons.ListDeploymentsResponse.deserialize, + ) + return self._stubs["list_deployments"] + + @property + def delete_deployment( + self, + ) -> Callable[[gsuiteaddons.DeleteDeploymentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete deployment method over gRPC. + + Deletes the deployment with the given name. + + Returns: + Callable[[~.DeleteDeploymentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deployment" not in self._stubs: + self._stubs["delete_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/DeleteDeployment", + request_serializer=gsuiteaddons.DeleteDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_deployment"] + + @property + def install_deployment( + self, + ) -> Callable[[gsuiteaddons.InstallDeploymentRequest], empty_pb2.Empty]: + r"""Return a callable for the install deployment method over gRPC. + + Installs a deployment in developer mode. + See: + + https://developers.google.com/gsuite/add-ons/how-tos/testing-gsuite-addons. + + Returns: + Callable[[~.InstallDeploymentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "install_deployment" not in self._stubs: + self._stubs["install_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/InstallDeployment", + request_serializer=gsuiteaddons.InstallDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["install_deployment"] + + @property + def uninstall_deployment( + self, + ) -> Callable[[gsuiteaddons.UninstallDeploymentRequest], empty_pb2.Empty]: + r"""Return a callable for the uninstall deployment method over gRPC. + + Uninstalls a developer mode deployment. 
+ See: + + https://developers.google.com/gsuite/add-ons/how-tos/testing-gsuite-addons. + + Returns: + Callable[[~.UninstallDeploymentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "uninstall_deployment" not in self._stubs: + self._stubs["uninstall_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/UninstallDeployment", + request_serializer=gsuiteaddons.UninstallDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["uninstall_deployment"] + + @property + def get_install_status( + self, + ) -> Callable[[gsuiteaddons.GetInstallStatusRequest], gsuiteaddons.InstallStatus]: + r"""Return a callable for the get install status method over gRPC. + + Fetches the install status of a developer mode + deployment. + + Returns: + Callable[[~.GetInstallStatusRequest], + ~.InstallStatus]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_install_status" not in self._stubs: + self._stubs["get_install_status"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/GetInstallStatus", + request_serializer=gsuiteaddons.GetInstallStatusRequest.serialize, + response_deserializer=gsuiteaddons.InstallStatus.deserialize, + ) + return self._stubs["get_install_status"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GSuiteAddOnsGrpcTransport",) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc_asyncio.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc_asyncio.py new file mode 100644 index 000000000000..f6693e762457 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc_asyncio.py @@ -0,0 +1,532 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
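Before the AsyncIO variant, a short sketch of the channel plumbing defined
above, using only APIs shown in this file: ``create_channel`` fills in the
service's default scopes and default host, and a channel passed to the
constructor causes any supplied credentials to be ignored, as the
``if channel:`` branch shows.

.. code-block:: python

    from google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.transports.grpc import (
        GSuiteAddOnsGrpcTransport,
    )

    # Build a channel explicitly, then hand it to the transport; the
    # transport skips channel creation when one is provided.
    channel = GSuiteAddOnsGrpcTransport.create_channel(
        "gsuiteaddons.googleapis.com"
    )
    transport = GSuiteAddOnsGrpcTransport(channel=channel)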
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.gsuiteaddons_v1.types import gsuiteaddons
+
+from .base import DEFAULT_CLIENT_INFO, GSuiteAddOnsTransport
+from .grpc import GSuiteAddOnsGrpcTransport
+
+
+class GSuiteAddOnsGrpcAsyncIOTransport(GSuiteAddOnsTransport):
+    """gRPC AsyncIO backend transport for GSuiteAddOns.
+
+    A service for managing Google Workspace Add-ons deployments.
+
+    A Google Workspace Add-on is a third-party embedded component
+    that can be installed in Google Workspace Applications like
+    Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides
+    editors. Google Workspace Add-ons can display UI cards, receive
+    contextual information from the host application, and perform
+    actions in the host application (See:
+
+    https://developers.google.com/gsuite/add-ons/overview for more
+    information).
+
+    A Google Workspace Add-on deployment resource specifies metadata
+    about the add-on, including a specification of the entry points
+    in the host application that trigger add-on executions (see:
+
+    https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests).
+    Add-on deployments defined via the Google Workspace Add-ons API
+    define their entrypoints using HTTPS URLs (See:
+
+    https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes).
+
+    A Google Workspace Add-on deployment can be installed in
+    developer mode, which allows an add-on developer to test the
+    experience an end-user would see when installing and running the
+    add-on in their G Suite applications. When running in developer
+    mode, more detailed error messages are exposed in the add-on UI
+    to aid in debugging.
+
+    A Google Workspace Add-on deployment can be published to Google
+    Workspace Marketplace, which allows other Google Workspace users
+    to discover and install the add-on. See:
+
+    https://developers.google.com/gsuite/add-ons/how-tos/publish-add-on-overview
+    for details.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "gsuiteaddons.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "gsuiteaddons.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def get_authorization(
+        self,
+    ) -> Callable[
+        [gsuiteaddons.GetAuthorizationRequest], Awaitable[gsuiteaddons.Authorization]
+    ]:
+        r"""Return a callable for the get authorization method over gRPC.
+
+        Gets the authorization information for deployments in
+        a given project.
+
+        Returns:
+            Callable[[~.GetAuthorizationRequest],
+                    Awaitable[~.Authorization]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "get_authorization" not in self._stubs: + self._stubs["get_authorization"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/GetAuthorization", + request_serializer=gsuiteaddons.GetAuthorizationRequest.serialize, + response_deserializer=gsuiteaddons.Authorization.deserialize, + ) + return self._stubs["get_authorization"] + + @property + def create_deployment( + self, + ) -> Callable[ + [gsuiteaddons.CreateDeploymentRequest], Awaitable[gsuiteaddons.Deployment] + ]: + r"""Return a callable for the create deployment method over gRPC. + + Creates a deployment with the specified name and + configuration. + + Returns: + Callable[[~.CreateDeploymentRequest], + Awaitable[~.Deployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deployment" not in self._stubs: + self._stubs["create_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/CreateDeployment", + request_serializer=gsuiteaddons.CreateDeploymentRequest.serialize, + response_deserializer=gsuiteaddons.Deployment.deserialize, + ) + return self._stubs["create_deployment"] + + @property + def replace_deployment( + self, + ) -> Callable[ + [gsuiteaddons.ReplaceDeploymentRequest], Awaitable[gsuiteaddons.Deployment] + ]: + r"""Return a callable for the replace deployment method over gRPC. + + Creates or replaces a deployment with the specified + name. + + Returns: + Callable[[~.ReplaceDeploymentRequest], + Awaitable[~.Deployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "replace_deployment" not in self._stubs: + self._stubs["replace_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/ReplaceDeployment", + request_serializer=gsuiteaddons.ReplaceDeploymentRequest.serialize, + response_deserializer=gsuiteaddons.Deployment.deserialize, + ) + return self._stubs["replace_deployment"] + + @property + def get_deployment( + self, + ) -> Callable[ + [gsuiteaddons.GetDeploymentRequest], Awaitable[gsuiteaddons.Deployment] + ]: + r"""Return a callable for the get deployment method over gRPC. + + Gets the deployment with the specified name. + + Returns: + Callable[[~.GetDeploymentRequest], + Awaitable[~.Deployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment" not in self._stubs: + self._stubs["get_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/GetDeployment", + request_serializer=gsuiteaddons.GetDeploymentRequest.serialize, + response_deserializer=gsuiteaddons.Deployment.deserialize, + ) + return self._stubs["get_deployment"] + + @property + def list_deployments( + self, + ) -> Callable[ + [gsuiteaddons.ListDeploymentsRequest], + Awaitable[gsuiteaddons.ListDeploymentsResponse], + ]: + r"""Return a callable for the list deployments method over gRPC. 
+ + Lists all deployments in a particular project. + + Returns: + Callable[[~.ListDeploymentsRequest], + Awaitable[~.ListDeploymentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployments" not in self._stubs: + self._stubs["list_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/ListDeployments", + request_serializer=gsuiteaddons.ListDeploymentsRequest.serialize, + response_deserializer=gsuiteaddons.ListDeploymentsResponse.deserialize, + ) + return self._stubs["list_deployments"] + + @property + def delete_deployment( + self, + ) -> Callable[[gsuiteaddons.DeleteDeploymentRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete deployment method over gRPC. + + Deletes the deployment with the given name. + + Returns: + Callable[[~.DeleteDeploymentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deployment" not in self._stubs: + self._stubs["delete_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/DeleteDeployment", + request_serializer=gsuiteaddons.DeleteDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_deployment"] + + @property + def install_deployment( + self, + ) -> Callable[[gsuiteaddons.InstallDeploymentRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the install deployment method over gRPC. + + Installs a deployment in developer mode. + See: + + https://developers.google.com/gsuite/add-ons/how-tos/testing-gsuite-addons. + + Returns: + Callable[[~.InstallDeploymentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "install_deployment" not in self._stubs: + self._stubs["install_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/InstallDeployment", + request_serializer=gsuiteaddons.InstallDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["install_deployment"] + + @property + def uninstall_deployment( + self, + ) -> Callable[ + [gsuiteaddons.UninstallDeploymentRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the uninstall deployment method over gRPC. + + Uninstalls a developer mode deployment. + See: + + https://developers.google.com/gsuite/add-ons/how-tos/testing-gsuite-addons. + + Returns: + Callable[[~.UninstallDeploymentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "uninstall_deployment" not in self._stubs: + self._stubs["uninstall_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/UninstallDeployment", + request_serializer=gsuiteaddons.UninstallDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["uninstall_deployment"] + + @property + def get_install_status( + self, + ) -> Callable[ + [gsuiteaddons.GetInstallStatusRequest], Awaitable[gsuiteaddons.InstallStatus] + ]: + r"""Return a callable for the get install status method over gRPC. + + Fetches the install status of a developer mode + deployment. + + Returns: + Callable[[~.GetInstallStatusRequest], + Awaitable[~.InstallStatus]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_install_status" not in self._stubs: + self._stubs["get_install_status"] = self.grpc_channel.unary_unary( + "/google.cloud.gsuiteaddons.v1.GSuiteAddOns/GetInstallStatus", + request_serializer=gsuiteaddons.GetInstallStatusRequest.serialize, + response_deserializer=gsuiteaddons.InstallStatus.deserialize, + ) + return self._stubs["get_install_status"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("GSuiteAddOnsGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/rest.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/rest.py new file mode 100644 index 000000000000..bdeec2505f1c --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/rest.py @@ -0,0 +1,1326 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
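The AsyncIO transport above mirrors the synchronous one, swapping
``grpc_helpers`` for ``grpc_helpers_async`` and returning awaitable stubs. A
usage sketch, assuming the generated ``GSuiteAddOnsAsyncClient`` and a
flattened ``parent`` argument on ``list_deployments`` (both standard for
generated GAPIC clients; the project id below is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud import gsuiteaddons_v1

    async def list_all() -> None:
        client = gsuiteaddons_v1.GSuiteAddOnsAsyncClient(transport="grpc_asyncio")
        # list_deployments returns an async pager built on the awaitable stub.
        async for deployment in await client.list_deployments(
            parent="projects/my-project"
        ):
            print(deployment.name)

    asyncio.run(list_all())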
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.gsuiteaddons_v1.types import gsuiteaddons + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import GSuiteAddOnsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class GSuiteAddOnsRestInterceptor: + """Interceptor for GSuiteAddOns. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GSuiteAddOnsRestTransport. + + .. code-block:: python + class MyCustomGSuiteAddOnsInterceptor(GSuiteAddOnsRestInterceptor): + def pre_create_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_authorization(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_authorization(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_install_status(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_install_status(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_install_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_list_deployments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deployments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_replace_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_replace_deployment(self, response): + logging.log(f"Received response: {response}") + return 
response + + def pre_uninstall_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + transport = GSuiteAddOnsRestTransport(interceptor=MyCustomGSuiteAddOnsInterceptor()) + client = GSuiteAddOnsClient(transport=transport) + + + """ + + def pre_create_deployment( + self, + request: gsuiteaddons.CreateDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.CreateDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. + """ + return request, metadata + + def post_create_deployment( + self, response: gsuiteaddons.Deployment + ) -> gsuiteaddons.Deployment: + """Post-rpc interceptor for create_deployment + + Override in a subclass to manipulate the response + after it is returned by the GSuiteAddOns server but before + it is returned to user code. + """ + return response + + def pre_delete_deployment( + self, + request: gsuiteaddons.DeleteDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.DeleteDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. + """ + return request, metadata + + def pre_get_authorization( + self, + request: gsuiteaddons.GetAuthorizationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.GetAuthorizationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_authorization + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. + """ + return request, metadata + + def post_get_authorization( + self, response: gsuiteaddons.Authorization + ) -> gsuiteaddons.Authorization: + """Post-rpc interceptor for get_authorization + + Override in a subclass to manipulate the response + after it is returned by the GSuiteAddOns server but before + it is returned to user code. + """ + return response + + def pre_get_deployment( + self, + request: gsuiteaddons.GetDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.GetDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. + """ + return request, metadata + + def post_get_deployment( + self, response: gsuiteaddons.Deployment + ) -> gsuiteaddons.Deployment: + """Post-rpc interceptor for get_deployment + + Override in a subclass to manipulate the response + after it is returned by the GSuiteAddOns server but before + it is returned to user code. + """ + return response + + def pre_get_install_status( + self, + request: gsuiteaddons.GetInstallStatusRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.GetInstallStatusRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_install_status + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. 
+ """ + return request, metadata + + def post_get_install_status( + self, response: gsuiteaddons.InstallStatus + ) -> gsuiteaddons.InstallStatus: + """Post-rpc interceptor for get_install_status + + Override in a subclass to manipulate the response + after it is returned by the GSuiteAddOns server but before + it is returned to user code. + """ + return response + + def pre_install_deployment( + self, + request: gsuiteaddons.InstallDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.InstallDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for install_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. + """ + return request, metadata + + def pre_list_deployments( + self, + request: gsuiteaddons.ListDeploymentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.ListDeploymentsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_deployments + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. + """ + return request, metadata + + def post_list_deployments( + self, response: gsuiteaddons.ListDeploymentsResponse + ) -> gsuiteaddons.ListDeploymentsResponse: + """Post-rpc interceptor for list_deployments + + Override in a subclass to manipulate the response + after it is returned by the GSuiteAddOns server but before + it is returned to user code. + """ + return response + + def pre_replace_deployment( + self, + request: gsuiteaddons.ReplaceDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.ReplaceDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for replace_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. + """ + return request, metadata + + def post_replace_deployment( + self, response: gsuiteaddons.Deployment + ) -> gsuiteaddons.Deployment: + """Post-rpc interceptor for replace_deployment + + Override in a subclass to manipulate the response + after it is returned by the GSuiteAddOns server but before + it is returned to user code. + """ + return response + + def pre_uninstall_deployment( + self, + request: gsuiteaddons.UninstallDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gsuiteaddons.UninstallDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for uninstall_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the GSuiteAddOns server. + """ + return request, metadata + + +@dataclasses.dataclass +class GSuiteAddOnsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GSuiteAddOnsRestInterceptor + + +class GSuiteAddOnsRestTransport(GSuiteAddOnsTransport): + """REST backend transport for GSuiteAddOns. + + A service for managing Google Workspace Add-ons deployments. + + A Google Workspace Add-on is a third-party embedded component + that can be installed in Google Workspace Applications like + Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides + editors. Google Workspace Add-ons can display UI cards, receive + contextual information from the host application, and perform + actions in the host application (See: + + https://developers.google.com/gsuite/add-ons/overview for more + information). 
+
+    A Google Workspace Add-on deployment resource specifies metadata
+    about the add-on, including a specification of the entry points
+    in the host application that trigger add-on executions (see:
+
+    https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests).
+    Add-on deployments defined via the Google Workspace Add-ons API
+    define their entrypoints using HTTPS URLs (See:
+
+    https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes).
+
+    A Google Workspace Add-on deployment can be installed in
+    developer mode, which allows an add-on developer to test the
+    experience an end-user would see when installing and running the
+    add-on in their G Suite applications. When running in developer
+    mode, more detailed error messages are exposed in the add-on UI
+    to aid in debugging.
+
+    A Google Workspace Add-on deployment can be published to Google
+    Workspace Marketplace, which allows other Google Workspace users
+    to discover and install the add-on. See:
+
+    https://developers.google.com/gsuite/add-ons/how-tos/publish-add-on-overview
+    for details.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "gsuiteaddons.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[GSuiteAddOnsRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or GSuiteAddOnsRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateDeployment(GSuiteAddOnsRestStub):
+        def __hash__(self):
+            return hash("CreateDeployment")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "deploymentId": "",
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: gsuiteaddons.CreateDeploymentRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gsuiteaddons.Deployment:
+            r"""Call the create deployment method over HTTP.
+
+            Args:
+                request (~.gsuiteaddons.CreateDeploymentRequest):
+                    The request object. Request message to create a
+                    deployment.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gsuiteaddons.Deployment:
+                    A Google Workspace Add-on deployment
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*}/deployments",
+                    "body": "deployment",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_deployment(
+                request, metadata
+            )
+            pb_request = gsuiteaddons.CreateDeploymentRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsuiteaddons.Deployment() + pb_resp = gsuiteaddons.Deployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_deployment(resp) + return resp + + class _DeleteDeployment(GSuiteAddOnsRestStub): + def __hash__(self): + return hash("DeleteDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsuiteaddons.DeleteDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete deployment method over HTTP. + + Args: + request (~.gsuiteaddons.DeleteDeploymentRequest): + The request object. Request message to delete a + deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/deployments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_deployment( + request, metadata + ) + pb_request = gsuiteaddons.DeleteDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetAuthorization(GSuiteAddOnsRestStub): + def __hash__(self): + return hash("GetAuthorization") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsuiteaddons.GetAuthorizationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Authorization: + r"""Call the get authorization method over HTTP. + + Args: + request (~.gsuiteaddons.GetAuthorizationRequest): + The request object. Request message to get Google + Workspace Add-ons authorization + information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gsuiteaddons.Authorization: + The authorization information used + when invoking deployment endpoints. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/authorization}", + }, + ] + request, metadata = self._interceptor.pre_get_authorization( + request, metadata + ) + pb_request = gsuiteaddons.GetAuthorizationRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsuiteaddons.Authorization() + pb_resp = gsuiteaddons.Authorization.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_authorization(resp) + return resp + + class _GetDeployment(GSuiteAddOnsRestStub): + def __hash__(self): + return hash("GetDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsuiteaddons.GetDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Deployment: + r"""Call the get deployment method over HTTP. + + Args: + request (~.gsuiteaddons.GetDeploymentRequest): + The request object. Request message to get a deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.gsuiteaddons.Deployment: + A Google Workspace Add-on deployment + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/deployments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_deployment(request, metadata) + pb_request = gsuiteaddons.GetDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsuiteaddons.Deployment() + pb_resp = gsuiteaddons.Deployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_deployment(resp) + return resp + + class _GetInstallStatus(GSuiteAddOnsRestStub): + def __hash__(self): + return hash("GetInstallStatus") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsuiteaddons.GetInstallStatusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.InstallStatus: + r"""Call the get install status method over HTTP. + + Args: + request (~.gsuiteaddons.GetInstallStatusRequest): + The request object. Request message to get the install + status of a developer mode deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.gsuiteaddons.InstallStatus: + Developer mode install status of a + deployment + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/deployments/*/installStatus}", + }, + ] + request, metadata = self._interceptor.pre_get_install_status( + request, metadata + ) + pb_request = gsuiteaddons.GetInstallStatusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsuiteaddons.InstallStatus() + pb_resp = gsuiteaddons.InstallStatus.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_install_status(resp) + return resp + + class _InstallDeployment(GSuiteAddOnsRestStub): + def __hash__(self): + return hash("InstallDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsuiteaddons.InstallDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the install deployment method over HTTP. + + Args: + request (~.gsuiteaddons.InstallDeploymentRequest): + The request object. Request message to install a + developer mode deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/deployments/*}:install", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_install_deployment( + request, metadata + ) + pb_request = gsuiteaddons.InstallDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ListDeployments(GSuiteAddOnsRestStub): + def __hash__(self): + return hash("ListDeployments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsuiteaddons.ListDeploymentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.ListDeploymentsResponse: + r"""Call the list deployments method over HTTP. + + Args: + request (~.gsuiteaddons.ListDeploymentsRequest): + The request object. Request message to list deployments + for a project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gsuiteaddons.ListDeploymentsResponse: + Response message to list deployments. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/deployments", + }, + ] + request, metadata = self._interceptor.pre_list_deployments( + request, metadata + ) + pb_request = gsuiteaddons.ListDeploymentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsuiteaddons.ListDeploymentsResponse() + pb_resp = gsuiteaddons.ListDeploymentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_deployments(resp) + return resp + + class _ReplaceDeployment(GSuiteAddOnsRestStub): + def __hash__(self): + return hash("ReplaceDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsuiteaddons.ReplaceDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gsuiteaddons.Deployment: + r"""Call the replace deployment method over HTTP. + + Args: + request (~.gsuiteaddons.ReplaceDeploymentRequest): + The request object. Request message to create or replace + a deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.gsuiteaddons.Deployment: + A Google Workspace Add-on deployment + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{deployment.name=projects/*/deployments/*}", + "body": "deployment", + }, + ] + request, metadata = self._interceptor.pre_replace_deployment( + request, metadata + ) + pb_request = gsuiteaddons.ReplaceDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsuiteaddons.Deployment() + pb_resp = gsuiteaddons.Deployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_replace_deployment(resp) + return resp + + class _UninstallDeployment(GSuiteAddOnsRestStub): + def __hash__(self): + return hash("UninstallDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gsuiteaddons.UninstallDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the uninstall deployment method over HTTP. + + Args: + request (~.gsuiteaddons.UninstallDeploymentRequest): + The request object. Request message to uninstall a + developer mode deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/deployments/*}:uninstall", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_uninstall_deployment( + request, metadata + ) + pb_request = gsuiteaddons.UninstallDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + @property + def create_deployment( + self, + ) -> Callable[[gsuiteaddons.CreateDeploymentRequest], gsuiteaddons.Deployment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_deployment( + self, + ) -> Callable[[gsuiteaddons.DeleteDeploymentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_authorization( + self, + ) -> Callable[[gsuiteaddons.GetAuthorizationRequest], gsuiteaddons.Authorization]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAuthorization(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deployment( + self, + ) -> Callable[[gsuiteaddons.GetDeploymentRequest], gsuiteaddons.Deployment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_install_status( + self, + ) -> Callable[[gsuiteaddons.GetInstallStatusRequest], gsuiteaddons.InstallStatus]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstallStatus(self._session, self._host, self._interceptor) # type: ignore + + @property + def install_deployment( + self, + ) -> Callable[[gsuiteaddons.InstallDeploymentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._InstallDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_deployments( + self, + ) -> Callable[ + [gsuiteaddons.ListDeploymentsRequest], gsuiteaddons.ListDeploymentsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeployments(self._session, self._host, self._interceptor) # type: ignore + + @property + def replace_deployment( + self, + ) -> Callable[[gsuiteaddons.ReplaceDeploymentRequest], gsuiteaddons.Deployment]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReplaceDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def uninstall_deployment( + self, + ) -> Callable[[gsuiteaddons.UninstallDeploymentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UninstallDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("GSuiteAddOnsRestTransport",) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/__init__.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/__init__.py new file mode 100644 index 000000000000..afeebd9da21b --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
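For context, here is a minimal usage sketch of the REST transport above through the generated client. It is illustrative only: the project name is hypothetical, and it assumes the package is installed and Application Default Credentials are available.

    from google.cloud import gsuiteaddons_v1

    # transport="rest" routes every RPC through GSuiteAddOnsRestTransport
    # (HTTP/1.1 + JSON) instead of the default gRPC transport.
    client = gsuiteaddons_v1.GSuiteAddOnsClient(transport="rest")

    # list_deployments returns a pager that fetches further pages on
    # demand via the _ListDeployments stub defined above.
    for deployment in client.list_deployments(parent="projects/my-project"):
        print(deployment.name)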
+# +from .gsuiteaddons import ( + AddOns, + Authorization, + CreateDeploymentRequest, + DeleteDeploymentRequest, + Deployment, + GetAuthorizationRequest, + GetDeploymentRequest, + GetInstallStatusRequest, + InstallDeploymentRequest, + InstallStatus, + ListDeploymentsRequest, + ListDeploymentsResponse, + ReplaceDeploymentRequest, + UninstallDeploymentRequest, +) + +__all__ = ( + "AddOns", + "Authorization", + "CreateDeploymentRequest", + "DeleteDeploymentRequest", + "Deployment", + "GetAuthorizationRequest", + "GetDeploymentRequest", + "GetInstallStatusRequest", + "InstallDeploymentRequest", + "InstallStatus", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "ReplaceDeploymentRequest", + "UninstallDeploymentRequest", +) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/gsuiteaddons.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/gsuiteaddons.py new file mode 100644 index 000000000000..6121c8ee9647 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/gsuiteaddons.py @@ -0,0 +1,440 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.apps.script.type.calendar.types import calendar_addon_manifest +from google.apps.script.type.docs.types import docs_addon_manifest +from google.apps.script.type.drive.types import drive_addon_manifest +from google.apps.script.type.gmail.types import gmail_addon_manifest +from google.apps.script.type.sheets.types import sheets_addon_manifest +from google.apps.script.type.slides.types import slides_addon_manifest +from google.apps.script.type.types import script_manifest +from google.protobuf import wrappers_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.gsuiteaddons.v1", + manifest={ + "GetAuthorizationRequest", + "Authorization", + "CreateDeploymentRequest", + "ReplaceDeploymentRequest", + "GetDeploymentRequest", + "ListDeploymentsRequest", + "ListDeploymentsResponse", + "DeleteDeploymentRequest", + "InstallDeploymentRequest", + "UninstallDeploymentRequest", + "GetInstallStatusRequest", + "InstallStatus", + "Deployment", + "AddOns", + }, +) + + +class GetAuthorizationRequest(proto.Message): + r"""Request message to get Google Workspace Add-ons authorization + information. + + Attributes: + name (str): + Required. Name of the project for which to get the Google + Workspace Add-ons authorization information. + + Example: ``projects/my_project/authorization``. + """ + + name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Authorization(proto.Message): + r"""The authorization information used when invoking deployment + endpoints. + + Attributes: + name (str): + The canonical full name of this resource. 
Example: + ``projects/123/authorization`` + service_account_email (str): + The email address of the service account used + to authenticate requests to add-on callback + endpoints. + oauth_client_id (str): + The OAuth client ID used to obtain OAuth + access tokens for a user on the add-on's behalf. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=2, + ) + oauth_client_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateDeploymentRequest(proto.Message): + r"""Request message to create a deployment. + + Attributes: + parent (str): + Required. Name of the project in which to create the + deployment. + + Example: ``projects/my_project``. + deployment_id (str): + Required. The id to use for this deployment. The full name + of the created resource will be + ``projects/<project_number>/deployments/<deployment_id>``. + deployment (google.cloud.gsuiteaddons_v1.types.Deployment): + Required. The deployment to create + (deployment.name cannot be set). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deployment_id: str = proto.Field( + proto.STRING, + number=2, + ) + deployment: "Deployment" = proto.Field( + proto.MESSAGE, + number=3, + message="Deployment", + ) + + +class ReplaceDeploymentRequest(proto.Message): + r"""Request message to create or replace a deployment. + + Attributes: + deployment (google.cloud.gsuiteaddons_v1.types.Deployment): + Required. The deployment to create or + replace. + """ + + deployment: "Deployment" = proto.Field( + proto.MESSAGE, + number=2, + message="Deployment", + ) + + +class GetDeploymentRequest(proto.Message): + r"""Request message to get a deployment. + + Attributes: + name (str): + Required. The full resource name of the deployment to get. + + Example: ``projects/my_project/deployments/my_deployment``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeploymentsRequest(proto.Message): + r"""Request message to list deployments for a project. + + Attributes: + parent (str): + Required. Name of the project in which to list + deployments. + + Example: ``projects/my_project``. + page_size (int): + The maximum number of deployments to return. + The service may return fewer than this value. If + unspecified, at most 1000 deployments will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + A page token, received from a previous ``ListDeployments`` + call. Provide this to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListDeployments`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDeploymentsResponse(proto.Message): + r"""Response message to list deployments. + + Attributes: + deployments (MutableSequence[google.cloud.gsuiteaddons_v1.types.Deployment]): + The list of deployments for the given + project. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages.
+ """ + + @property + def raw_page(self): + return self + + deployments: MutableSequence["Deployment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Deployment", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDeploymentRequest(proto.Message): + r"""Request message to delete a deployment. + + Attributes: + name (str): + Required. The full resource name of the deployment to + delete. + + Example: ``projects/my_project/deployments/my_deployment``. + etag (str): + The etag of the deployment to delete. + If this is provided, it must match the server's + etag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class InstallDeploymentRequest(proto.Message): + r"""Request message to install a developer mode deployment. + + Attributes: + name (str): + Required. The full resource name of the deployment to + install. + + Example: ``projects/my_project/deployments/my_deployment``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UninstallDeploymentRequest(proto.Message): + r"""Request message to uninstall a developer mode deployment. + + Attributes: + name (str): + Required. The full resource name of the deployment to + install. + + Example: ``projects/my_project/deployments/my_deployment``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetInstallStatusRequest(proto.Message): + r"""Request message to get the install status of a developer mode + deployment. + + Attributes: + name (str): + Required. The full resource name of the deployment. + + Example: + ``projects/my_project/deployments/my_deployment/installStatus``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InstallStatus(proto.Message): + r"""Developer mode install status of a deployment + + Attributes: + name (str): + The canonical full resource name of the deployment install + status. + + Example: + ``projects/123/deployments/my_deployment/installStatus``. + installed (google.protobuf.wrappers_pb2.BoolValue): + True if the deployment is installed for the + user + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + installed: wrappers_pb2.BoolValue = proto.Field( + proto.MESSAGE, + number=2, + message=wrappers_pb2.BoolValue, + ) + + +class Deployment(proto.Message): + r"""A Google Workspace Add-on deployment + + Attributes: + name (str): + The deployment resource name. Example: + projects/123/deployments/my_deployment. + oauth_scopes (MutableSequence[str]): + The list of Google OAuth scopes for which to + request consent from the end user before + executing an add-on endpoint. + add_ons (google.cloud.gsuiteaddons_v1.types.AddOns): + The Google Workspace Add-on configuration. + etag (str): + This value is computed by the server based on + the version of the deployment in storage, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + oauth_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + add_ons: "AddOns" = proto.Field( + proto.MESSAGE, + number=3, + message="AddOns", + ) + etag: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AddOns(proto.Message): + r"""A Google Workspace Add-on configuration. 
+ + Attributes: + common (google.apps.script.type.types.CommonAddOnManifest): + Configuration that is common across all + Google Workspace Add-ons. + gmail (google.apps.script.type.gmail.types.GmailAddOnManifest): + Gmail add-on configuration. + drive (google.apps.script.type.drive.types.DriveAddOnManifest): + Drive add-on configuration. + calendar (google.apps.script.type.calendar.types.CalendarAddOnManifest): + Calendar add-on configuration. + docs (google.apps.script.type.docs.types.DocsAddOnManifest): + Docs add-on configuration. + sheets (google.apps.script.type.sheets.types.SheetsAddOnManifest): + Sheets add-on configuration. + slides (google.apps.script.type.slides.types.SlidesAddOnManifest): + Slides add-on configuration. + http_options (google.apps.script.type.types.HttpOptions): + Options for sending requests to add-on HTTP + endpoints + """ + + common: script_manifest.CommonAddOnManifest = proto.Field( + proto.MESSAGE, + number=1, + message=script_manifest.CommonAddOnManifest, + ) + gmail: gmail_addon_manifest.GmailAddOnManifest = proto.Field( + proto.MESSAGE, + number=2, + message=gmail_addon_manifest.GmailAddOnManifest, + ) + drive: drive_addon_manifest.DriveAddOnManifest = proto.Field( + proto.MESSAGE, + number=5, + message=drive_addon_manifest.DriveAddOnManifest, + ) + calendar: calendar_addon_manifest.CalendarAddOnManifest = proto.Field( + proto.MESSAGE, + number=6, + message=calendar_addon_manifest.CalendarAddOnManifest, + ) + docs: docs_addon_manifest.DocsAddOnManifest = proto.Field( + proto.MESSAGE, + number=7, + message=docs_addon_manifest.DocsAddOnManifest, + ) + sheets: sheets_addon_manifest.SheetsAddOnManifest = proto.Field( + proto.MESSAGE, + number=8, + message=sheets_addon_manifest.SheetsAddOnManifest, + ) + slides: slides_addon_manifest.SlidesAddOnManifest = proto.Field( + proto.MESSAGE, + number=10, + message=slides_addon_manifest.SlidesAddOnManifest, + ) + http_options: script_manifest.HttpOptions = proto.Field( + proto.MESSAGE, + number=15, + message=script_manifest.HttpOptions, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gsuiteaddons/gsuiteaddons-v1-py.tar.gz b/packages/google-cloud-gsuiteaddons/gsuiteaddons-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-gsuiteaddons/mypy.ini b/packages/google-cloud-gsuiteaddons/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-gsuiteaddons/noxfile.py b/packages/google-cloud-gsuiteaddons/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
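As a rough illustration of how the proto-plus messages above compose (the project and deployment names here are hypothetical), a request can be built from keyword arguments, with nested dicts coerced into the corresponding message types:

    from google.cloud import gsuiteaddons_v1

    # The nested dict is coerced into a Deployment message, including its
    # repeated oauth_scopes field; the result would then be passed to
    # client.create_deployment(request=request).
    request = gsuiteaddons_v1.CreateDeploymentRequest(
        parent="projects/my-project",
        deployment_id="my-deployment",
        deployment={
            "oauth_scopes": ["https://www.googleapis.com/auth/gmail.addons.execute"],
        },
    )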
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-gsuiteaddons/renovate.json b/packages/google-cloud-gsuiteaddons/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-gsuiteaddons/scripts/decrypt-secrets.sh b/packages/google-cloud-gsuiteaddons/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overwriting files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
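For reference, the same lookup this script performs with gcloud below can be sketched in Python with the google-cloud-secret-manager client. This is illustrative only; the repo's tooling itself does not use that library, and the secret names simply mirror the ones in the script:

    from google.cloud import secretmanager

    client = secretmanager.SecretManagerServiceClient()
    # Mirrors: gcloud secrets versions access latest --secret=... --project=...
    name = (
        "projects/cloud-devrel-kokoro-resources"
        "/secrets/python-docs-samples-test-env/versions/latest"
    )
    response = client.access_secret_version(request={"name": name})
    print(response.payload.data.decode("utf-8"))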
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-gsuiteaddons/scripts/fixup_gsuiteaddons_v1_keywords.py b/packages/google-cloud-gsuiteaddons/scripts/fixup_gsuiteaddons_v1_keywords.py new file mode 100644 index 000000000000..e95a79e8e5b4 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/scripts/fixup_gsuiteaddons_v1_keywords.py @@ -0,0 +1,184 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class gsuiteaddonsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_deployment': ('parent', 'deployment_id', 'deployment', ), + 'delete_deployment': ('name', 'etag', ), + 'get_authorization': ('name', ), + 'get_deployment': ('name', ), + 'get_install_status': ('name', ), + 'install_deployment': ('name', ), + 'list_deployments': ('parent', 'page_size', 'page_token', ), + 'replace_deployment': ('deployment', ), + 'uninstall_deployment': ('name', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=gsuiteaddonsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the gsuiteaddons client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-gsuiteaddons/scripts/readme-gen/readme_gen.py b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. 
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have the `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+    $ python {{sample.file}}
+{% if sample.show_help %}
+
+    {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+   https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+   https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+   https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000000..1446b94a5e3a
--- /dev/null
+++ b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication set up. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. 
_Authentication Getting Started Guide:
+    https://cloud.google.com/docs/authentication/getting-started
diff --git a/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000000..11957ce2714a
--- /dev/null
+++ b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+    https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..6f069c6c87a5
--- /dev/null
+++ b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+    .. code-block:: bash
+
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
+
+    .. code-block:: bash
+
+        $ virtualenv env
+        $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+    .. code-block:: bash
+
+        $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000000..5ea33d18c00c
--- /dev/null
+++ b/packages/google-cloud-gsuiteaddons/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for
+cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+      brew install portaudio
+
+  **Note**: if you encounter an error when running `pip install` that indicates
+  it can't find `portaudio.h`, try running `pip install` with the following
+  flags::
+
+      pip install --global-option='build_ext' \
+          --global-option='-I/usr/local/include' \
+          --global-option='-L/usr/local/lib' \
+          pyaudio
+
+* For Debian / Ubuntu Linux::
+
+      apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+  installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. 
_PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-gsuiteaddons/setup.cfg b/packages/google-cloud-gsuiteaddons/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-gsuiteaddons/setup.py b/packages/google-cloud-gsuiteaddons/setup.py new file mode 100644 index 000000000000..c527d47070b5 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/setup.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
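+#
+# The package version below is read from
+# google/cloud/gsuiteaddons/gapic_version.py; a "0.x" version maps to the
+# "Beta" development-status classifier, anything else to "Production/Stable".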
+#
+import io
+import os
+
+import setuptools  # type: ignore
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+name = "google-cloud-gsuiteaddons"
+
+
+description = "Google Cloud Gsuiteaddons API client library"
+
+version = {}
+with open(
+    os.path.join(package_root, "google/cloud/gsuiteaddons/gapic_version.py")
+) as fp:
+    exec(fp.read(), version)
+version = version["__version__"]
+
+if version[0] == "0":
+    release_status = "Development Status :: 4 - Beta"
+else:
+    release_status = "Development Status :: 5 - Production/Stable"
+
+dependencies = [
+    "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+    "proto-plus >= 1.22.0, <2.0.0dev",
+    "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'",
+    "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+    "google-apps-script-type >= 0.2.0, <1.0.0dev",
+]
+url = "https://github.com/googleapis/google-cloud-python"
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, "README.rst")
+with io.open(readme_filename, encoding="utf-8") as readme_file:
+    readme = readme_file.read()
+
+packages = [
+    package
+    for package in setuptools.PEP420PackageFinder.find()
+    if package.startswith("google")
+]
+
+namespaces = ["google", "google.cloud"]
+
+setuptools.setup(
+    name=name,
+    version=version,
+    description=description,
+    long_description=readme,
+    author="Google LLC",
+    author_email="googleapis-packages@google.com",
+    license="Apache 2.0",
+    url=url,
+    classifiers=[
+        release_status,
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Operating System :: OS Independent",
+        "Topic :: Internet",
+    ],
+    platforms="Posix; MacOS X; Windows",
+    packages=packages,
+    python_requires=">=3.7",
+    namespace_packages=namespaces,
+    install_requires=dependencies,
+    include_package_data=True,
+    zip_safe=False,
+)
diff --git a/packages/google-cloud-gsuiteaddons/testing/.gitignore b/packages/google-cloud-gsuiteaddons/testing/.gitignore
new file mode 100644
index 000000000000..b05fbd630881
--- /dev/null
+++ b/packages/google-cloud-gsuiteaddons/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file
diff --git a/packages/google-cloud-gsuiteaddons/testing/constraints-3.10.txt b/packages/google-cloud-gsuiteaddons/testing/constraints-3.10.txt
new file mode 100644
index 000000000000..3a9e27b606ec
--- /dev/null
+++ b/packages/google-cloud-gsuiteaddons/testing/constraints-3.10.txt
@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+proto-plus
+protobuf
+google-apps-script-type
diff --git a/packages/google-cloud-gsuiteaddons/testing/constraints-3.11.txt b/packages/google-cloud-gsuiteaddons/testing/constraints-3.11.txt
new file mode 100644
index 000000000000..3a9e27b606ec
--- /dev/null
+++ b/packages/google-cloud-gsuiteaddons/testing/constraints-3.11.txt
@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
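+# (Unlike constraints-3.7.txt, which pins every dependency to its documented
+# lower bound, this file leaves versions unpinned so tests on newer
+# interpreters run against the latest releases.)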
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-apps-script-type diff --git a/packages/google-cloud-gsuiteaddons/testing/constraints-3.12.txt b/packages/google-cloud-gsuiteaddons/testing/constraints-3.12.txt new file mode 100644 index 000000000000..3a9e27b606ec --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-apps-script-type diff --git a/packages/google-cloud-gsuiteaddons/testing/constraints-3.7.txt b/packages/google-cloud-gsuiteaddons/testing/constraints-3.7.txt new file mode 100644 index 000000000000..4ae34ff40304 --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +google-apps-script-type==0.2.0 diff --git a/packages/google-cloud-gsuiteaddons/testing/constraints-3.8.txt b/packages/google-cloud-gsuiteaddons/testing/constraints-3.8.txt new file mode 100644 index 000000000000..3a9e27b606ec --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-apps-script-type diff --git a/packages/google-cloud-gsuiteaddons/testing/constraints-3.9.txt b/packages/google-cloud-gsuiteaddons/testing/constraints-3.9.txt new file mode 100644 index 000000000000..3a9e27b606ec --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +google-apps-script-type diff --git a/packages/google-cloud-gsuiteaddons/tests/__init__.py b/packages/google-cloud-gsuiteaddons/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-gsuiteaddons/tests/unit/__init__.py b/packages/google-cloud-gsuiteaddons/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/__init__.py b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/__init__.py b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py new file mode 100644 index 000000000000..c1ecb27696ad --- /dev/null +++ b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py @@ -0,0 +1,6788 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.apps.script.type.calendar.types import calendar_addon_manifest +from google.apps.script.type.docs.types import docs_addon_manifest +from google.apps.script.type.drive.types import drive_addon_manifest +from google.apps.script.type.gmail.types import gmail_addon_manifest +from google.apps.script.type.sheets.types import sheets_addon_manifest +from google.apps.script.type.slides.types import slides_addon_manifest +from google.apps.script.type.types import ( + addon_widget_set, + extension_point, + script_manifest, +) +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import wrappers_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.gsuiteaddons_v1.services.g_suite_add_ons import ( + GSuiteAddOnsAsyncClient, + GSuiteAddOnsClient, + pagers, + transports, +) +from google.cloud.gsuiteaddons_v1.types import gsuiteaddons + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
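+# For example, a default endpoint of "gsuiteaddons.googleapis.com" should map
+# to the mTLS endpoint "gsuiteaddons.mtls.googleapis.com"; see
+# test__get_default_mtls_endpoint below.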
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GSuiteAddOnsClient._get_default_mtls_endpoint(None) is None + assert ( + GSuiteAddOnsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + GSuiteAddOnsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + GSuiteAddOnsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GSuiteAddOnsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert GSuiteAddOnsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GSuiteAddOnsClient, "grpc"), + (GSuiteAddOnsAsyncClient, "grpc_asyncio"), + (GSuiteAddOnsClient, "rest"), + ], +) +def test_g_suite_add_ons_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "gsuiteaddons.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://gsuiteaddons.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.GSuiteAddOnsGrpcTransport, "grpc"), + (transports.GSuiteAddOnsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GSuiteAddOnsRestTransport, "rest"), + ], +) +def test_g_suite_add_ons_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GSuiteAddOnsClient, "grpc"), + (GSuiteAddOnsAsyncClient, "grpc_asyncio"), + (GSuiteAddOnsClient, "rest"), + ], +) +def test_g_suite_add_ons_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "gsuiteaddons.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://gsuiteaddons.googleapis.com" + ) + + +def test_g_suite_add_ons_client_get_transport_class(): + transport = GSuiteAddOnsClient.get_transport_class() + available_transports = [ + transports.GSuiteAddOnsGrpcTransport, + transports.GSuiteAddOnsRestTransport, + ] + assert transport in available_transports + + transport = GSuiteAddOnsClient.get_transport_class("grpc") + assert transport == transports.GSuiteAddOnsGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GSuiteAddOnsClient, transports.GSuiteAddOnsGrpcTransport, "grpc"), + ( + GSuiteAddOnsAsyncClient, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (GSuiteAddOnsClient, transports.GSuiteAddOnsRestTransport, "rest"), + ], +) +@mock.patch.object( + GSuiteAddOnsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GSuiteAddOnsClient) +) +@mock.patch.object( + GSuiteAddOnsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GSuiteAddOnsAsyncClient), +) +def test_g_suite_add_ons_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GSuiteAddOnsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GSuiteAddOnsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
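+    # ("never" pins the client to DEFAULT_ENDPOINT, "always" to
+    # DEFAULT_MTLS_ENDPOINT, "auto" switches on client-certificate
+    # availability, and any other value raises MutualTLSChannelError.)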
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (GSuiteAddOnsClient, transports.GSuiteAddOnsGrpcTransport, "grpc", "true"), + ( + GSuiteAddOnsAsyncClient, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (GSuiteAddOnsClient, transports.GSuiteAddOnsGrpcTransport, "grpc", "false"), + ( + GSuiteAddOnsAsyncClient, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (GSuiteAddOnsClient, transports.GSuiteAddOnsRestTransport, "rest", "true"), + (GSuiteAddOnsClient, transports.GSuiteAddOnsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + GSuiteAddOnsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GSuiteAddOnsClient) +) +@mock.patch.object( + GSuiteAddOnsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GSuiteAddOnsAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_g_suite_add_ons_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
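+    # Three scenarios are exercised below: an explicit client_cert_source, a
+    # cert discovered through the ADC machinery, and no cert at all; in each
+    # case GOOGLE_API_USE_CLIENT_CERTIFICATE decides whether the cert is used.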
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [GSuiteAddOnsClient, GSuiteAddOnsAsyncClient]) +@mock.patch.object( + GSuiteAddOnsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(GSuiteAddOnsClient) +) +@mock.patch.object( + GSuiteAddOnsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GSuiteAddOnsAsyncClient), +) +def test_g_suite_add_ons_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
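+    # get_mtls_endpoint_and_cert_source returns an (api_endpoint, cert_source)
+    # tuple; with the env var set to "true" and a cert source configured, both
+    # user-supplied values should come back unchanged.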
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GSuiteAddOnsClient, transports.GSuiteAddOnsGrpcTransport, "grpc"), + ( + GSuiteAddOnsAsyncClient, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (GSuiteAddOnsClient, transports.GSuiteAddOnsRestTransport, "rest"), + ], +) +def test_g_suite_add_ons_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
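+    # User-supplied scopes should be forwarded to the transport verbatim
+    # rather than being replaced by the library's default scopes.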
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GSuiteAddOnsClient, + transports.GSuiteAddOnsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GSuiteAddOnsAsyncClient, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (GSuiteAddOnsClient, transports.GSuiteAddOnsRestTransport, "rest", None), + ], +) +def test_g_suite_add_ons_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_g_suite_add_ons_client_client_options_from_dict(): + with mock.patch( + "google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.transports.GSuiteAddOnsGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = GSuiteAddOnsClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GSuiteAddOnsClient, + transports.GSuiteAddOnsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GSuiteAddOnsAsyncClient, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_g_suite_add_ons_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
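+    # (i.e. the channel must be created with the credentials loaded from
+    # "credentials.json", not with whatever ADC returned.)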
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "gsuiteaddons.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="gsuiteaddons.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.GetAuthorizationRequest, + dict, + ], +) +def test_get_authorization(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authorization), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Authorization( + name="name_value", + service_account_email="service_account_email_value", + oauth_client_id="oauth_client_id_value", + ) + response = client.get_authorization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetAuthorizationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Authorization) + assert response.name == "name_value" + assert response.service_account_email == "service_account_email_value" + assert response.oauth_client_id == "oauth_client_id_value" + + +def test_get_authorization_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authorization), "__call__" + ) as call: + client.get_authorization() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetAuthorizationRequest() + + +@pytest.mark.asyncio +async def test_get_authorization_async( + transport: str = "grpc_asyncio", request_type=gsuiteaddons.GetAuthorizationRequest +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
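+    # In the async variant the stub has to return an awaitable, so the fake
+    # response below is wrapped in grpc_helpers_async.FakeUnaryUnaryCall.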
+ with mock.patch.object( + type(client.transport.get_authorization), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Authorization( + name="name_value", + service_account_email="service_account_email_value", + oauth_client_id="oauth_client_id_value", + ) + ) + response = await client.get_authorization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetAuthorizationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Authorization) + assert response.name == "name_value" + assert response.service_account_email == "service_account_email_value" + assert response.oauth_client_id == "oauth_client_id_value" + + +@pytest.mark.asyncio +async def test_get_authorization_async_from_dict(): + await test_get_authorization_async(request_type=dict) + + +def test_get_authorization_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.GetAuthorizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authorization), "__call__" + ) as call: + call.return_value = gsuiteaddons.Authorization() + client.get_authorization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_authorization_field_headers_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.GetAuthorizationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authorization), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Authorization() + ) + await client.get_authorization(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_authorization_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authorization), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Authorization() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
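+        # The flattened kwarg is copied into the request message, so the call
+        # below should behave like
+        #   client.get_authorization(
+        #       request=gsuiteaddons.GetAuthorizationRequest(name="name_value"))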
+ client.get_authorization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_authorization_flattened_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_authorization( + gsuiteaddons.GetAuthorizationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_authorization_flattened_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authorization), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Authorization() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Authorization() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_authorization( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_authorization_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_authorization( + gsuiteaddons.GetAuthorizationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.CreateDeploymentRequest, + dict, + ], +) +def test_create_deployment(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + response = client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.CreateDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +def test_create_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
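+    # Even with no arguments, the stub should receive a default
+    # CreateDeploymentRequest(), which is what the assertion below verifies.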
+ client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + client.create_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.CreateDeploymentRequest() + + +@pytest.mark.asyncio +async def test_create_deployment_async( + transport: str = "grpc_asyncio", request_type=gsuiteaddons.CreateDeploymentRequest +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + ) + response = await client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.CreateDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_create_deployment_async_from_dict(): + await test_create_deployment_async(request_type=dict) + + +def test_create_deployment_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.CreateDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + call.return_value = gsuiteaddons.Deployment() + client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_deployment_field_headers_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.CreateDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
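+    # The routing header travels as gRPC metadata of the form
+    # ("x-goog-request-params", "parent=parent_value"), letting the backend
+    # route the request by resource name.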
+ with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment() + ) + await client.create_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_deployment_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deployment( + parent="parent_value", + deployment=gsuiteaddons.Deployment(name="name_value"), + deployment_id="deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deployment + mock_val = gsuiteaddons.Deployment(name="name_value") + assert arg == mock_val + arg = args[0].deployment_id + mock_val = "deployment_id_value" + assert arg == mock_val + + +def test_create_deployment_flattened_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deployment( + gsuiteaddons.CreateDeploymentRequest(), + parent="parent_value", + deployment=gsuiteaddons.Deployment(name="name_value"), + deployment_id="deployment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_deployment_flattened_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deployment( + parent="parent_value", + deployment=gsuiteaddons.Deployment(name="name_value"), + deployment_id="deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
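+ # NOTE (sketch, assuming standard GAPIC flattening): the keyword arguments
+ # above are folded into a single request message, roughly
+ #   gsuiteaddons.CreateDeploymentRequest(
+ #       parent="parent_value",
+ #       deployment=gsuiteaddons.Deployment(name="name_value"),
+ #       deployment_id="deployment_id_value",
+ #   )
+ # which is what the assertions below unpack from args[0].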
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deployment + mock_val = gsuiteaddons.Deployment(name="name_value") + assert arg == mock_val + arg = args[0].deployment_id + mock_val = "deployment_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_deployment_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_deployment( + gsuiteaddons.CreateDeploymentRequest(), + parent="parent_value", + deployment=gsuiteaddons.Deployment(name="name_value"), + deployment_id="deployment_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.ReplaceDeploymentRequest, + dict, + ], +) +def test_replace_deployment(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + response = client.replace_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.ReplaceDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +def test_replace_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_deployment), "__call__" + ) as call: + client.replace_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.ReplaceDeploymentRequest() + + +@pytest.mark.asyncio +async def test_replace_deployment_async( + transport: str = "grpc_asyncio", request_type=gsuiteaddons.ReplaceDeploymentRequest +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
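+ # NOTE (hedged): the attribute patched above is the transport's
+ # unary-unary multi-callable for ReplaceDeployment, so assigning a canned
+ # awaitable below keeps the test entirely in-process, with no channel I/O.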
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + ) + response = await client.replace_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.ReplaceDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_replace_deployment_async_from_dict(): + await test_replace_deployment_async(request_type=dict) + + +def test_replace_deployment_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.ReplaceDeploymentRequest() + + request.deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_deployment), "__call__" + ) as call: + call.return_value = gsuiteaddons.Deployment() + client.replace_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "deployment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_replace_deployment_field_headers_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.ReplaceDeploymentRequest() + + request.deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment() + ) + await client.replace_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "deployment.name=name_value", + ) in kw["metadata"] + + +def test_replace_deployment_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.replace_deployment( + deployment=gsuiteaddons.Deployment(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].deployment + mock_val = gsuiteaddons.Deployment(name="name_value") + assert arg == mock_val + + +def test_replace_deployment_flattened_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.replace_deployment( + gsuiteaddons.ReplaceDeploymentRequest(), + deployment=gsuiteaddons.Deployment(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_replace_deployment_flattened_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.replace_deployment( + deployment=gsuiteaddons.Deployment(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].deployment + mock_val = gsuiteaddons.Deployment(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_replace_deployment_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.replace_deployment( + gsuiteaddons.ReplaceDeploymentRequest(), + deployment=gsuiteaddons.Deployment(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.GetDeploymentRequest, + dict, + ], +) +def test_get_deployment(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + response = client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +def test_get_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + client.get_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetDeploymentRequest() + + +@pytest.mark.asyncio +async def test_get_deployment_async( + transport: str = "grpc_asyncio", request_type=gsuiteaddons.GetDeploymentRequest +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + ) + response = await client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_deployment_async_from_dict(): + await test_get_deployment_async(request_type=dict) + + +def test_get_deployment_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.GetDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + call.return_value = gsuiteaddons.Deployment() + client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_deployment_field_headers_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.GetDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment() + ) + await client.get_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_deployment_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_deployment_flattened_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deployment( + gsuiteaddons.GetDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_deployment_flattened_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.Deployment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.Deployment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_deployment_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_deployment( + gsuiteaddons.GetDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.ListDeploymentsRequest, + dict, + ], +) +def test_list_deployments(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. 
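+ # NOTE (hedged): list_deployments wraps this raw response in a
+ # pagers.ListDeploymentsPager, so callers can iterate deployments directly,
+ # e.g. (with a hypothetical parent)
+ #   for deployment in client.list_deployments(parent="projects/my-project"):
+ #       ...
+ # and further pages are fetched lazily via next_page_token.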
+ call.return_value = gsuiteaddons.ListDeploymentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.ListDeploymentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_deployments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + client.list_deployments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.ListDeploymentsRequest() + + +@pytest.mark.asyncio +async def test_list_deployments_async( + transport: str = "grpc_asyncio", request_type=gsuiteaddons.ListDeploymentsRequest +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.ListDeploymentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.ListDeploymentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_deployments_async_from_dict(): + await test_list_deployments_async(request_type=dict) + + +def test_list_deployments_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.ListDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + call.return_value = gsuiteaddons.ListDeploymentsResponse() + client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_deployments_field_headers_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.ListDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.ListDeploymentsResponse() + ) + await client.list_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_deployments_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.ListDeploymentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deployments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_deployments_flattened_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployments( + gsuiteaddons.ListDeploymentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_deployments_flattened_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.ListDeploymentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.ListDeploymentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_deployments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_deployments_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_deployments( + gsuiteaddons.ListDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_deployments_pager(transport_name: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + next_page_token="abc", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + ], + next_page_token="ghi", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_deployments(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, gsuiteaddons.Deployment) for i in results) + + +def test_list_deployments_pages(transport_name: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + next_page_token="abc", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + ], + next_page_token="ghi", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_deployments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_deployments_async_pager(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
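+ # NOTE (hedged): with side_effect set to a sequence, each invocation of
+ # the mocked RPC yields the next canned page in order, and the trailing
+ # RuntimeError would be raised if the pager fetched past the last page.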
+ call.side_effect = ( + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + next_page_token="abc", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + ], + next_page_token="ghi", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deployments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, gsuiteaddons.Deployment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_deployments_async_pages(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployments), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + next_page_token="abc", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + ], + next_page_token="ghi", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_deployments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.DeleteDeploymentRequest, + dict, + ], +) +def test_delete_deployment(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.DeleteDeploymentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + client.delete_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.DeleteDeploymentRequest() + + +@pytest.mark.asyncio +async def test_delete_deployment_async( + transport: str = "grpc_asyncio", request_type=gsuiteaddons.DeleteDeploymentRequest +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.DeleteDeploymentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_deployment_async_from_dict(): + await test_delete_deployment_async(request_type=dict) + + +def test_delete_deployment_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.DeleteDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + call.return_value = None + client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_deployment_field_headers_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.DeleteDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
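+ # NOTE (sketch of the shape assumed below): kw["metadata"] is a sequence
+ # of (key, value) pairs, so a plain membership check can look for the
+ # exact ("x-goog-request-params", "name=name_value") tuple.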
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_deployment_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_deployment_flattened_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deployment( + gsuiteaddons.DeleteDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_deployment_flattened_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_deployment_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_deployment( + gsuiteaddons.DeleteDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.InstallDeploymentRequest, + dict, + ], +) +def test_install_deployment(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.install_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.install_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
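+ # NOTE (hedged): InstallDeployment presumably maps to a
+ # google.protobuf.Empty response, which the generated client surfaces as
+ # None; hence call.return_value = None above and `assert response is None`
+ # below.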
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.InstallDeploymentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_install_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.install_deployment), "__call__" + ) as call: + client.install_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.InstallDeploymentRequest() + + +@pytest.mark.asyncio +async def test_install_deployment_async( + transport: str = "grpc_asyncio", request_type=gsuiteaddons.InstallDeploymentRequest +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.install_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.install_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.InstallDeploymentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_install_deployment_async_from_dict(): + await test_install_deployment_async(request_type=dict) + + +def test_install_deployment_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.InstallDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.install_deployment), "__call__" + ) as call: + call.return_value = None + client.install_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_install_deployment_field_headers_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.InstallDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.install_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.install_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_install_deployment_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.install_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.install_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_install_deployment_flattened_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.install_deployment( + gsuiteaddons.InstallDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_install_deployment_flattened_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.install_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.install_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_install_deployment_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.install_deployment( + gsuiteaddons.InstallDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.UninstallDeploymentRequest, + dict, + ], +) +def test_uninstall_deployment(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.uninstall_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.uninstall_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.UninstallDeploymentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_uninstall_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.uninstall_deployment), "__call__" + ) as call: + client.uninstall_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.UninstallDeploymentRequest() + + +@pytest.mark.asyncio +async def test_uninstall_deployment_async( + transport: str = "grpc_asyncio", + request_type=gsuiteaddons.UninstallDeploymentRequest, +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.uninstall_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.uninstall_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.UninstallDeploymentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_uninstall_deployment_async_from_dict(): + await test_uninstall_deployment_async(request_type=dict) + + +def test_uninstall_deployment_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsuiteaddons.UninstallDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.uninstall_deployment), "__call__" + ) as call: + call.return_value = None + client.uninstall_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_uninstall_deployment_field_headers_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gsuiteaddons.UninstallDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.uninstall_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.uninstall_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_uninstall_deployment_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.uninstall_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.uninstall_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_uninstall_deployment_flattened_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.uninstall_deployment( + gsuiteaddons.UninstallDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_uninstall_deployment_flattened_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.uninstall_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.uninstall_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_uninstall_deployment_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
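+ # NOTE (hedged restatement of the rule under test): the generated surface
+ # builds the request from flattened kwargs, so also passing a pre-built
+ # request positionally is ambiguous and rejected, e.g. (hypothetical)
+ #   client.uninstall_deployment(request_obj, name="name_value")  # ValueError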
+ with pytest.raises(ValueError): + await client.uninstall_deployment( + gsuiteaddons.UninstallDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.GetInstallStatusRequest, + dict, + ], +) +def test_get_install_status(request_type, transport: str = "grpc"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_install_status), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gsuiteaddons.InstallStatus( + name="name_value", + ) + response = client.get_install_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetInstallStatusRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.InstallStatus) + assert response.name == "name_value" + + +def test_get_install_status_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_install_status), "__call__" + ) as call: + client.get_install_status() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetInstallStatusRequest() + + +@pytest.mark.asyncio +async def test_get_install_status_async( + transport: str = "grpc_asyncio", request_type=gsuiteaddons.GetInstallStatusRequest +): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_install_status), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gsuiteaddons.InstallStatus( + name="name_value", + ) + ) + response = await client.get_install_status(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gsuiteaddons.GetInstallStatusRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.InstallStatus) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_install_status_async_from_dict(): + await test_get_install_status_async(request_type=dict) + + +def test_get_install_status_field_headers(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = gsuiteaddons.GetInstallStatusRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_install_status), "__call__"
+    ) as call:
+        call.return_value = gsuiteaddons.InstallStatus()
+        client.get_install_status(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_install_status_field_headers_async():
+    client = GSuiteAddOnsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = gsuiteaddons.GetInstallStatusRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_install_status), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gsuiteaddons.InstallStatus()
+        )
+        await client.get_install_status(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_install_status_flattened():
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_install_status), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gsuiteaddons.InstallStatus()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_install_status(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_install_status_flattened_error():
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_install_status(
+            gsuiteaddons.GetInstallStatusRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_install_status_flattened_async():
+    client = GSuiteAddOnsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_install_status), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gsuiteaddons.InstallStatus()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.get_install_status( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_install_status_flattened_error_async(): + client = GSuiteAddOnsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_install_status( + gsuiteaddons.GetInstallStatusRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.GetAuthorizationRequest, + dict, + ], +) +def test_get_authorization_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/authorization"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Authorization( + name="name_value", + service_account_email="service_account_email_value", + oauth_client_id="oauth_client_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.Authorization.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_authorization(request) + + # Establish that the response is the type that we expect. 
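+    # The REST transport decodes the mocked JSON payload back into the
+    # proto-plus Authorization type, so the field values round-trip.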
+ assert isinstance(response, gsuiteaddons.Authorization) + assert response.name == "name_value" + assert response.service_account_email == "service_account_email_value" + assert response.oauth_client_id == "oauth_client_id_value" + + +def test_get_authorization_rest_required_fields( + request_type=gsuiteaddons.GetAuthorizationRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_authorization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_authorization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Authorization() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
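+            # The stubbed transcode() result below mirrors the dict shape that
+            # path_template.transcode() normally produces ("uri", "method",
+            # "query_params", and optionally "body"), letting the transport
+            # proceed without matching a real http rule.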
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gsuiteaddons.Authorization.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_authorization(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_authorization_rest_unset_required_fields():
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_authorization._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_authorization_rest_interceptors(null_interceptor):
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.GSuiteAddOnsRestInterceptor(),
+    )
+    client = GSuiteAddOnsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "post_get_authorization"
+    ) as post, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "pre_get_authorization"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = gsuiteaddons.GetAuthorizationRequest.pb(
+            gsuiteaddons.GetAuthorizationRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gsuiteaddons.Authorization.to_json(
+            gsuiteaddons.Authorization()
+        )
+
+        request = gsuiteaddons.GetAuthorizationRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gsuiteaddons.Authorization()
+
+        client.get_authorization(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_authorization_rest_bad_request(
+    transport: str = "rest", request_type=gsuiteaddons.GetAuthorizationRequest
+):
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/authorization"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
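+    # google.api_core maps an HTTP 400 response to core_exceptions.BadRequest,
+    # which is what the client is expected to raise here.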
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_authorization(request) + + +def test_get_authorization_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Authorization() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/authorization"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.Authorization.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_authorization(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/authorization}" % client.transport._host, args[1] + ) + + +def test_get_authorization_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_authorization( + gsuiteaddons.GetAuthorizationRequest(), + name="name_value", + ) + + +def test_get_authorization_rest_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.CreateDeploymentRequest, + dict, + ], +) +def test_create_deployment_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["deployment"] = { + "name": "name_value", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "add_ons": { + "common": { + "name": "name_value", + "logo_url": "logo_url_value", + "layout_properties": { + "primary_color": "primary_color_value", + "secondary_color": "secondary_color_value", + }, + "add_on_widget_set": {"used_widgets": [1]}, + "use_locale_from_app": True, + "homepage_trigger": { + "run_function": "run_function_value", + "enabled": {"value": True}, + }, + "universal_actions": [ + { + "label": "label_value", + "open_link": "open_link_value", + "run_function": "run_function_value", + } + ], + "open_link_url_prefixes": { + "values": [ + { + "null_value": 0, + "number_value": 0.1285, + "string_value": "string_value_value", + "bool_value": True, + "struct_value": {"fields": {}}, + "list_value": {}, + } + ] + }, + }, + "gmail": { + "homepage_trigger": {}, + "contextual_triggers": [ + { + "unconditional": {}, + "on_trigger_function": "on_trigger_function_value", + } + ], + "universal_actions": [ + { + "text": "text_value", + "open_link": "open_link_value", + "run_function": "run_function_value", + } + ], + "compose_trigger": { + "actions": [ + { + "run_function": "run_function_value", + "label": "label_value", + "logo_url": "logo_url_value", + } + ], + "draft_access": 1, + }, + "authorization_check_function": "authorization_check_function_value", + }, + "drive": { + "homepage_trigger": {}, + "on_items_selected_trigger": {"run_function": "run_function_value"}, + }, + "calendar": { + "homepage_trigger": {}, + "conference_solution": [ + { + "on_create_function": "on_create_function_value", + "id": "id_value", + "name": "name_value", + "logo_url": "logo_url_value", + } + ], + "create_settings_url_function": "create_settings_url_function_value", + "event_open_trigger": {"run_function": "run_function_value"}, + "event_update_trigger": {}, + "current_event_access": 1, + }, + "docs": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "sheets": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "slides": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "http_options": {"authorization_header": 1}, + }, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_deployment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +def test_create_deployment_rest_required_fields( + request_type=gsuiteaddons.CreateDeploymentRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["deployment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "deploymentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == request_init["deployment_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["deploymentId"] = "deployment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("deployment_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == "deployment_id_value" + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gsuiteaddons.Deployment.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_deployment(request)
+
+            expected_params = [
+                (
+                    "deploymentId",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_deployment_rest_unset_required_fields():
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_deployment._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("deploymentId",))
+        & set(
+            (
+                "parent",
+                "deploymentId",
+                "deployment",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_deployment_rest_interceptors(null_interceptor):
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.GSuiteAddOnsRestInterceptor(),
+    )
+    client = GSuiteAddOnsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "post_create_deployment"
+    ) as post, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "pre_create_deployment"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = gsuiteaddons.CreateDeploymentRequest.pb(
+            gsuiteaddons.CreateDeploymentRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gsuiteaddons.Deployment.to_json(
+            gsuiteaddons.Deployment()
+        )
+
+        request = gsuiteaddons.CreateDeploymentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gsuiteaddons.Deployment()
+
+        client.create_deployment(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_deployment_rest_bad_request(
+    transport: str = "rest", request_type=gsuiteaddons.CreateDeploymentRequest
+):
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request_init["deployment"] = {
+        "name": "name_value",
+        "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"],
+        "add_ons": {
+            "common": {
+                "name": "name_value",
+                "logo_url": "logo_url_value",
+                "layout_properties": {
+                    "primary_color": "primary_color_value",
+                    "secondary_color": "secondary_color_value",
+                },
+                "add_on_widget_set": {"used_widgets": [1]},
+                "use_locale_from_app": True,
+                "homepage_trigger": {
+                    "run_function": "run_function_value",
+                    "enabled": {"value":
True}, + }, + "universal_actions": [ + { + "label": "label_value", + "open_link": "open_link_value", + "run_function": "run_function_value", + } + ], + "open_link_url_prefixes": { + "values": [ + { + "null_value": 0, + "number_value": 0.1285, + "string_value": "string_value_value", + "bool_value": True, + "struct_value": {"fields": {}}, + "list_value": {}, + } + ] + }, + }, + "gmail": { + "homepage_trigger": {}, + "contextual_triggers": [ + { + "unconditional": {}, + "on_trigger_function": "on_trigger_function_value", + } + ], + "universal_actions": [ + { + "text": "text_value", + "open_link": "open_link_value", + "run_function": "run_function_value", + } + ], + "compose_trigger": { + "actions": [ + { + "run_function": "run_function_value", + "label": "label_value", + "logo_url": "logo_url_value", + } + ], + "draft_access": 1, + }, + "authorization_check_function": "authorization_check_function_value", + }, + "drive": { + "homepage_trigger": {}, + "on_items_selected_trigger": {"run_function": "run_function_value"}, + }, + "calendar": { + "homepage_trigger": {}, + "conference_solution": [ + { + "on_create_function": "on_create_function_value", + "id": "id_value", + "name": "name_value", + "logo_url": "logo_url_value", + } + ], + "create_settings_url_function": "create_settings_url_function_value", + "event_open_trigger": {"run_function": "run_function_value"}, + "event_update_trigger": {}, + "current_event_access": 1, + }, + "docs": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "sheets": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "slides": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "http_options": {"authorization_header": 1}, + }, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_deployment(request) + + +def test_create_deployment_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deployment=gsuiteaddons.Deployment(name="name_value"), + deployment_id="deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
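+        # path_template.validate() checks the URI that was actually requested
+        # against the expected http rule pattern, confirming that the
+        # flattened arguments were transcoded into the request path.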
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/deployments" % client.transport._host, args[1] + ) + + +def test_create_deployment_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deployment( + gsuiteaddons.CreateDeploymentRequest(), + parent="parent_value", + deployment=gsuiteaddons.Deployment(name="name_value"), + deployment_id="deployment_id_value", + ) + + +def test_create_deployment_rest_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.ReplaceDeploymentRequest, + dict, + ], +) +def test_replace_deployment_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"deployment": {"name": "projects/sample1/deployments/sample2"}} + request_init["deployment"] = { + "name": "projects/sample1/deployments/sample2", + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "add_ons": { + "common": { + "name": "name_value", + "logo_url": "logo_url_value", + "layout_properties": { + "primary_color": "primary_color_value", + "secondary_color": "secondary_color_value", + }, + "add_on_widget_set": {"used_widgets": [1]}, + "use_locale_from_app": True, + "homepage_trigger": { + "run_function": "run_function_value", + "enabled": {"value": True}, + }, + "universal_actions": [ + { + "label": "label_value", + "open_link": "open_link_value", + "run_function": "run_function_value", + } + ], + "open_link_url_prefixes": { + "values": [ + { + "null_value": 0, + "number_value": 0.1285, + "string_value": "string_value_value", + "bool_value": True, + "struct_value": {"fields": {}}, + "list_value": {}, + } + ] + }, + }, + "gmail": { + "homepage_trigger": {}, + "contextual_triggers": [ + { + "unconditional": {}, + "on_trigger_function": "on_trigger_function_value", + } + ], + "universal_actions": [ + { + "text": "text_value", + "open_link": "open_link_value", + "run_function": "run_function_value", + } + ], + "compose_trigger": { + "actions": [ + { + "run_function": "run_function_value", + "label": "label_value", + "logo_url": "logo_url_value", + } + ], + "draft_access": 1, + }, + "authorization_check_function": "authorization_check_function_value", + }, + "drive": { + "homepage_trigger": {}, + "on_items_selected_trigger": {"run_function": "run_function_value"}, + }, + "calendar": { + "homepage_trigger": {}, + "conference_solution": [ + { + "on_create_function": "on_create_function_value", + "id": "id_value", + "name": "name_value", + "logo_url": "logo_url_value", + } + ], + "create_settings_url_function": "create_settings_url_function_value", + "event_open_trigger": {"run_function": "run_function_value"}, + "event_update_trigger": {}, + "current_event_access": 1, + }, + "docs": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "sheets": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "slides": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": 
"run_function_value"}, + }, + "http_options": {"authorization_header": 1}, + }, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.replace_deployment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +def test_replace_deployment_rest_required_fields( + request_type=gsuiteaddons.ReplaceDeploymentRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).replace_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).replace_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "put",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gsuiteaddons.Deployment.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.replace_deployment(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_replace_deployment_rest_unset_required_fields():
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.replace_deployment._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("deployment",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_replace_deployment_rest_interceptors(null_interceptor):
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.GSuiteAddOnsRestInterceptor(),
+    )
+    client = GSuiteAddOnsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "post_replace_deployment"
+    ) as post, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "pre_replace_deployment"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = gsuiteaddons.ReplaceDeploymentRequest.pb(
+            gsuiteaddons.ReplaceDeploymentRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gsuiteaddons.Deployment.to_json(
+            gsuiteaddons.Deployment()
+        )
+
+        request = gsuiteaddons.ReplaceDeploymentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gsuiteaddons.Deployment()
+
+        client.replace_deployment(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_replace_deployment_rest_bad_request(
+    transport: str = "rest", request_type=gsuiteaddons.ReplaceDeploymentRequest
+):
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"deployment": {"name": "projects/sample1/deployments/sample2"}}
+    request_init["deployment"] = {
+        "name": "projects/sample1/deployments/sample2",
+        "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"],
+        "add_ons": {
+            "common": {
+                "name": "name_value",
+                "logo_url": "logo_url_value",
+                "layout_properties": {
+                    "primary_color": "primary_color_value",
+                    "secondary_color": "secondary_color_value",
+                },
+                "add_on_widget_set": {"used_widgets": [1]},
+                "use_locale_from_app": True,
+                "homepage_trigger": {
+                    "run_function": "run_function_value",
+                    "enabled": {"value": True},
+                },
+                "universal_actions":
[ + { + "label": "label_value", + "open_link": "open_link_value", + "run_function": "run_function_value", + } + ], + "open_link_url_prefixes": { + "values": [ + { + "null_value": 0, + "number_value": 0.1285, + "string_value": "string_value_value", + "bool_value": True, + "struct_value": {"fields": {}}, + "list_value": {}, + } + ] + }, + }, + "gmail": { + "homepage_trigger": {}, + "contextual_triggers": [ + { + "unconditional": {}, + "on_trigger_function": "on_trigger_function_value", + } + ], + "universal_actions": [ + { + "text": "text_value", + "open_link": "open_link_value", + "run_function": "run_function_value", + } + ], + "compose_trigger": { + "actions": [ + { + "run_function": "run_function_value", + "label": "label_value", + "logo_url": "logo_url_value", + } + ], + "draft_access": 1, + }, + "authorization_check_function": "authorization_check_function_value", + }, + "drive": { + "homepage_trigger": {}, + "on_items_selected_trigger": {"run_function": "run_function_value"}, + }, + "calendar": { + "homepage_trigger": {}, + "conference_solution": [ + { + "on_create_function": "on_create_function_value", + "id": "id_value", + "name": "name_value", + "logo_url": "logo_url_value", + } + ], + "create_settings_url_function": "create_settings_url_function_value", + "event_open_trigger": {"run_function": "run_function_value"}, + "event_update_trigger": {}, + "current_event_access": 1, + }, + "docs": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "sheets": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "slides": { + "homepage_trigger": {}, + "on_file_scope_granted_trigger": {"run_function": "run_function_value"}, + }, + "http_options": {"authorization_header": 1}, + }, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.replace_deployment(request) + + +def test_replace_deployment_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment": {"name": "projects/sample1/deployments/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + deployment=gsuiteaddons.Deployment(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.replace_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment.name=projects/*/deployments/*}" % client.transport._host, + args[1], + ) + + +def test_replace_deployment_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.replace_deployment( + gsuiteaddons.ReplaceDeploymentRequest(), + deployment=gsuiteaddons.Deployment(name="name_value"), + ) + + +def test_replace_deployment_rest_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.GetDeploymentRequest, + dict, + ], +) +def test_get_deployment_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Deployment( + name="name_value", + oauth_scopes=["oauth_scopes_value"], + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsuiteaddons.Deployment) + assert response.name == "name_value" + assert response.oauth_scopes == ["oauth_scopes_value"] + assert response.etag == "etag_value" + + +def test_get_deployment_rest_required_fields( + request_type=gsuiteaddons.GetDeploymentRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Deployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gsuiteaddons.Deployment.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_deployment(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_deployment_rest_unset_required_fields():
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_deployment._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_deployment_rest_interceptors(null_interceptor):
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.GSuiteAddOnsRestInterceptor(),
+    )
+    client = GSuiteAddOnsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "post_get_deployment"
+    ) as post, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "pre_get_deployment"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = gsuiteaddons.GetDeploymentRequest.pb(
+            gsuiteaddons.GetDeploymentRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gsuiteaddons.Deployment.to_json(
+            gsuiteaddons.Deployment()
+        )
+
+        request = gsuiteaddons.GetDeploymentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gsuiteaddons.Deployment()
+
+        client.get_deployment(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_deployment_rest_bad_request(
+    transport: str = "rest", request_type=gsuiteaddons.GetDeploymentRequest
+):
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/deployments/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_deployment(request) + + +def test_get_deployment_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.Deployment() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/deployments/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.Deployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/deployments/*}" % client.transport._host, args[1] + ) + + +def test_get_deployment_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deployment( + gsuiteaddons.GetDeploymentRequest(), + name="name_value", + ) + + +def test_get_deployment_rest_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.ListDeploymentsRequest, + dict, + ], +) +def test_list_deployments_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.ListDeploymentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_deployments(request) + + # Establish that the response is the type that we expect. 
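+    # The client wraps the raw ListDeploymentsResponse in a pager that can
+    # lazily fetch subsequent pages via next_page_token.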
+ assert isinstance(response, pagers.ListDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_deployments_rest_required_fields( + request_type=gsuiteaddons.ListDeploymentsRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.ListDeploymentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gsuiteaddons.ListDeploymentsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_deployments(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_deployments_rest_unset_required_fields():
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_deployments._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_deployments_rest_interceptors(null_interceptor):
+    transport = transports.GSuiteAddOnsRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.GSuiteAddOnsRestInterceptor(),
+    )
+    client = GSuiteAddOnsClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "post_list_deployments"
+    ) as post, mock.patch.object(
+        transports.GSuiteAddOnsRestInterceptor, "pre_list_deployments"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = gsuiteaddons.ListDeploymentsRequest.pb(
+            gsuiteaddons.ListDeploymentsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gsuiteaddons.ListDeploymentsResponse.to_json(
+            gsuiteaddons.ListDeploymentsResponse()
+        )
+
+        request = gsuiteaddons.ListDeploymentsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gsuiteaddons.ListDeploymentsResponse()
+
+        client.list_deployments(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_deployments_rest_bad_request(
+    transport: str = "rest", request_type=gsuiteaddons.ListDeploymentsRequest
+):
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_deployments(request) + + +def test_list_deployments_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.ListDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/deployments" % client.transport._host, args[1] + ) + + +def test_list_deployments_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployments( + gsuiteaddons.ListDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_deployments_rest_pager(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + next_page_token="abc", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + ], + next_page_token="ghi", + ), + gsuiteaddons.ListDeploymentsResponse( + deployments=[ + gsuiteaddons.Deployment(), + gsuiteaddons.Deployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + gsuiteaddons.ListDeploymentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, gsuiteaddons.Deployment) for i in results) + + pages = list(client.list_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.DeleteDeploymentRequest, + dict, + ], +) +def test_delete_deployment_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_deployment(request) + + # Establish that the response is the type that we expect. 
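+    # DeleteDeployment returns google.protobuf.Empty on the wire, which the
+    # generated surface exposes as None.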
+ assert response is None + + +def test_delete_deployment_rest_required_fields( + request_type=gsuiteaddons.DeleteDeploymentRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
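+            # Only uri/method/query_params are stubbed here: a DELETE-style
+            # call carries no request body.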
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deployment_rest_unset_required_fields(): + transport = transports.GSuiteAddOnsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.GSuiteAddOnsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GSuiteAddOnsRestInterceptor(), + ) + client = GSuiteAddOnsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "pre_delete_deployment" + ) as pre: + pre.assert_not_called() + pb_message = gsuiteaddons.DeleteDeploymentRequest.pb( + gsuiteaddons.DeleteDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = gsuiteaddons.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_deployment_rest_bad_request( + transport: str = "rest", request_type=gsuiteaddons.DeleteDeploymentRequest +): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_deployment(request) + + +def test_delete_deployment_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/deployments/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/deployments/*}" % client.transport._host, args[1] + ) + + +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deployment( + gsuiteaddons.DeleteDeploymentRequest(), + name="name_value", + ) + + +def test_delete_deployment_rest_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.InstallDeploymentRequest, + dict, + ], +) +def test_install_deployment_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.install_deployment(request) + + # Establish that the response is the type that we expect. 
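+    # Like delete_deployment, install_deployment maps an Empty response to None.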
+ assert response is None + + +def test_install_deployment_rest_required_fields( + request_type=gsuiteaddons.InstallDeploymentRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).install_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).install_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
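+            # POST-style stubs also need a body, attached below via
+            # transcode_result["body"].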
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.install_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_install_deployment_rest_unset_required_fields(): + transport = transports.GSuiteAddOnsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.install_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_install_deployment_rest_interceptors(null_interceptor): + transport = transports.GSuiteAddOnsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GSuiteAddOnsRestInterceptor(), + ) + client = GSuiteAddOnsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "pre_install_deployment" + ) as pre: + pre.assert_not_called() + pb_message = gsuiteaddons.InstallDeploymentRequest.pb( + gsuiteaddons.InstallDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = gsuiteaddons.InstallDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.install_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_install_deployment_rest_bad_request( + transport: str = "rest", request_type=gsuiteaddons.InstallDeploymentRequest +): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.install_deployment(request) + + +def test_install_deployment_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/deployments/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.install_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/deployments/*}:install" % client.transport._host, + args[1], + ) + + +def test_install_deployment_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.install_deployment( + gsuiteaddons.InstallDeploymentRequest(), + name="name_value", + ) + + +def test_install_deployment_rest_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.UninstallDeploymentRequest, + dict, + ], +) +def test_uninstall_deployment_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.uninstall_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_uninstall_deployment_rest_required_fields( + request_type=gsuiteaddons.UninstallDeploymentRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).uninstall_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).uninstall_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.uninstall_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_uninstall_deployment_rest_unset_required_fields(): + transport = transports.GSuiteAddOnsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.uninstall_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_uninstall_deployment_rest_interceptors(null_interceptor): + transport = transports.GSuiteAddOnsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GSuiteAddOnsRestInterceptor(), + ) + client = GSuiteAddOnsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "pre_uninstall_deployment" + ) as pre: + pre.assert_not_called() + pb_message = gsuiteaddons.UninstallDeploymentRequest.pb( + gsuiteaddons.UninstallDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = gsuiteaddons.UninstallDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.uninstall_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_uninstall_deployment_rest_bad_request( + transport: str = "rest", request_type=gsuiteaddons.UninstallDeploymentRequest +): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.uninstall_deployment(request) + + +def test_uninstall_deployment_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/deployments/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.uninstall_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/deployments/*}:uninstall" % client.transport._host, + args[1], + ) + + +def test_uninstall_deployment_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.uninstall_deployment( + gsuiteaddons.UninstallDeploymentRequest(), + name="name_value", + ) + + +def test_uninstall_deployment_rest_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gsuiteaddons.GetInstallStatusRequest, + dict, + ], +) +def test_get_install_status_rest(request_type): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2/installStatus"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.InstallStatus( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.InstallStatus.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_install_status(request) + + # Establish that the response is the type that we expect. 
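+    # Only fields explicitly set on return_value survive the JSON round trip
+    # through the mocked response, hence the single `name` assertion.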
+ assert isinstance(response, gsuiteaddons.InstallStatus) + assert response.name == "name_value" + + +def test_get_install_status_rest_required_fields( + request_type=gsuiteaddons.GetInstallStatusRequest, +): + transport_class = transports.GSuiteAddOnsRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_install_status._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_install_status._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.InstallStatus() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gsuiteaddons.InstallStatus.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_install_status(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_install_status_rest_unset_required_fields(): + transport = transports.GSuiteAddOnsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_install_status._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_install_status_rest_interceptors(null_interceptor): + transport = transports.GSuiteAddOnsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GSuiteAddOnsRestInterceptor(), + ) + client = GSuiteAddOnsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "post_get_install_status" + ) as post, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "pre_get_install_status" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gsuiteaddons.GetInstallStatusRequest.pb( + gsuiteaddons.GetInstallStatusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gsuiteaddons.InstallStatus.to_json( + gsuiteaddons.InstallStatus() + ) + + request = gsuiteaddons.GetInstallStatusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gsuiteaddons.InstallStatus() + + client.get_install_status( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_install_status_rest_bad_request( + transport: str = "rest", request_type=gsuiteaddons.GetInstallStatusRequest +): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/deployments/sample2/installStatus"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_install_status(request) + + +def test_get_install_status_rest_flattened(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gsuiteaddons.InstallStatus() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/deployments/sample2/installStatus"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gsuiteaddons.InstallStatus.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_install_status(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/deployments/*/installStatus}" + % client.transport._host, + args[1], + ) + + +def test_get_install_status_rest_flattened_error(transport: str = "rest"): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_install_status( + gsuiteaddons.GetInstallStatusRequest(), + name="name_value", + ) + + +def test_get_install_status_rest_error(): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GSuiteAddOnsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GSuiteAddOnsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GSuiteAddOnsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GSuiteAddOnsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GSuiteAddOnsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
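+    # (An api_key and explicit credentials are mutually exclusive ways of
+    # authenticating, so the client rejects the combination up front.)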
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GSuiteAddOnsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GSuiteAddOnsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GSuiteAddOnsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GSuiteAddOnsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GSuiteAddOnsClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.GSuiteAddOnsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GSuiteAddOnsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GSuiteAddOnsGrpcTransport, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + transports.GSuiteAddOnsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = GSuiteAddOnsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GSuiteAddOnsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GSuiteAddOnsGrpcTransport, + ) + + +def test_g_suite_add_ons_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GSuiteAddOnsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_g_suite_add_ons_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.transports.GSuiteAddOnsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GSuiteAddOnsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
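+    # (The concrete gRPC and REST transports override each of these stubs;
+    # the base class only defines the interface.)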
+ methods = ( + "get_authorization", + "create_deployment", + "replace_deployment", + "get_deployment", + "list_deployments", + "delete_deployment", + "install_deployment", + "uninstall_deployment", + "get_install_status", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_g_suite_add_ons_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.transports.GSuiteAddOnsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GSuiteAddOnsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_g_suite_add_ons_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.transports.GSuiteAddOnsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GSuiteAddOnsTransport() + adc.assert_called_once() + + +def test_g_suite_add_ons_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GSuiteAddOnsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GSuiteAddOnsGrpcTransport, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + ], +) +def test_g_suite_add_ons_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
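+    # Patching google.auth.default keeps the test hermetic (no real ADC lookup)
+    # while still verifying that scopes and quota_project_id are forwarded.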
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GSuiteAddOnsGrpcTransport, + transports.GSuiteAddOnsGrpcAsyncIOTransport, + transports.GSuiteAddOnsRestTransport, + ], +) +def test_g_suite_add_ons_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.GSuiteAddOnsGrpcTransport, grpc_helpers), + (transports.GSuiteAddOnsGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_g_suite_add_ons_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "gsuiteaddons.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="gsuiteaddons.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.GSuiteAddOnsGrpcTransport, transports.GSuiteAddOnsGrpcAsyncIOTransport], +) +def test_g_suite_add_ons_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
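+    # client_cert_source_callback is the module-level helper (defined earlier in
+    # this file) that returns the (cert, key) byte pair that
+    # grpc.ssl_channel_credentials should receive.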
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_g_suite_add_ons_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.GSuiteAddOnsRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_g_suite_add_ons_host_no_port(transport_name):
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="gsuiteaddons.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "gsuiteaddons.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://gsuiteaddons.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_g_suite_add_ons_host_with_port(transport_name):
+    client = GSuiteAddOnsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="gsuiteaddons.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "gsuiteaddons.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://gsuiteaddons.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_g_suite_add_ons_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = GSuiteAddOnsClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = GSuiteAddOnsClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_authorization._session
+    session2 = client2.transport.get_authorization._session
+    assert session1 != session2
+    session1 = client1.transport.create_deployment._session
+    session2 = client2.transport.create_deployment._session
+    assert session1 != session2
+    session1 = client1.transport.replace_deployment._session
+    session2 = client2.transport.replace_deployment._session
+    assert session1 != session2
+    session1 = client1.transport.get_deployment._session
+    session2 = client2.transport.get_deployment._session
+    assert session1 != session2
+    session1 = client1.transport.list_deployments._session
+    session2 = client2.transport.list_deployments._session
+    assert session1 != session2
+    session1 = client1.transport.delete_deployment._session
+    session2 = client2.transport.delete_deployment._session
+    assert session1 != session2
+    session1 = client1.transport.install_deployment._session
+    session2 = client2.transport.install_deployment._session
+    assert session1 != session2
+    session1 = client1.transport.uninstall_deployment._session
+    session2 = client2.transport.uninstall_deployment._session
+    assert session1 != session2
+    session1 = client1.transport.get_install_status._session
+    session2 = client2.transport.get_install_status._session
+    assert session1 != session2
+
+
+def test_g_suite_add_ons_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.GSuiteAddOnsGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_g_suite_add_ons_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.GSuiteAddOnsGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.GSuiteAddOnsGrpcTransport, transports.GSuiteAddOnsGrpcAsyncIOTransport],
+)
+def test_g_suite_add_ons_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.GSuiteAddOnsGrpcTransport, transports.GSuiteAddOnsGrpcAsyncIOTransport], +) +def test_g_suite_add_ons_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_authorization_path(): + project = "squid" + expected = "projects/{project}/authorization".format( + project=project, + ) + actual = GSuiteAddOnsClient.authorization_path(project) + assert expected == actual + + +def test_parse_authorization_path(): + expected = { + "project": "clam", + } + path = GSuiteAddOnsClient.authorization_path(**expected) + + # Check that the path construction is reversible. + actual = GSuiteAddOnsClient.parse_authorization_path(path) + assert expected == actual + + +def test_deployment_path(): + project = "whelk" + deployment = "octopus" + expected = "projects/{project}/deployments/{deployment}".format( + project=project, + deployment=deployment, + ) + actual = GSuiteAddOnsClient.deployment_path(project, deployment) + assert expected == actual + + +def test_parse_deployment_path(): + expected = { + "project": "oyster", + "deployment": "nudibranch", + } + path = GSuiteAddOnsClient.deployment_path(**expected) + + # Check that the path construction is reversible. + actual = GSuiteAddOnsClient.parse_deployment_path(path) + assert expected == actual + + +def test_install_status_path(): + project = "cuttlefish" + deployment = "mussel" + expected = "projects/{project}/deployments/{deployment}/installStatus".format( + project=project, + deployment=deployment, + ) + actual = GSuiteAddOnsClient.install_status_path(project, deployment) + assert expected == actual + + +def test_parse_install_status_path(): + expected = { + "project": "winkle", + "deployment": "nautilus", + } + path = GSuiteAddOnsClient.install_status_path(**expected) + + # Check that the path construction is reversible. + actual = GSuiteAddOnsClient.parse_install_status_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = GSuiteAddOnsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = GSuiteAddOnsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
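+    # parse_* helpers invert the corresponding *_path templates with a regex,
+    # returning {} when the path does not match.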
+ actual = GSuiteAddOnsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = GSuiteAddOnsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = GSuiteAddOnsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = GSuiteAddOnsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = GSuiteAddOnsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = GSuiteAddOnsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GSuiteAddOnsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = GSuiteAddOnsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = GSuiteAddOnsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GSuiteAddOnsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = GSuiteAddOnsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = GSuiteAddOnsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+    actual = GSuiteAddOnsClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.GSuiteAddOnsTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = GSuiteAddOnsClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.GSuiteAddOnsTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = GSuiteAddOnsClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+    client = GSuiteAddOnsAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    with mock.patch.object(
+        type(getattr(client.transport, "grpc_channel")), "close"
+    ) as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = GSuiteAddOnsClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = GSuiteAddOnsClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (GSuiteAddOnsClient, transports.GSuiteAddOnsGrpcTransport),
+        (GSuiteAddOnsAsyncClient, transports.GSuiteAddOnsGrpcAsyncIOTransport),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/packages/google-cloud-network-services/CONTRIBUTING.rst b/packages/google-cloud-network-services/CONTRIBUTING.rst
index 5eee180b2a9a..3c96426dbcfb 100644
--- a/packages/google-cloud-network-services/CONTRIBUTING.rst
+++ b/packages/google-cloud-network-services/CONTRIBUTING.rst
@@ -143,12 +143,12 @@ Running System Tests
    $ nox -s system
 
    # Run a single system test
-   $ nox -s system- -- -k <name of test>
+   $ nox -s system-3.11 -- -k <name of test>
 
 .. note::
 
-   System tests are only configured to run under Python.
+   System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py b/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py index d6d3182d3f2a..360a0d13ebdd 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py +++ b/packages/google-cloud-network-services/google/cloud/network_services/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py index d6d3182d3f2a..360a0d13ebdd 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-network-services/noxfile.py b/packages/google-cloud-network-services/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-network-services/noxfile.py +++ b/packages/google-cloud-network-services/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. 
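With SYSTEM_TEST_PYTHON_VERSIONS now populated, nox materializes one system session per interpreter, which is what makes the nox -s system-3.11 invocation above resolvable. A simplified sketch of that wiring, assuming the usual noxfile conventions (the real noxfile adds constraints handling and skip logic; the removed prerelease_deps block continues below):

    import nox

    SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"]

    @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
    def system(session):
        # One parametrized session per version: system-3.8 ... system-3.11.
        session.install("mock", "pytest", "google-cloud-testutils")
        session.run("py.test", "--verbose", "tests/system", *session.posargs)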
- if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_async.py new file mode 100644 index 000000000000..c84910a50adc --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEndpointPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateEndpointPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_endpoint_policy(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + endpoint_policy = network_services_v1.EndpointPolicy() + endpoint_policy.name = "name_value" + endpoint_policy.type_ = "GRPC_SERVER" + + request = network_services_v1.CreateEndpointPolicyRequest( + parent="parent_value", + endpoint_policy_id="endpoint_policy_id_value", + endpoint_policy=endpoint_policy, + ) + + # Make the request + operation = client.create_endpoint_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateEndpointPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_sync.py new file mode 100644 index 000000000000..86a24875fcd1 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEndpointPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateEndpointPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
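The async variant above only defines a coroutine; nothing in the sample executes it. A typical driver, assuming sample_create_endpoint_policy is in scope:

    import asyncio

    # Run the coroutine defined by the async sample above.
    asyncio.run(sample_create_endpoint_policy())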
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_endpoint_policy(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + endpoint_policy = network_services_v1.EndpointPolicy() + endpoint_policy.name = "name_value" + endpoint_policy.type_ = "GRPC_SERVER" + + request = network_services_v1.CreateEndpointPolicyRequest( + parent="parent_value", + endpoint_policy_id="endpoint_policy_id_value", + endpoint_policy=endpoint_policy, + ) + + # Make the request + operation = client.create_endpoint_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateEndpointPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_async.py new file mode 100644 index 000000000000..bede0a5f7f58 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateGateway_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
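In the sync variant, operation.result() blocks until the long-running operation completes. The returned google.api_core.operation.Operation also supports non-blocking checks and bounded waits; a sketch reusing the names from the sync sample above:

    # After create_endpoint_policy returns the Operation wrapper:
    operation = client.create_endpoint_policy(request=request)

    print(operation.metadata)  # server-reported operation metadata, if any
    if not operation.done():   # non-blocking completion check
        response = operation.result(timeout=300)  # wait at most five minutes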
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_gateway(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + gateway = network_services_v1.Gateway() + gateway.name = "name_value" + gateway.ports = [569, 570] + gateway.scope = "scope_value" + + request = network_services_v1.CreateGatewayRequest( + parent="parent_value", + gateway_id="gateway_id_value", + gateway=gateway, + ) + + # Make the request + operation = client.create_gateway(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateGateway_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_sync.py new file mode 100644 index 000000000000..78926d84840a --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateGateway_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_gateway(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + gateway = network_services_v1.Gateway() + gateway.name = "name_value" + gateway.ports = [569, 570] + gateway.scope = "scope_value" + + request = network_services_v1.CreateGatewayRequest( + parent="parent_value", + gateway_id="gateway_id_value", + gateway=gateway, + ) + + # Make the request + operation = client.create_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateGateway_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_async.py new file mode 100644 index 000000000000..021c8a4e3eaa --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGrpcRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateGrpcRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_grpc_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + grpc_route = network_services_v1.GrpcRoute() + grpc_route.name = "name_value" + grpc_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + + request = network_services_v1.CreateGrpcRouteRequest( + parent="parent_value", + grpc_route_id="grpc_route_id_value", + grpc_route=grpc_route, + ) + + # Make the request + operation = client.create_grpc_route(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateGrpcRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_sync.py new file mode 100644 index 000000000000..c7db8405e7a1 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGrpcRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateGrpcRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
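The hostnames assignment in the gRPC-route samples relies on proto-plus giving repeated fields list semantics; both wholesale assignment and in-place appends work. A sketch:

    from google.cloud import network_services_v1

    grpc_route = network_services_v1.GrpcRoute(name="name_value")
    grpc_route.hostnames = ["example.com"]          # replace the whole list
    grpc_route.hostnames.append("alt.example.com")  # or extend it in place
    assert list(grpc_route.hostnames) == ["example.com", "alt.example.com"]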
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_grpc_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + grpc_route = network_services_v1.GrpcRoute() + grpc_route.name = "name_value" + grpc_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + + request = network_services_v1.CreateGrpcRouteRequest( + parent="parent_value", + grpc_route_id="grpc_route_id_value", + grpc_route=grpc_route, + ) + + # Make the request + operation = client.create_grpc_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateGrpcRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_async.py new file mode 100644 index 000000000000..1cf2b0fdbdaa --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateHttpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateHttpRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_http_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + http_route = network_services_v1.HttpRoute() + http_route.name = "name_value" + http_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + + request = network_services_v1.CreateHttpRouteRequest( + parent="parent_value", + http_route_id="http_route_id_value", + http_route=http_route, + ) + + # Make the request + operation = client.create_http_route(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateHttpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_sync.py new file mode 100644 index 000000000000..2682fb1f6ead --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateHttpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateHttpRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_http_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + http_route = network_services_v1.HttpRoute() + http_route.name = "name_value" + http_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + + request = network_services_v1.CreateHttpRouteRequest( + parent="parent_value", + http_route_id="http_route_id_value", + http_route=http_route, + ) + + # Make the request + operation = client.create_http_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateHttpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_async.py new file mode 100644 index 000000000000..b384749a14bd --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMesh +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateMesh_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
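Generated methods also accept a plain dict wherever a request message is expected; it is coerced to the request type internally. The HTTP-route call above, restated that way as a sketch:

    operation = client.create_http_route(
        request={
            "parent": "parent_value",
            "http_route_id": "http_route_id_value",
            "http_route": {
                "name": "name_value",
                "hostnames": ["hostnames_value1", "hostnames_value2"],
            },
        }
    )
    response = operation.result()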
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_mesh(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + mesh = network_services_v1.Mesh() + mesh.name = "name_value" + + request = network_services_v1.CreateMeshRequest( + parent="parent_value", + mesh_id="mesh_id_value", + mesh=mesh, + ) + + # Make the request + operation = client.create_mesh(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateMesh_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_sync.py new file mode 100644 index 000000000000..5b1935ddb376 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMesh +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateMesh_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_mesh(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + mesh = network_services_v1.Mesh() + mesh.name = "name_value" + + request = network_services_v1.CreateMeshRequest( + parent="parent_value", + mesh_id="mesh_id_value", + mesh=mesh, + ) + + # Make the request + operation = client.create_mesh(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateMesh_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_async.py new file mode 100644 index 000000000000..1b68cbe7e550 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateServiceBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateServiceBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_service_binding(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + service_binding = network_services_v1.ServiceBinding() + service_binding.name = "name_value" + service_binding.service = "service_value" + + request = network_services_v1.CreateServiceBindingRequest( + parent="parent_value", + service_binding_id="service_binding_id_value", + service_binding=service_binding, + ) + + # Make the request + operation = client.create_service_binding(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateServiceBinding_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_sync.py new file mode 100644 index 000000000000..f18e2acc3671 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateServiceBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateServiceBinding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
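Alongside the request object, these RPCs are typically generated with flattened keyword arguments for the fields shown above, and passing both forms at once raises ValueError. A sketch of the service-binding call in that style:

    # Equivalent to building CreateServiceBindingRequest explicitly.
    operation = client.create_service_binding(
        parent="parent_value",
        service_binding=service_binding,
        service_binding_id="service_binding_id_value",
    )
    response = operation.result()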
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_service_binding(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + service_binding = network_services_v1.ServiceBinding() + service_binding.name = "name_value" + service_binding.service = "service_value" + + request = network_services_v1.CreateServiceBindingRequest( + parent="parent_value", + service_binding_id="service_binding_id_value", + service_binding=service_binding, + ) + + # Make the request + operation = client.create_service_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateServiceBinding_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_async.py new file mode 100644 index 000000000000..72d0731b239a --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTcpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateTcpRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_tcp_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + tcp_route = network_services_v1.TcpRoute() + tcp_route.name = "name_value" + + request = network_services_v1.CreateTcpRouteRequest( + parent="parent_value", + tcp_route_id="tcp_route_id_value", + tcp_route=tcp_route, + ) + + # Make the request + operation = client.create_tcp_route(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateTcpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_sync.py new file mode 100644 index 000000000000..380d54ec92e9 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTcpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateTcpRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_tcp_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + tcp_route = network_services_v1.TcpRoute() + tcp_route.name = "name_value" + + request = network_services_v1.CreateTcpRouteRequest( + parent="parent_value", + tcp_route_id="tcp_route_id_value", + tcp_route=tcp_route, + ) + + # Make the request + operation = client.create_tcp_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateTcpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_async.py new file mode 100644 index 000000000000..00eb32008c7d --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTlsRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateTlsRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
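Every generated method additionally takes per-call retry and timeout keyword arguments from google.api_core; a sketch layered onto the TCP-route call above:

    from google.api_core import retry as retries

    operation = client.create_tcp_route(
        request=request,
        retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0),
        timeout=60.0,  # seconds allowed for the call
    )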
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_tls_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + tls_route = network_services_v1.TlsRoute() + tls_route.name = "name_value" + tls_route.rules.action.destinations.service_name = "service_name_value" + + request = network_services_v1.CreateTlsRouteRequest( + parent="parent_value", + tls_route_id="tls_route_id_value", + tls_route=tls_route, + ) + + # Make the request + operation = client.create_tls_route(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateTlsRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_sync.py new file mode 100644 index 000000000000..faeb086512cf --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTlsRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_CreateTlsRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_tls_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + tls_route = network_services_v1.TlsRoute() + tls_route.name = "name_value" + tls_route.rules.action.destinations.service_name = "service_name_value" + + request = network_services_v1.CreateTlsRouteRequest( + parent="parent_value", + tls_route_id="tls_route_id_value", + tls_route=tls_route, + ) + + # Make the request + operation = client.create_tls_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_CreateTlsRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_async.py new file mode 100644 index 000000000000..8cd7209ac9b4 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEndpointPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteEndpointPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_delete_endpoint_policy(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteEndpointPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_endpoint_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteEndpointPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py new file mode 100644 index 000000000000..a3251b40fb7e --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEndpointPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteEndpointPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_endpoint_policy(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteEndpointPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_endpoint_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteEndpointPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_async.py new file mode 100644 index 000000000000..eb556e75bcbb --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteGateway_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
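The Delete samples go through the same LRO machinery, but the operation's result is typically google.protobuf.Empty, so completion rather than the payload is the useful signal. A sketch:

    operation = client.delete_endpoint_policy(request=request)
    operation.result()  # raises on failure; success carries an Empty payload
    print("Endpoint policy deleted.")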
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_delete_gateway(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteGatewayRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteGateway_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_sync.py new file mode 100644 index 000000000000..b26ccdf06dca --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteGateway_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_gateway(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteGatewayRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteGateway_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_async.py new file mode 100644 index 000000000000..80bd6fada3b7 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGrpcRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteGrpcRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1
+
+
+async def sample_delete_grpc_route():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.DeleteGrpcRouteRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_grpc_route(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END networkservices_v1_generated_NetworkServices_DeleteGrpcRoute_async]
diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_sync.py
new file mode 100644
index 000000000000..c23ca2b54df3
--- /dev/null
+++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteGrpcRoute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-services
+
+
+# [START networkservices_v1_generated_NetworkServices_DeleteGrpcRoute_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_grpc_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteGrpcRouteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_grpc_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteGrpcRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_async.py new file mode 100644 index 000000000000..278f46aeb939 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteHttpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteHttpRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1
+
+
+async def sample_delete_http_route():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.DeleteHttpRouteRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_http_route(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END networkservices_v1_generated_NetworkServices_DeleteHttpRoute_async]
diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_sync.py
new file mode 100644
index 000000000000..1fc1d7eac1f3
--- /dev/null
+++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteHttpRoute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-services
+
+
+# [START networkservices_v1_generated_NetworkServices_DeleteHttpRoute_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_http_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteHttpRouteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_http_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteHttpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_async.py new file mode 100644 index 000000000000..ae2a8d4f61a2 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMesh +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteMesh_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1
+
+
+async def sample_delete_mesh():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.DeleteMeshRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_mesh(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END networkservices_v1_generated_NetworkServices_DeleteMesh_async]
diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_sync.py
new file mode 100644
index 000000000000..bb60b81942ee
--- /dev/null
+++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteMesh
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-services
+
+
+# [START networkservices_v1_generated_NetworkServices_DeleteMesh_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_mesh(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteMeshRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_mesh(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteMesh_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_async.py new file mode 100644 index 000000000000..324d618dc907 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteServiceBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteServiceBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1
+
+
+async def sample_delete_service_binding():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.DeleteServiceBindingRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_service_binding(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END networkservices_v1_generated_NetworkServices_DeleteServiceBinding_async]
diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_sync.py
new file mode 100644
index 000000000000..8af6fddaf4ba
--- /dev/null
+++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteServiceBinding
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-services
+
+
+# [START networkservices_v1_generated_NetworkServices_DeleteServiceBinding_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_service_binding(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteServiceBindingRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_service_binding(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteServiceBinding_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_async.py new file mode 100644 index 000000000000..36081629b4d4 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTcpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteTcpRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1
+
+
+async def sample_delete_tcp_route():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.DeleteTcpRouteRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_tcp_route(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END networkservices_v1_generated_NetworkServices_DeleteTcpRoute_async]
diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_sync.py
new file mode 100644
index 000000000000..492e5996cc58
--- /dev/null
+++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteTcpRoute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-services
+
+
+# [START networkservices_v1_generated_NetworkServices_DeleteTcpRoute_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_tcp_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteTcpRouteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_tcp_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteTcpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_async.py new file mode 100644 index 000000000000..30abb6466e68 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTlsRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_DeleteTlsRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1
+
+
+async def sample_delete_tls_route():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.DeleteTlsRouteRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_tls_route(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END networkservices_v1_generated_NetworkServices_DeleteTlsRoute_async]
diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_sync.py
new file mode 100644
index 000000000000..abece3f2466b
--- /dev/null
+++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteTlsRoute
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-network-services
+
+
+# [START networkservices_v1_generated_NetworkServices_DeleteTlsRoute_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_tls_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteTlsRouteRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_tls_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_DeleteTlsRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_async.py new file mode 100644 index 000000000000..7fbca33e501d --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEndpointPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetEndpointPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_endpoint_policy(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetEndpointPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_endpoint_policy(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetEndpointPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_sync.py new file mode 100644 index 000000000000..efd6e6a83503 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEndpointPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetEndpointPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_endpoint_policy(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.GetEndpointPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_endpoint_policy(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetEndpointPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_async.py new file mode 100644 index 000000000000..37cd04773678 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetGateway_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_gateway(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetGatewayRequest( + name="name_value", + ) + + # Make the request + response = await client.get_gateway(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetGateway_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_sync.py new file mode 100644 index 000000000000..4e83fb7ac6fe --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetGateway_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_gateway(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.GetGatewayRequest( + name="name_value", + ) + + # Make the request + response = client.get_gateway(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetGateway_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_async.py new file mode 100644 index 000000000000..389ee3448144 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGrpcRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetGrpcRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_grpc_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetGrpcRouteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_grpc_route(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetGrpcRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_sync.py new file mode 100644 index 000000000000..67528a958309 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGrpcRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetGrpcRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_grpc_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.GetGrpcRouteRequest( + name="name_value", + ) + + # Make the request + response = client.get_grpc_route(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetGrpcRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_async.py new file mode 100644 index 000000000000..792978c3f294 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHttpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetHttpRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_http_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetHttpRouteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_http_route(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetHttpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_sync.py new file mode 100644 index 000000000000..c14cbb39b933 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHttpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetHttpRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_http_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.GetHttpRouteRequest( + name="name_value", + ) + + # Make the request + response = client.get_http_route(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetHttpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_async.py new file mode 100644 index 000000000000..495e73a4d909 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMesh +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetMesh_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_mesh(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetMeshRequest( + name="name_value", + ) + + # Make the request + response = await client.get_mesh(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetMesh_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_sync.py new file mode 100644 index 000000000000..f21e69f6f215 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMesh +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetMesh_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_mesh(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.GetMeshRequest( + name="name_value", + ) + + # Make the request + response = client.get_mesh(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetMesh_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_async.py new file mode 100644 index 000000000000..15689df099d7 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetServiceBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetServiceBinding_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_service_binding(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetServiceBindingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_service_binding(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetServiceBinding_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_sync.py new file mode 100644 index 000000000000..5ea1437a1e9e --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetServiceBinding +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetServiceBinding_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_service_binding(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.GetServiceBindingRequest( + name="name_value", + ) + + # Make the request + response = client.get_service_binding(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetServiceBinding_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_async.py new file mode 100644 index 000000000000..a766b72cc494 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTcpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetTcpRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_tcp_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetTcpRouteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tcp_route(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetTcpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_sync.py new file mode 100644 index 000000000000..f52090423a78 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTcpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetTcpRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_tcp_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.GetTcpRouteRequest( + name="name_value", + ) + + # Make the request + response = client.get_tcp_route(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetTcpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_async.py new file mode 100644 index 000000000000..17c4f3dc531a --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTlsRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetTlsRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_tls_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetTlsRouteRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tls_route(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetTlsRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_sync.py new file mode 100644 index 000000000000..9a54a67de677 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTlsRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_GetTlsRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_tls_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.GetTlsRouteRequest( + name="name_value", + ) + + # Make the request + response = client.get_tls_route(request=request) + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_GetTlsRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_async.py new file mode 100644 index 000000000000..801988b1b725 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEndpointPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListEndpointPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_list_endpoint_policies():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.ListEndpointPoliciesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; the async client method is a coroutine and must be
+    # awaited to obtain the AsyncPager before iterating.
+    page_result = await client.list_endpoint_policies(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END networkservices_v1_generated_NetworkServices_ListEndpointPolicies_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_sync.py new file mode 100644 index 000000000000..1c43d3fdbd0c --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEndpointPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListEndpointPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_endpoint_policies(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.ListEndpointPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_endpoint_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END networkservices_v1_generated_NetworkServices_ListEndpointPolicies_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_async.py new file mode 100644 index 000000000000..f030cfcc8be2 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGateways +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListGateways_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_list_gateways():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.ListGatewaysRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; await the coroutine to obtain the AsyncPager.
+    page_result = await client.list_gateways(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END networkservices_v1_generated_NetworkServices_ListGateways_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_sync.py new file mode 100644 index 000000000000..f531206a7008 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGateways
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListGateways_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_gateways(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.ListGatewaysRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_gateways(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END networkservices_v1_generated_NetworkServices_ListGateways_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_async.py new file mode 100644 index 000000000000..9b026fd7803c --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGrpcRoutes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListGrpcRoutes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_list_grpc_routes():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.ListGrpcRoutesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; await the coroutine to obtain the AsyncPager.
+    page_result = await client.list_grpc_routes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END networkservices_v1_generated_NetworkServices_ListGrpcRoutes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_sync.py new file mode 100644 index 000000000000..df2ac6101587 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGrpcRoutes
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListGrpcRoutes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_grpc_routes(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.ListGrpcRoutesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_grpc_routes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END networkservices_v1_generated_NetworkServices_ListGrpcRoutes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_async.py new file mode 100644 index 000000000000..1151a8a741ee --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListHttpRoutes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListHttpRoutes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_list_http_routes():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.ListHttpRoutesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; await the coroutine to obtain the AsyncPager.
+    page_result = await client.list_http_routes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END networkservices_v1_generated_NetworkServices_ListHttpRoutes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_sync.py new file mode 100644 index 000000000000..6e8c8af453ae --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListHttpRoutes
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListHttpRoutes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_http_routes(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.ListHttpRoutesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_http_routes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END networkservices_v1_generated_NetworkServices_ListHttpRoutes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_async.py new file mode 100644 index 000000000000..c520e99c1ce2 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMeshes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListMeshes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_list_meshes():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.ListMeshesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; await the coroutine to obtain the AsyncPager.
+    page_result = await client.list_meshes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END networkservices_v1_generated_NetworkServices_ListMeshes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_sync.py new file mode 100644 index 000000000000..13e2d7a21566 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMeshes
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListMeshes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_meshes(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.ListMeshesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_meshes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END networkservices_v1_generated_NetworkServices_ListMeshes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_async.py new file mode 100644 index 000000000000..f509b60fec75 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServiceBindings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListServiceBindings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_list_service_bindings():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.ListServiceBindingsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; await the coroutine to obtain the AsyncPager.
+    page_result = await client.list_service_bindings(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END networkservices_v1_generated_NetworkServices_ListServiceBindings_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_sync.py new file mode 100644 index 000000000000..ceefaf4c2223 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListServiceBindings
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListServiceBindings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_service_bindings(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.ListServiceBindingsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_service_bindings(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END networkservices_v1_generated_NetworkServices_ListServiceBindings_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_async.py new file mode 100644 index 000000000000..cfd339f70965 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTcpRoutes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListTcpRoutes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_list_tcp_routes():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.ListTcpRoutesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; await the coroutine to obtain the AsyncPager.
+    page_result = await client.list_tcp_routes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END networkservices_v1_generated_NetworkServices_ListTcpRoutes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_sync.py new file mode 100644 index 000000000000..bdc7fc211afc --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTcpRoutes
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListTcpRoutes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_tcp_routes(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.ListTcpRoutesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tcp_routes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END networkservices_v1_generated_NetworkServices_ListTcpRoutes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_async.py new file mode 100644 index 000000000000..60c447c897d8 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTlsRoutes +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListTlsRoutes_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_list_tls_routes():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    request = network_services_v1.ListTlsRoutesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request; await the coroutine to obtain the AsyncPager.
+    page_result = await client.list_tls_routes(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END networkservices_v1_generated_NetworkServices_ListTlsRoutes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_sync.py new file mode 100644 index 000000000000..7bd069323dcf --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTlsRoutes
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_ListTlsRoutes_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_tls_routes(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + request = network_services_v1.ListTlsRoutesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tls_routes(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END networkservices_v1_generated_NetworkServices_ListTlsRoutes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_async.py new file mode 100644 index 000000000000..50c6bbe8c692 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEndpointPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateEndpointPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_update_endpoint_policy():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    endpoint_policy = network_services_v1.EndpointPolicy()
+    endpoint_policy.name = "name_value"
+    endpoint_policy.type_ = "GRPC_SERVER"
+
+    request = network_services_v1.UpdateEndpointPolicyRequest(
+        endpoint_policy=endpoint_policy,
+    )
+
+    # Make the request; awaiting the call returns an AsyncOperation.
+    operation = await client.update_endpoint_policy(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is a coroutine and must also be awaited.
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END networkservices_v1_generated_NetworkServices_UpdateEndpointPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_sync.py new file mode 100644 index 000000000000..235278c9dd2a --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEndpointPolicy
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateEndpointPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_update_endpoint_policy(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + endpoint_policy = network_services_v1.EndpointPolicy() + endpoint_policy.name = "name_value" + endpoint_policy.type_ = "GRPC_SERVER" + + request = network_services_v1.UpdateEndpointPolicyRequest( + endpoint_policy=endpoint_policy, + ) + + # Make the request + operation = client.update_endpoint_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateEndpointPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_async.py new file mode 100644 index 000000000000..63867e7c4e38 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGateway +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateGateway_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import network_services_v1


+async def sample_update_gateway():
+    # Create a client
+    client = network_services_v1.NetworkServicesAsyncClient()
+
+    # Initialize request argument(s)
+    gateway = network_services_v1.Gateway()
+    gateway.name = "name_value"
+    gateway.ports = [569, 570]
+    gateway.scope = "scope_value"
+
+    request = network_services_v1.UpdateGatewayRequest(
+        gateway=gateway,
+    )
+
+    # Make the request; awaiting the call returns an AsyncOperation.
+    operation = await client.update_gateway(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is a coroutine and must also be awaited.
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END networkservices_v1_generated_NetworkServices_UpdateGateway_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_sync.py new file mode 100644 index 000000000000..f6fd47d3d0a9 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGateway
+# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateGateway_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_update_gateway(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + gateway = network_services_v1.Gateway() + gateway.name = "name_value" + gateway.ports = [569, 570] + gateway.scope = "scope_value" + + request = network_services_v1.UpdateGatewayRequest( + gateway=gateway, + ) + + # Make the request + operation = client.update_gateway(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateGateway_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_async.py new file mode 100644 index 000000000000..c35ba12065b4 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGrpcRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateGrpcRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
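+#   In the async variant below, the client method returns a coroutine that
+#   resolves to a google.api_core.operation_async.AsyncOperation. A minimal
+#   sketch of the long-running-operation flow, spelled out step by step
+#   (whether result() itself must be awaited depends on the google-api-core
+#   version in use):
+#
+#     operation = client.update_grpc_route(request=request)  # coroutine
+#     lro = await operation                                  # AsyncOperation
+#     response = await lro.result()                          # final GrpcRoute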
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_update_grpc_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + grpc_route = network_services_v1.GrpcRoute() + grpc_route.name = "name_value" + grpc_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + + request = network_services_v1.UpdateGrpcRouteRequest( + grpc_route=grpc_route, + ) + + # Make the request + operation = client.update_grpc_route(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateGrpcRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_sync.py new file mode 100644 index 000000000000..9511063269a3 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGrpcRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateGrpcRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_update_grpc_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + grpc_route = network_services_v1.GrpcRoute() + grpc_route.name = "name_value" + grpc_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + + request = network_services_v1.UpdateGrpcRouteRequest( + grpc_route=grpc_route, + ) + + # Make the request + operation = client.update_grpc_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateGrpcRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_async.py new file mode 100644 index 000000000000..43a5f301f729 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHttpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateHttpRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
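+#   Update requests in this API generally follow the AIP-134 pattern and can
+#   carry an optional update_mask; a hedged sketch, assuming the request
+#   message exposes an update_mask field as most Update*Request messages do:
+#
+#     from google.protobuf import field_mask_pb2
+#     request = network_services_v1.UpdateHttpRouteRequest(
+#         http_route=http_route,
+#         update_mask=field_mask_pb2.FieldMask(paths=["hostnames"]),
+#     )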
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_update_http_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + http_route = network_services_v1.HttpRoute() + http_route.name = "name_value" + http_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + + request = network_services_v1.UpdateHttpRouteRequest( + http_route=http_route, + ) + + # Make the request + operation = client.update_http_route(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateHttpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_sync.py new file mode 100644 index 000000000000..cbedb591897b --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateHttpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateHttpRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_update_http_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + http_route = network_services_v1.HttpRoute() + http_route.name = "name_value" + http_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + + request = network_services_v1.UpdateHttpRouteRequest( + http_route=http_route, + ) + + # Make the request + operation = client.update_http_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateHttpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_async.py new file mode 100644 index 000000000000..846c73a14d24 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMesh +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateMesh_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
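+#   proto-plus messages such as Mesh can also be built with constructor
+#   keyword arguments instead of attribute assignment; an equivalent sketch
+#   for the request constructed below:
+#
+#     request = network_services_v1.UpdateMeshRequest(
+#         mesh=network_services_v1.Mesh(name="name_value"),
+#     )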
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_update_mesh(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + mesh = network_services_v1.Mesh() + mesh.name = "name_value" + + request = network_services_v1.UpdateMeshRequest( + mesh=mesh, + ) + + # Make the request + operation = client.update_mesh(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateMesh_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_sync.py new file mode 100644 index 000000000000..e04df313206d --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateMesh +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateMesh_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_update_mesh(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + mesh = network_services_v1.Mesh() + mesh.name = "name_value" + + request = network_services_v1.UpdateMeshRequest( + mesh=mesh, + ) + + # Make the request + operation = client.update_mesh(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateMesh_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_async.py new file mode 100644 index 000000000000..37235093dfd3 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTcpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateTcpRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
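+#   The "name_value" placeholder below stands in for a full resource path;
+#   for this API a TcpRoute name typically looks like the following (hedged;
+#   confirm the exact format in the service documentation):
+#
+#     tcp_route.name = "projects/my-project/locations/global/tcpRoutes/my-route"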
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_update_tcp_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + tcp_route = network_services_v1.TcpRoute() + tcp_route.name = "name_value" + + request = network_services_v1.UpdateTcpRouteRequest( + tcp_route=tcp_route, + ) + + # Make the request + operation = client.update_tcp_route(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateTcpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_sync.py new file mode 100644 index 000000000000..682b4a7f1839 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTcpRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateTcpRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_update_tcp_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + tcp_route = network_services_v1.TcpRoute() + tcp_route.name = "name_value" + + request = network_services_v1.UpdateTcpRouteRequest( + tcp_route=tcp_route, + ) + + # Make the request + operation = client.update_tcp_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateTcpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_async.py new file mode 100644 index 000000000000..21e61aa06cab --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTlsRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateTlsRoute_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
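+#   In the sample below, rules and destinations are repeated fields that are
+#   written through as if they were singular; when adapting the snippet it can
+#   be clearer to build the elements explicitly. A hedged sketch, assuming the
+#   nested message names RouteRule and RouteDestination used by this API:
+#
+#     rule = network_services_v1.TlsRoute.RouteRule()
+#     rule.action.destinations.append(
+#         network_services_v1.TlsRoute.RouteDestination(
+#             service_name="service_name_value"
+#         )
+#     )
+#     tls_route.rules.append(rule)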
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_update_tls_route(): + # Create a client + client = network_services_v1.NetworkServicesAsyncClient() + + # Initialize request argument(s) + tls_route = network_services_v1.TlsRoute() + tls_route.name = "name_value" + tls_route.rules.action.destinations.service_name = "service_name_value" + + request = network_services_v1.UpdateTlsRouteRequest( + tls_route=tls_route, + ) + + # Make the request + operation = client.update_tls_route(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateTlsRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_sync.py new file mode 100644 index 000000000000..5f01bf036d6a --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTlsRoute +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_NetworkServices_UpdateTlsRoute_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_update_tls_route(): + # Create a client + client = network_services_v1.NetworkServicesClient() + + # Initialize request argument(s) + tls_route = network_services_v1.TlsRoute() + tls_route.name = "name_value" + tls_route.rules.action.destinations.service_name = "service_name_value" + + request = network_services_v1.UpdateTlsRouteRequest( + tls_route=tls_route, + ) + + # Make the request + operation = client.update_tls_route(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END networkservices_v1_generated_NetworkServices_UpdateTlsRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json b/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json new file mode 100644 index 000000000000..e3ddce6ac243 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json @@ -0,0 +1,6478 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.networkservices.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-network-services", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.create_endpoint_policy", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateEndpointPolicy", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateEndpointPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateEndpointPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "endpoint_policy", + "type": "google.cloud.network_services_v1.types.EndpointPolicy" + }, + { + "name": "endpoint_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_endpoint_policy" + }, + "description": "Sample for CreateEndpointPolicy", + "file": "networkservices_v1_generated_network_services_create_endpoint_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateEndpointPolicy_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_endpoint_policy_async.py" + }, 
+ { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.create_endpoint_policy", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateEndpointPolicy", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateEndpointPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateEndpointPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "endpoint_policy", + "type": "google.cloud.network_services_v1.types.EndpointPolicy" + }, + { + "name": "endpoint_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_endpoint_policy" + }, + "description": "Sample for CreateEndpointPolicy", + "file": "networkservices_v1_generated_network_services_create_endpoint_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateEndpointPolicy_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_endpoint_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.create_gateway", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateGateway", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateGatewayRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "gateway", + "type": "google.cloud.network_services_v1.types.Gateway" + }, + { + "name": "gateway_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_gateway" + }, + "description": "Sample for CreateGateway", + "file": "networkservices_v1_generated_network_services_create_gateway_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateGateway_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_gateway_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.create_gateway", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateGateway", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateGatewayRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "gateway", + "type": "google.cloud.network_services_v1.types.Gateway" + }, + { + "name": "gateway_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_gateway" + }, + "description": "Sample for CreateGateway", + "file": "networkservices_v1_generated_network_services_create_gateway_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateGateway_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_gateway_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.create_grpc_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateGrpcRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateGrpcRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateGrpcRouteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "grpc_route", + "type": "google.cloud.network_services_v1.types.GrpcRoute" + }, + { + "name": "grpc_route_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_grpc_route" + }, + "description": "Sample for CreateGrpcRoute", + "file": "networkservices_v1_generated_network_services_create_grpc_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateGrpcRoute_async", 
+ "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_grpc_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.create_grpc_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateGrpcRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateGrpcRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateGrpcRouteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "grpc_route", + "type": "google.cloud.network_services_v1.types.GrpcRoute" + }, + { + "name": "grpc_route_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_grpc_route" + }, + "description": "Sample for CreateGrpcRoute", + "file": "networkservices_v1_generated_network_services_create_grpc_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateGrpcRoute_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_grpc_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.create_http_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateHttpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateHttpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateHttpRouteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "http_route", + "type": "google.cloud.network_services_v1.types.HttpRoute" + }, + { + "name": "http_route_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_http_route" + }, + "description": "Sample 
for CreateHttpRoute", + "file": "networkservices_v1_generated_network_services_create_http_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateHttpRoute_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_http_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.create_http_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateHttpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateHttpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateHttpRouteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "http_route", + "type": "google.cloud.network_services_v1.types.HttpRoute" + }, + { + "name": "http_route_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_http_route" + }, + "description": "Sample for CreateHttpRoute", + "file": "networkservices_v1_generated_network_services_create_http_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateHttpRoute_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_http_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.create_mesh", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateMesh", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateMesh" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateMeshRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mesh", + "type": "google.cloud.network_services_v1.types.Mesh" + }, + { + "name": "mesh_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_mesh" + }, + "description": "Sample for CreateMesh", + "file": "networkservices_v1_generated_network_services_create_mesh_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateMesh_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_mesh_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.create_mesh", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateMesh", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateMesh" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateMeshRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "mesh", + "type": "google.cloud.network_services_v1.types.Mesh" + }, + { + "name": "mesh_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_mesh" + }, + "description": "Sample for CreateMesh", + "file": "networkservices_v1_generated_network_services_create_mesh_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateMesh_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_mesh_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.create_service_binding", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateServiceBinding", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateServiceBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateServiceBindingRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "service_binding", + "type": 
"google.cloud.network_services_v1.types.ServiceBinding" + }, + { + "name": "service_binding_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_service_binding" + }, + "description": "Sample for CreateServiceBinding", + "file": "networkservices_v1_generated_network_services_create_service_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateServiceBinding_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_service_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.create_service_binding", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateServiceBinding", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateServiceBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateServiceBindingRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "service_binding", + "type": "google.cloud.network_services_v1.types.ServiceBinding" + }, + { + "name": "service_binding_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_service_binding" + }, + "description": "Sample for CreateServiceBinding", + "file": "networkservices_v1_generated_network_services_create_service_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateServiceBinding_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_service_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.create_tcp_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateTcpRoute", + "service": { + 
"fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateTcpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateTcpRouteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tcp_route", + "type": "google.cloud.network_services_v1.types.TcpRoute" + }, + { + "name": "tcp_route_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_tcp_route" + }, + "description": "Sample for CreateTcpRoute", + "file": "networkservices_v1_generated_network_services_create_tcp_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateTcpRoute_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_tcp_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.create_tcp_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateTcpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateTcpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateTcpRouteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tcp_route", + "type": "google.cloud.network_services_v1.types.TcpRoute" + }, + { + "name": "tcp_route_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_tcp_route" + }, + "description": "Sample for CreateTcpRoute", + "file": "networkservices_v1_generated_network_services_create_tcp_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateTcpRoute_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_tcp_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": 
"NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.create_tls_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateTlsRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateTlsRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateTlsRouteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tls_route", + "type": "google.cloud.network_services_v1.types.TlsRoute" + }, + { + "name": "tls_route_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_tls_route" + }, + "description": "Sample for CreateTlsRoute", + "file": "networkservices_v1_generated_network_services_create_tls_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateTlsRoute_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_create_tls_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.create_tls_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.CreateTlsRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "CreateTlsRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateTlsRouteRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tls_route", + "type": "google.cloud.network_services_v1.types.TlsRoute" + }, + { + "name": "tls_route_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_tls_route" + }, + "description": "Sample for CreateTlsRoute", + "file": "networkservices_v1_generated_network_services_create_tls_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_CreateTlsRoute_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networkservices_v1_generated_network_services_create_tls_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.delete_endpoint_policy", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteEndpointPolicy", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteEndpointPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteEndpointPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_endpoint_policy" + }, + "description": "Sample for DeleteEndpointPolicy", + "file": "networkservices_v1_generated_network_services_delete_endpoint_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteEndpointPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_endpoint_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.delete_endpoint_policy", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteEndpointPolicy", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteEndpointPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteEndpointPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_endpoint_policy" + }, + "description": "Sample for DeleteEndpointPolicy", + "file": "networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteEndpointPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.delete_gateway", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteGateway", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteGatewayRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_gateway" + }, + "description": "Sample for DeleteGateway", + "file": "networkservices_v1_generated_network_services_delete_gateway_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteGateway_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_gateway_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.delete_gateway", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteGateway", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteGatewayRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_gateway" + }, + "description": "Sample for DeleteGateway", + "file": "networkservices_v1_generated_network_services_delete_gateway_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteGateway_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networkservices_v1_generated_network_services_delete_gateway_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.delete_grpc_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteGrpcRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteGrpcRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteGrpcRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_grpc_route" + }, + "description": "Sample for DeleteGrpcRoute", + "file": "networkservices_v1_generated_network_services_delete_grpc_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteGrpcRoute_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_grpc_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.delete_grpc_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteGrpcRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteGrpcRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteGrpcRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_grpc_route" + }, + "description": "Sample for DeleteGrpcRoute", + "file": "networkservices_v1_generated_network_services_delete_grpc_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteGrpcRoute_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networkservices_v1_generated_network_services_delete_grpc_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.delete_http_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteHttpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteHttpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteHttpRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_http_route" + }, + "description": "Sample for DeleteHttpRoute", + "file": "networkservices_v1_generated_network_services_delete_http_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteHttpRoute_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_http_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.delete_http_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteHttpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteHttpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteHttpRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_http_route" + }, + "description": "Sample for DeleteHttpRoute", + "file": "networkservices_v1_generated_network_services_delete_http_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteHttpRoute_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networkservices_v1_generated_network_services_delete_http_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.delete_mesh", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteMesh", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteMesh" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteMeshRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_mesh" + }, + "description": "Sample for DeleteMesh", + "file": "networkservices_v1_generated_network_services_delete_mesh_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteMesh_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_mesh_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.delete_mesh", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteMesh", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteMesh" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteMeshRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_mesh" + }, + "description": "Sample for DeleteMesh", + "file": "networkservices_v1_generated_network_services_delete_mesh_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteMesh_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_mesh_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.delete_service_binding", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteServiceBinding", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteServiceBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteServiceBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_service_binding" + }, + "description": "Sample for DeleteServiceBinding", + "file": "networkservices_v1_generated_network_services_delete_service_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteServiceBinding_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_service_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.delete_service_binding", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteServiceBinding", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteServiceBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteServiceBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_service_binding" + }, + "description": "Sample for DeleteServiceBinding", + "file": "networkservices_v1_generated_network_services_delete_service_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteServiceBinding_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_service_binding_sync.py" + }, + { + "canonical": 
true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.delete_tcp_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteTcpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteTcpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteTcpRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_tcp_route" + }, + "description": "Sample for DeleteTcpRoute", + "file": "networkservices_v1_generated_network_services_delete_tcp_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteTcpRoute_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_tcp_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.delete_tcp_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteTcpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteTcpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteTcpRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_tcp_route" + }, + "description": "Sample for DeleteTcpRoute", + "file": "networkservices_v1_generated_network_services_delete_tcp_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteTcpRoute_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_tcp_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.delete_tls_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteTlsRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteTlsRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteTlsRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_tls_route" + }, + "description": "Sample for DeleteTlsRoute", + "file": "networkservices_v1_generated_network_services_delete_tls_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteTlsRoute_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_tls_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.delete_tls_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.DeleteTlsRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "DeleteTlsRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.DeleteTlsRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_tls_route" + }, + "description": "Sample for DeleteTlsRoute", + "file": "networkservices_v1_generated_network_services_delete_tls_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_DeleteTlsRoute_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_delete_tls_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.get_endpoint_policy", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetEndpointPolicy", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetEndpointPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetEndpointPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.EndpointPolicy", + "shortName": "get_endpoint_policy" + }, + "description": "Sample for GetEndpointPolicy", + "file": "networkservices_v1_generated_network_services_get_endpoint_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetEndpointPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_endpoint_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.get_endpoint_policy", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetEndpointPolicy", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetEndpointPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetEndpointPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.EndpointPolicy", + "shortName": "get_endpoint_policy" + }, + "description": "Sample for GetEndpointPolicy", + "file": "networkservices_v1_generated_network_services_get_endpoint_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetEndpointPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_endpoint_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.get_gateway", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetGateway", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetGatewayRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.Gateway", + "shortName": "get_gateway" + }, + "description": "Sample for GetGateway", + "file": "networkservices_v1_generated_network_services_get_gateway_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetGateway_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_gateway_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.get_gateway", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetGateway", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetGatewayRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.Gateway", + "shortName": "get_gateway" + }, + "description": "Sample for GetGateway", + "file": "networkservices_v1_generated_network_services_get_gateway_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetGateway_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_gateway_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + 
"fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.get_grpc_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetGrpcRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetGrpcRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetGrpcRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.GrpcRoute", + "shortName": "get_grpc_route" + }, + "description": "Sample for GetGrpcRoute", + "file": "networkservices_v1_generated_network_services_get_grpc_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetGrpcRoute_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_grpc_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.get_grpc_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetGrpcRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetGrpcRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetGrpcRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.GrpcRoute", + "shortName": "get_grpc_route" + }, + "description": "Sample for GetGrpcRoute", + "file": "networkservices_v1_generated_network_services_get_grpc_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetGrpcRoute_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_grpc_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": 
"google.cloud.network_services_v1.NetworkServicesAsyncClient.get_http_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetHttpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetHttpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetHttpRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.HttpRoute", + "shortName": "get_http_route" + }, + "description": "Sample for GetHttpRoute", + "file": "networkservices_v1_generated_network_services_get_http_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetHttpRoute_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_http_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.get_http_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetHttpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetHttpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetHttpRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.HttpRoute", + "shortName": "get_http_route" + }, + "description": "Sample for GetHttpRoute", + "file": "networkservices_v1_generated_network_services_get_http_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetHttpRoute_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_http_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.get_mesh", + "method": { + 
"fullName": "google.cloud.networkservices.v1.NetworkServices.GetMesh", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetMesh" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetMeshRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.Mesh", + "shortName": "get_mesh" + }, + "description": "Sample for GetMesh", + "file": "networkservices_v1_generated_network_services_get_mesh_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetMesh_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_mesh_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.get_mesh", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetMesh", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetMesh" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetMeshRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.Mesh", + "shortName": "get_mesh" + }, + "description": "Sample for GetMesh", + "file": "networkservices_v1_generated_network_services_get_mesh_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetMesh_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_mesh_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.get_service_binding", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetServiceBinding", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": 
"NetworkServices" + }, + "shortName": "GetServiceBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetServiceBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.ServiceBinding", + "shortName": "get_service_binding" + }, + "description": "Sample for GetServiceBinding", + "file": "networkservices_v1_generated_network_services_get_service_binding_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetServiceBinding_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_service_binding_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.get_service_binding", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetServiceBinding", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetServiceBinding" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetServiceBindingRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.ServiceBinding", + "shortName": "get_service_binding" + }, + "description": "Sample for GetServiceBinding", + "file": "networkservices_v1_generated_network_services_get_service_binding_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetServiceBinding_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_service_binding_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.get_tcp_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetTcpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": 
"NetworkServices" + }, + "shortName": "GetTcpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetTcpRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.TcpRoute", + "shortName": "get_tcp_route" + }, + "description": "Sample for GetTcpRoute", + "file": "networkservices_v1_generated_network_services_get_tcp_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetTcpRoute_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_tcp_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.get_tcp_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetTcpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetTcpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetTcpRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.TcpRoute", + "shortName": "get_tcp_route" + }, + "description": "Sample for GetTcpRoute", + "file": "networkservices_v1_generated_network_services_get_tcp_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetTcpRoute_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_tcp_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.get_tls_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetTlsRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetTlsRoute" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.network_services_v1.types.GetTlsRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.TlsRoute", + "shortName": "get_tls_route" + }, + "description": "Sample for GetTlsRoute", + "file": "networkservices_v1_generated_network_services_get_tls_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetTlsRoute_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_tls_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.get_tls_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.GetTlsRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "GetTlsRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetTlsRouteRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.types.TlsRoute", + "shortName": "get_tls_route" + }, + "description": "Sample for GetTlsRoute", + "file": "networkservices_v1_generated_network_services_get_tls_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_GetTlsRoute_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_get_tls_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.list_endpoint_policies", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListEndpointPolicies", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListEndpointPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListEndpointPoliciesRequest" + 
}, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListEndpointPoliciesAsyncPager", + "shortName": "list_endpoint_policies" + }, + "description": "Sample for ListEndpointPolicies", + "file": "networkservices_v1_generated_network_services_list_endpoint_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListEndpointPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_endpoint_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.list_endpoint_policies", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListEndpointPolicies", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListEndpointPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListEndpointPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListEndpointPoliciesPager", + "shortName": "list_endpoint_policies" + }, + "description": "Sample for ListEndpointPolicies", + "file": "networkservices_v1_generated_network_services_list_endpoint_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListEndpointPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_endpoint_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.list_gateways", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListGateways", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListGateways" + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListGatewaysRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListGatewaysAsyncPager", + "shortName": "list_gateways" + }, + "description": "Sample for ListGateways", + "file": "networkservices_v1_generated_network_services_list_gateways_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListGateways_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_gateways_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.list_gateways", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListGateways", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListGateways" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListGatewaysRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListGatewaysPager", + "shortName": "list_gateways" + }, + "description": "Sample for ListGateways", + "file": "networkservices_v1_generated_network_services_list_gateways_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListGateways_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_gateways_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.list_grpc_routes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListGrpcRoutes", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListGrpcRoutes" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListGrpcRoutesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListGrpcRoutesAsyncPager", + "shortName": "list_grpc_routes" + }, + "description": "Sample for ListGrpcRoutes", + "file": "networkservices_v1_generated_network_services_list_grpc_routes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListGrpcRoutes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_grpc_routes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.list_grpc_routes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListGrpcRoutes", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListGrpcRoutes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListGrpcRoutesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListGrpcRoutesPager", + "shortName": "list_grpc_routes" + }, + "description": "Sample for ListGrpcRoutes", + "file": "networkservices_v1_generated_network_services_list_grpc_routes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListGrpcRoutes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_grpc_routes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.list_http_routes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListHttpRoutes", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + 
"shortName": "ListHttpRoutes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListHttpRoutesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListHttpRoutesAsyncPager", + "shortName": "list_http_routes" + }, + "description": "Sample for ListHttpRoutes", + "file": "networkservices_v1_generated_network_services_list_http_routes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListHttpRoutes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_http_routes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.list_http_routes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListHttpRoutes", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListHttpRoutes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListHttpRoutesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListHttpRoutesPager", + "shortName": "list_http_routes" + }, + "description": "Sample for ListHttpRoutes", + "file": "networkservices_v1_generated_network_services_list_http_routes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListHttpRoutes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_http_routes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.list_meshes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListMeshes", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": 
"NetworkServices" + }, + "shortName": "ListMeshes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListMeshesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListMeshesAsyncPager", + "shortName": "list_meshes" + }, + "description": "Sample for ListMeshes", + "file": "networkservices_v1_generated_network_services_list_meshes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListMeshes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_meshes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.list_meshes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListMeshes", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListMeshes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListMeshesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListMeshesPager", + "shortName": "list_meshes" + }, + "description": "Sample for ListMeshes", + "file": "networkservices_v1_generated_network_services_list_meshes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListMeshes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_meshes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.list_service_bindings", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListServiceBindings", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": 
"ListServiceBindings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListServiceBindingsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListServiceBindingsAsyncPager", + "shortName": "list_service_bindings" + }, + "description": "Sample for ListServiceBindings", + "file": "networkservices_v1_generated_network_services_list_service_bindings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListServiceBindings_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_service_bindings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.list_service_bindings", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListServiceBindings", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListServiceBindings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListServiceBindingsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListServiceBindingsPager", + "shortName": "list_service_bindings" + }, + "description": "Sample for ListServiceBindings", + "file": "networkservices_v1_generated_network_services_list_service_bindings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListServiceBindings_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_service_bindings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.list_tcp_routes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListTcpRoutes", + "service": 
{ + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListTcpRoutes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListTcpRoutesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListTcpRoutesAsyncPager", + "shortName": "list_tcp_routes" + }, + "description": "Sample for ListTcpRoutes", + "file": "networkservices_v1_generated_network_services_list_tcp_routes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListTcpRoutes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_tcp_routes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.list_tcp_routes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListTcpRoutes", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListTcpRoutes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListTcpRoutesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListTcpRoutesPager", + "shortName": "list_tcp_routes" + }, + "description": "Sample for ListTcpRoutes", + "file": "networkservices_v1_generated_network_services_list_tcp_routes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListTcpRoutes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_tcp_routes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.list_tls_routes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListTlsRoutes", + 
"service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListTlsRoutes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListTlsRoutesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListTlsRoutesAsyncPager", + "shortName": "list_tls_routes" + }, + "description": "Sample for ListTlsRoutes", + "file": "networkservices_v1_generated_network_services_list_tls_routes_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListTlsRoutes_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_tls_routes_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.list_tls_routes", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.ListTlsRoutes", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "ListTlsRoutes" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.ListTlsRoutesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.network_services_v1.services.network_services.pagers.ListTlsRoutesPager", + "shortName": "list_tls_routes" + }, + "description": "Sample for ListTlsRoutes", + "file": "networkservices_v1_generated_network_services_list_tls_routes_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_ListTlsRoutes_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_list_tls_routes_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.update_endpoint_policy", + "method": { + "fullName": 
"google.cloud.networkservices.v1.NetworkServices.UpdateEndpointPolicy", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateEndpointPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateEndpointPolicyRequest" + }, + { + "name": "endpoint_policy", + "type": "google.cloud.network_services_v1.types.EndpointPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_endpoint_policy" + }, + "description": "Sample for UpdateEndpointPolicy", + "file": "networkservices_v1_generated_network_services_update_endpoint_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateEndpointPolicy_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_endpoint_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.update_endpoint_policy", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateEndpointPolicy", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateEndpointPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateEndpointPolicyRequest" + }, + { + "name": "endpoint_policy", + "type": "google.cloud.network_services_v1.types.EndpointPolicy" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_endpoint_policy" + }, + "description": "Sample for UpdateEndpointPolicy", + "file": "networkservices_v1_generated_network_services_update_endpoint_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateEndpointPolicy_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networkservices_v1_generated_network_services_update_endpoint_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.update_gateway", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateGateway", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateGatewayRequest" + }, + { + "name": "gateway", + "type": "google.cloud.network_services_v1.types.Gateway" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_gateway" + }, + "description": "Sample for UpdateGateway", + "file": "networkservices_v1_generated_network_services_update_gateway_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateGateway_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_gateway_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.update_gateway", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateGateway", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateGateway" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateGatewayRequest" + }, + { + "name": "gateway", + "type": "google.cloud.network_services_v1.types.Gateway" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_gateway" + }, + "description": "Sample for UpdateGateway", + "file": "networkservices_v1_generated_network_services_update_gateway_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateGateway_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_gateway_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.update_grpc_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateGrpcRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateGrpcRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateGrpcRouteRequest" + }, + { + "name": "grpc_route", + "type": "google.cloud.network_services_v1.types.GrpcRoute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_grpc_route" + }, + "description": "Sample for UpdateGrpcRoute", + "file": "networkservices_v1_generated_network_services_update_grpc_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateGrpcRoute_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_grpc_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.update_grpc_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateGrpcRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateGrpcRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateGrpcRouteRequest" + }, + { + "name": "grpc_route", + "type": "google.cloud.network_services_v1.types.GrpcRoute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_grpc_route" + }, + "description": "Sample for UpdateGrpcRoute", + "file": "networkservices_v1_generated_network_services_update_grpc_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateGrpcRoute_sync", + 
"segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_grpc_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.update_http_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateHttpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateHttpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateHttpRouteRequest" + }, + { + "name": "http_route", + "type": "google.cloud.network_services_v1.types.HttpRoute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_http_route" + }, + "description": "Sample for UpdateHttpRoute", + "file": "networkservices_v1_generated_network_services_update_http_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateHttpRoute_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_http_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.update_http_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateHttpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateHttpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateHttpRouteRequest" + }, + { + "name": "http_route", + "type": "google.cloud.network_services_v1.types.HttpRoute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_http_route" + }, + "description": "Sample for 
UpdateHttpRoute", + "file": "networkservices_v1_generated_network_services_update_http_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateHttpRoute_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_http_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.update_mesh", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateMesh", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateMesh" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateMeshRequest" + }, + { + "name": "mesh", + "type": "google.cloud.network_services_v1.types.Mesh" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_mesh" + }, + "description": "Sample for UpdateMesh", + "file": "networkservices_v1_generated_network_services_update_mesh_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateMesh_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_mesh_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.update_mesh", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateMesh", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateMesh" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateMeshRequest" + }, + { + "name": "mesh", + "type": "google.cloud.network_services_v1.types.Mesh" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_mesh" + }, + "description": "Sample for UpdateMesh", + "file": "networkservices_v1_generated_network_services_update_mesh_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateMesh_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_mesh_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.update_tcp_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateTcpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateTcpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateTcpRouteRequest" + }, + { + "name": "tcp_route", + "type": "google.cloud.network_services_v1.types.TcpRoute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_tcp_route" + }, + "description": "Sample for UpdateTcpRoute", + "file": "networkservices_v1_generated_network_services_update_tcp_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateTcpRoute_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_tcp_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.update_tcp_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateTcpRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateTcpRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateTcpRouteRequest" + }, + { + "name": "tcp_route", + "type": "google.cloud.network_services_v1.types.TcpRoute" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_tcp_route" + }, + "description": "Sample for UpdateTcpRoute", + "file": "networkservices_v1_generated_network_services_update_tcp_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateTcpRoute_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_tcp_route_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient", + "shortName": "NetworkServicesAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesAsyncClient.update_tls_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateTlsRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateTlsRoute" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateTlsRouteRequest" + }, + { + "name": "tls_route", + "type": "google.cloud.network_services_v1.types.TlsRoute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_tls_route" + }, + "description": "Sample for UpdateTlsRoute", + "file": "networkservices_v1_generated_network_services_update_tls_route_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateTlsRoute_async", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_tls_route_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.NetworkServicesClient", + "shortName": "NetworkServicesClient" + }, + "fullName": "google.cloud.network_services_v1.NetworkServicesClient.update_tls_route", + "method": { + "fullName": "google.cloud.networkservices.v1.NetworkServices.UpdateTlsRoute", + "service": { + "fullName": "google.cloud.networkservices.v1.NetworkServices", + "shortName": "NetworkServices" + }, + "shortName": "UpdateTlsRoute" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.network_services_v1.types.UpdateTlsRouteRequest" + }, + { + "name": "tls_route", + "type": "google.cloud.network_services_v1.types.TlsRoute" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_tls_route" + }, + "description": "Sample for UpdateTlsRoute", + "file": "networkservices_v1_generated_network_services_update_tls_route_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_NetworkServices_UpdateTlsRoute_sync", + "segments": [ + { + "end": 59, + "start": 27, + "type": "FULL" + }, + { + "end": 59, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 56, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 60, + "start": 57, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_network_services_update_tls_route_sync.py" + } + ] +} diff --git a/packages/google-cloud-os-config/.OwlBot.yaml b/packages/google-cloud-os-config/.OwlBot.yaml new file mode 100644 index 000000000000..a2e75aa79ef0 --- /dev/null +++ b/packages/google-cloud-os-config/.OwlBot.yaml @@ -0,0 +1,27 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-preserve-regex: + - /owl-bot-staging/google-cloud-os-config/v1beta + +deep-copy-regex: + - source: /google/cloud/osconfig/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-os-config/$1 + +begin-after-commit-hash: b06c9034cfcbce180ba732d03be6526e5c8ea1bc + diff --git a/packages/google-cloud-os-config/.coveragerc b/packages/google-cloud-os-config/.coveragerc new file mode 100644 index 000000000000..4dfbdbe232dc --- /dev/null +++ b/packages/google-cloud-os-config/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/osconfig/__init__.py + google/cloud/osconfig/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-os-config/.eggs/README.txt b/packages/google-cloud-os-config/.eggs/README.txt new file mode 100644 index 000000000000..5d01668824f4 --- /dev/null +++ b/packages/google-cloud-os-config/.eggs/README.txt @@ -0,0 +1,6 @@ +This directory contains eggs that were downloaded by setuptools to build, test, and run plug-ins. + +This directory caches those eggs to prevent repeated downloads. + +However, it is safe to delete this directory. 
+ diff --git a/packages/google-cloud-os-config/.flake8 b/packages/google-cloud-os-config/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-os-config/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-os-config/.gitignore b/packages/google-cloud-os-config/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-os-config/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-os-config/.repo-metadata.json b/packages/google-cloud-os-config/.repo-metadata.json new file mode 100644 index 000000000000..13a22b506572 --- /dev/null +++ b/packages/google-cloud-os-config/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "osconfig", + "name_pretty": "OS Config", + "product_documentation": "https://cloud.google.com/compute/docs/manage-os", + "client_documentation": "https://cloud.google.com/python/docs/reference/osconfig/latest", + "issue_tracker": "", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-os-config", + "api_id": "osconfig.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "osconfig", + "api_description": "provides OS management tools that can be used for patch management, patch compliance, and configuration management on VM instances." 
+} diff --git a/packages/google-cloud-os-config/CHANGELOG.md b/packages/google-cloud-os-config/CHANGELOG.md new file mode 100644 index 000000000000..4bb1b481f86a --- /dev/null +++ b/packages/google-cloud-os-config/CHANGELOG.md @@ -0,0 +1,343 @@ +# Changelog + +## [1.15.2](https://github.com/googleapis/python-os-config/compare/v1.15.1...v1.15.2) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#280](https://github.com/googleapis/python-os-config/issues/280)) ([ba03471](https://github.com/googleapis/python-os-config/commit/ba0347119e6e14ea6a30793a1499ceaf60e46b56)) + +## [1.15.1](https://github.com/googleapis/python-os-config/compare/v1.15.0...v1.15.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#272](https://github.com/googleapis/python-os-config/issues/272)) ([4fdfa8b](https://github.com/googleapis/python-os-config/commit/4fdfa8bb23c85bfb6785aa6e9450fdb042a3fb83)) + +## [1.15.0](https://github.com/googleapis/python-os-config/compare/v1.14.1...v1.15.0) (2023-02-21) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#266](https://github.com/googleapis/python-os-config/issues/266)) ([679246f](https://github.com/googleapis/python-os-config/commit/679246ff80d7768e23fbdc7375515ecd6c1684b3)) + +## [1.14.1](https://github.com/googleapis/python-os-config/compare/v1.14.0...v1.14.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([97b46fa](https://github.com/googleapis/python-os-config/commit/97b46fa4d0bb76692202b9645d6489cc5e97b6b8)) + + +### Documentation + +* Add documentation for enums ([97b46fa](https://github.com/googleapis/python-os-config/commit/97b46fa4d0bb76692202b9645d6489cc5e97b6b8)) + +## [1.14.0](https://github.com/googleapis/python-os-config/compare/v1.13.0...v1.14.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#257](https://github.com/googleapis/python-os-config/issues/257)) ([fa3a63f](https://github.com/googleapis/python-os-config/commit/fa3a63f72b1c1c6603655aae1c1457638f53b489)) + +## [1.13.0](https://github.com/googleapis/python-os-config/compare/v1.12.4...v1.13.0) (2022-12-13) + + +### Features + +* Add support for `google.cloud.osconfig.__version__` ([0980ecc](https://github.com/googleapis/python-os-config/commit/0980eccc0b4d528719b13849f2ff9bb2707e4a21)) +* Add typing to proto.Message based class attributes ([0980ecc](https://github.com/googleapis/python-os-config/commit/0980eccc0b4d528719b13849f2ff9bb2707e4a21)) + + +### Bug Fixes + +* Add dict typing for client_options ([0980ecc](https://github.com/googleapis/python-os-config/commit/0980eccc0b4d528719b13849f2ff9bb2707e4a21)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([326bd8c](https://github.com/googleapis/python-os-config/commit/326bd8cc374f342187beb50f95308408fe839986)) +* Drop usage of pkg_resources ([326bd8c](https://github.com/googleapis/python-os-config/commit/326bd8cc374f342187beb50f95308408fe839986)) +* Fix timeout default values ([326bd8c](https://github.com/googleapis/python-os-config/commit/326bd8cc374f342187beb50f95308408fe839986)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([0980ecc](https://github.com/googleapis/python-os-config/commit/0980eccc0b4d528719b13849f2ff9bb2707e4a21)) +* **samples:** Snippetgen should call
await on the operation coroutine before calling result ([326bd8c](https://github.com/googleapis/python-os-config/commit/326bd8cc374f342187beb50f95308408fe839986)) + +## [1.12.4](https://github.com/googleapis/python-os-config/compare/v1.12.3...v1.12.4) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#245](https://github.com/googleapis/python-os-config/issues/245)) ([adeb2d1](https://github.com/googleapis/python-os-config/commit/adeb2d1bc30f88c032a1079f9469046e6ca5b5f1)) + +## [1.12.3](https://github.com/googleapis/python-os-config/compare/v1.12.2...v1.12.3) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#243](https://github.com/googleapis/python-os-config/issues/243)) ([73c75aa](https://github.com/googleapis/python-os-config/commit/73c75aa2a64e2accb6093dd8b5011370fb02bfe4)) + +## [1.12.2](https://github.com/googleapis/python-os-config/compare/v1.12.1...v1.12.2) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#227](https://github.com/googleapis/python-os-config/issues/227)) ([c3a2047](https://github.com/googleapis/python-os-config/commit/c3a2047ac58a718e6bc34b6f6f43f48e42b65f92)) +* **deps:** require proto-plus >= 1.22.0 ([c3a2047](https://github.com/googleapis/python-os-config/commit/c3a2047ac58a718e6bc34b6f6f43f48e42b65f92)) + +## [1.12.1](https://github.com/googleapis/python-os-config/compare/v1.12.0...v1.12.1) (2022-07-13) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#218](https://github.com/googleapis/python-os-config/issues/218)) ([50330f2](https://github.com/googleapis/python-os-config/commit/50330f24ebe9bcc6851725ad6669331a9349020b)) + +## [1.12.0](https://github.com/googleapis/python-os-config/compare/v1.11.2...v1.12.0) (2022-07-07) + + +### Features + +* add audience parameter ([d700d11](https://github.com/googleapis/python-os-config/commit/d700d1171f2ceab7aaf362eeff385c9733cf456c)) + + +### Bug Fixes + +* **deps:** require google-api-core >= 2.8.0 ([#213](https://github.com/googleapis/python-os-config/issues/213)) ([d700d11](https://github.com/googleapis/python-os-config/commit/d700d1171f2ceab7aaf362eeff385c9733cf456c)) +* require python 3.7+ ([#215](https://github.com/googleapis/python-os-config/issues/215)) ([397e04d](https://github.com/googleapis/python-os-config/commit/397e04d1b76dc8d5145109140ac41be84609adac)) + +## [1.11.2](https://github.com/googleapis/python-os-config/compare/v1.11.1...v1.11.2) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#205](https://github.com/googleapis/python-os-config/issues/205)) ([14b8400](https://github.com/googleapis/python-os-config/commit/14b8400e2f23158f402f18b4af7c5e1fac0b35c5)) + + +### Documentation + +* fix changelog header to consistent size ([#206](https://github.com/googleapis/python-os-config/issues/206)) ([17b8505](https://github.com/googleapis/python-os-config/commit/17b850599b0da9625dc1dcd26064f17f30448448)) + +## [1.11.1](https://github.com/googleapis/python-os-config/compare/v1.11.0...v1.11.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#174](https://github.com/googleapis/python-os-config/issues/174)) ([a47fc64](https://github.com/googleapis/python-os-config/commit/a47fc64945a6f21bbd42bcd4bf3191c245e04b61)) + +## [1.11.0](https://github.com/googleapis/python-os-config/compare/v1.10.0...v1.11.0) (2022-02-26) + + +### Features + +* Add existing os_policy_assignment_reports.proto ([eb6bbb7](https://github.com/googleapis/python-os-config/commit/eb6bbb7a5e99dc22377d49a4c739c2a22f8e0558)) +* Add GetOsPolicyAssignmentReport and analogous List rpc method ([eb6bbb7](https://github.com/googleapis/python-os-config/commit/eb6bbb7a5e99dc22377d49a4c739c2a22f8e0558)) +* Add Inventory to InstanceFilter ([eb6bbb7](https://github.com/googleapis/python-os-config/commit/eb6bbb7a5e99dc22377d49a4c739c2a22f8e0558)) +* Add item that is affected by vulnerability ([eb6bbb7](https://github.com/googleapis/python-os-config/commit/eb6bbb7a5e99dc22377d49a4c739c2a22f8e0558)) + + +### Bug Fixes + +* Mark methods as deprecated ([eb6bbb7](https://github.com/googleapis/python-os-config/commit/eb6bbb7a5e99dc22377d49a4c739c2a22f8e0558)) + + +### Documentation + +* add generated snippets ([eb6bbb7](https://github.com/googleapis/python-os-config/commit/eb6bbb7a5e99dc22377d49a4c739c2a22f8e0558)) + +## [1.10.0](https://github.com/googleapis/python-os-config/compare/v1.9.0...v1.10.0) (2022-02-08) + + +### Features + +* add ability to change the state of a patch deployment ([#163](https://github.com/googleapis/python-os-config/issues/163)) ([fbf55b5](https://github.com/googleapis/python-os-config/commit/fbf55b5abbe63d946d9fea4a335a9edd5495ad86)) +* add api key support ([#158](https://github.com/googleapis/python-os-config/issues/158)) ([4de2275](https://github.com/googleapis/python-os-config/commit/4de22754f2d45bfce7c20b97f0a6a6a2f30b7a97)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([e7f8549](https://github.com/googleapis/python-os-config/commit/e7f854968db11231c3f5a8d4469e66a9cfc196e3)) + + +### Documentation + +* add autogenerated code snippets ([fbf55b5](https://github.com/googleapis/python-os-config/commit/fbf55b5abbe63d946d9fea4a335a9edd5495ad86)) + +## [1.9.0](https://www.github.com/googleapis/python-os-config/compare/v1.8.0...v1.9.0) (2021-11-11) + + +### Features + +* **v1:** Add OS inventory item ([#147](https://www.github.com/googleapis/python-os-config/issues/147)) ([14102d9](https://www.github.com/googleapis/python-os-config/commit/14102d97b78f5d9c10aca07452f11112d53a1788)) + +## [1.8.0](https://www.github.com/googleapis/python-os-config/compare/v1.7.1...v1.8.0) (2021-11-04) + + +### Features + +* add OS policy assignment rpcs ([#142](https://www.github.com/googleapis/python-os-config/issues/142)) ([44c158d](https://www.github.com/googleapis/python-os-config/commit/44c158dd19c4329678e170733377494821ca955f)) + +## [1.7.1](https://www.github.com/googleapis/python-os-config/compare/v1.7.0...v1.7.1) (2021-11-02) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([df69ccc](https://www.github.com/googleapis/python-os-config/commit/df69ccc66f45ccb4e94fbe8251c3f58e744fcf6b)) +* **deps:** require google-api-core >= 1.28.0 ([df69ccc](https://www.github.com/googleapis/python-os-config/commit/df69ccc66f45ccb4e94fbe8251c3f58e744fcf6b)) + + +### Documentation + +* list oneofs in docstring ([df69ccc](https://www.github.com/googleapis/python-os-config/commit/df69ccc66f45ccb4e94fbe8251c3f58e744fcf6b)) + +## [1.7.0](https://www.github.com/googleapis/python-os-config/compare/v1.6.0...v1.7.0) (2021-10-18) + + +### Features + +* add support for python 3.10
([#133](https://www.github.com/googleapis/python-os-config/issues/133)) ([44e23f4](https://www.github.com/googleapis/python-os-config/commit/44e23f4b82fad2079b79366670b8a14002a37d68)) +* Update RecurringSchedule.Frequency with DAILY frequency ([#137](https://www.github.com/googleapis/python-os-config/issues/137)) ([75b232e](https://www.github.com/googleapis/python-os-config/commit/75b232e9ca86beeb6a9d2a9f45629e2ffa458c6d)) + +## [1.6.0](https://www.github.com/googleapis/python-os-config/compare/v1.5.2...v1.6.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#129](https://www.github.com/googleapis/python-os-config/issues/129)) ([b207115](https://www.github.com/googleapis/python-os-config/commit/b207115ed97544585c5f0dc7512d71fd94b5aae2)) + +## [1.5.2](https://www.github.com/googleapis/python-os-config/compare/v1.5.1...v1.5.2) (2021-09-30) + + +### Bug Fixes + +* improper types in pagers generation ([e6d6242](https://www.github.com/googleapis/python-os-config/commit/e6d62422e555f33cd5107eb59073c8d88d292681)) + +## [1.5.1](https://www.github.com/googleapis/python-os-config/compare/v1.5.0...v1.5.1) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([911871e](https://www.github.com/googleapis/python-os-config/commit/911871e8154fdb16a6e182764563864cf2235153)) + +## [1.5.0](https://www.github.com/googleapis/python-os-config/compare/v1.4.0...v1.5.0) (2021-09-07) + + +### Features + +* add OSConfigZonalService API ([#116](https://www.github.com/googleapis/python-os-config/issues/116)) ([72bb90f](https://www.github.com/googleapis/python-os-config/commit/72bb90f67be410d981854f9a5f34fd31b1934693)) + +## [1.4.0](https://www.github.com/googleapis/python-os-config/compare/v1.3.2...v1.4.0) (2021-08-30) + + +### Features + +* Update osconfig v1 and v1alpha with WindowsApplication ([#108](https://www.github.com/googleapis/python-os-config/issues/108)) ([befbfdc](https://www.github.com/googleapis/python-os-config/commit/befbfdcd6bffdc402330bd0b715593ac788bd3b0)) + +## [1.3.2](https://www.github.com/googleapis/python-os-config/compare/v1.3.1...v1.3.2) (2021-07-28) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#101](https://www.github.com/googleapis/python-os-config/issues/101)) ([5f6c367](https://www.github.com/googleapis/python-os-config/commit/5f6c367753fb780f15ff38245b2c85387e01965e)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#97](https://www.github.com/googleapis/python-os-config/issues/97)) ([404adc3](https://www.github.com/googleapis/python-os-config/commit/404adc3419aaa40b0b66f55fc3ed92758287816b)) + + +### Miscellaneous Chores + +* release as 1.3.2 ([#102](https://www.github.com/googleapis/python-os-config/issues/102)) ([7c642b0](https://www.github.com/googleapis/python-os-config/commit/7c642b0eb32171275ee47db7ab64900176d0a4a1)) + +## [1.3.1](https://www.github.com/googleapis/python-os-config/compare/v1.3.0...v1.3.1) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#96](https://www.github.com/googleapis/python-os-config/issues/96)) ([022e149](https://www.github.com/googleapis/python-os-config/commit/022e149322e719465f1b0b66850def2b94c42eb1)) + +## [1.3.0](https://www.github.com/googleapis/python-os-config/compare/v1.2.0...v1.3.0) (2021-07-14) + + +### Features + +* add always_use_jwt_access ([#88](https://www.github.com/googleapis/python-os-config/issues/88)) 
([abb4837](https://www.github.com/googleapis/python-os-config/commit/abb48378d71deab058958c3b3b1efff5c253c99e)) + + +### Bug Fixes + +* disable always_use_jwt_access ([#92](https://www.github.com/googleapis/python-os-config/issues/92)) ([5d8a4bb](https://www.github.com/googleapis/python-os-config/commit/5d8a4bb9ef477f8fd81344fbd02631ac31660169)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-os-config/issues/1127)) ([#83](https://www.github.com/googleapis/python-os-config/issues/83)) ([b9fc494](https://www.github.com/googleapis/python-os-config/commit/b9fc4948a320fcf6a7154a2c7a1476cc78736c4d)) + +## [1.2.0](https://www.github.com/googleapis/python-os-config/compare/v1.1.0...v1.2.0) (2021-06-09) + + +### Features + +* release as GA ([#46](https://www.github.com/googleapis/python-os-config/issues/46)) ([d5aece9](https://www.github.com/googleapis/python-os-config/commit/d5aece996ff225dc747e7c59978576bfcb79a3d1)) +* support self-signed JWT flow for service accounts ([6fbaf4b](https://www.github.com/googleapis/python-os-config/commit/6fbaf4bb16b0bb381edf13957b85297c1659a206)) +* add v1alpha ([#80](https://www.github.com/googleapis/python-os-config/issues/80)) ([493ac75](https://www.github.com/googleapis/python-os-config/commit/493ac75a5fec0185fa15415fe4feffe0c36ca7e9)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([6fbaf4b](https://www.github.com/googleapis/python-os-config/commit/6fbaf4bb16b0bb381edf13957b85297c1659a206)) +* **deps:** add packaging requirement ([#72](https://www.github.com/googleapis/python-os-config/issues/72)) ([44e0947](https://www.github.com/googleapis/python-os-config/commit/44e09479922f8569b8d95657009e7c806eb101f9)) + + +### Documentation + +* fix sphinx identifiers ([#52](https://www.github.com/googleapis/python-os-config/issues/52)) ([940916d](https://www.github.com/googleapis/python-os-config/commit/940916de78ac19bea3f63f75ce073648f920c70b)) + +## [1.1.0](https://www.github.com/googleapis/python-os-config/compare/v1.0.0...v1.1.0) (2021-02-12) + + +### Features + +* add `from_service_account_info` ([#31](https://www.github.com/googleapis/python-os-config/issues/31)) ([d8d921f](https://www.github.com/googleapis/python-os-config/commit/d8d921fc28d294039c574e4dc327fbe1caa27337)) + + +### Bug Fixes + +* remove client side receive limits ([#29](https://www.github.com/googleapis/python-os-config/issues/29)) ([628ada4](https://www.github.com/googleapis/python-os-config/commit/628ada4004b1add04f5c2d95b9b1cad48616cf2c)) + +## [1.0.0](https://www.github.com/googleapis/python-os-config/compare/v0.1.2...v1.0.0) (2020-11-18) + + +### ⚠ BREAKING CHANGES + +* rename attributes that conflict with builtins (#24) + * `Instance.type` ->`Instance.type_` + * `GcsObject.object` -> `GcsObject.object_` + * `PatchInstanceFilter.all` -> `PatchInstanceFilter.all_` + +### Features + +* add async client ([#8](https://www.github.com/googleapis/python-os-config/issues/8)) ([33f46ba](https://www.github.com/googleapis/python-os-config/commit/33f46ba4aa34e066a70a5ad792254574b5985f83)) +* add patch rollout to patch deployments ([#24](https://www.github.com/googleapis/python-os-config/issues/24)) ([4d8605e](https://www.github.com/googleapis/python-os-config/commit/4d8605e2d92af271b2c363490926689266c1d4b6)) +* add common resource path helpers ([#24](https://www.github.com/googleapis/python-os-config/issues/24)) 
([4d8605e](https://www.github.com/googleapis/python-os-config/commit/4d8605e2d92af271b2c363490926689266c1d4b6)) +* make client transport public ([#24](https://www.github.com/googleapis/python-os-config/issues/24)) ([4d8605e](https://www.github.com/googleapis/python-os-config/commit/4d8605e2d92af271b2c363490926689266c1d4b6)) +--- +## [0.1.2](https://www.github.com/googleapis/python-os-config/compare/v0.1.1...v0.1.2) (2020-06-11) + + +### Bug Fixes + +* remove duplicate version ([#6](https://www.github.com/googleapis/python-os-config/issues/6)) ([351b553](https://www.github.com/googleapis/python-os-config/commit/351b5531244bb207fc6696625dbeaf840e7a469f)) + +## [0.1.1](https://www.github.com/googleapis/python-os-config/compare/v0.1.0...v0.1.1) (2020-06-11) + + +### Bug Fixes + +* fix documentation links ([#2](https://www.github.com/googleapis/python-os-config/issues/2)) ([9d71787](https://www.github.com/googleapis/python-os-config/commit/9d717874d310d40efdb8f2a316521ea90e8c0e63)) + +## 0.1.0 (2020-06-10) + + +### Features + +* generate v1 ([5d1f582](https://www.github.com/googleapis/python-os-config/commit/5d1f582b5b02d128ef44120d285941805d234ec7)) diff --git a/packages/google-cloud-os-config/CODE_OF_CONDUCT.md b/packages/google-cloud-os-config/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-os-config/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. 
Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-os-config/CONTRIBUTING.rst b/packages/google-cloud-os-config/CONTRIBUTING.rst new file mode 100644 index 000000000000..523eb6b162b8 --- /dev/null +++ b/packages/google-cloud-os-config/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. 
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+  documentation.
+
+- The feature must work fully on the following CPython versions:
+  3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+   $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit.
+  If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-3.11 -- -k <name of test>
+
+  .. note::
+
+      System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
+      For expediency, we do not run them in older versions of Python 3.
+
+  This alone will not run the tests. You'll need to change some local
+  auth settings and change some configuration in your project to
+  run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of sample>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-os-config
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-os-config/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-os-config/LICENSE b/packages/google-cloud-os-config/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-cloud-os-config/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship.
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-os-config/MANIFEST.in b/packages/google-cloud-os-config/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-os-config/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/packages/google-cloud-os-config/README.rst b/packages/google-cloud-os-config/README.rst
new file mode 100644
index 000000000000..067a953263fe
--- /dev/null
+++ b/packages/google-cloud-os-config/README.rst
@@ -0,0 +1,108 @@
+Python Client for OS Config
+===========================
+
+|stable| |pypi| |versions|
+
+`OS Config`_: provides OS management tools that can be used for patch management, patch compliance, and configuration management on VM instances.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg
+   :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-os-config.svg
+   :target: https://pypi.org/project/google-cloud-os-config/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-os-config.svg
+   :target: https://pypi.org/project/google-cloud-os-config/
+.. _OS Config: https://cloud.google.com/compute/docs/manage-os
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/osconfig/latest
+.. _Product Documentation: https://cloud.google.com/compute/docs/manage-os
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the OS Config API.`_
+4. `Set up Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the OS Config API.: https://cloud.google.com/compute/docs/manage-os
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-os-config
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-os-config
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for OS Config
+  to see other available methods on the client.
+- Read the `OS Config Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _OS Config Product documentation: https://cloud.google.com/compute/docs/manage-os
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-os-config/SECURITY.md b/packages/google-cloud-os-config/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-os-config/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and we coordinate and disclose here on GitHub, using a private GitHub Security Advisory to discuss and fix the issue.
diff --git a/packages/google-cloud-os-config/docs/CHANGELOG.md b/packages/google-cloud-os-config/docs/CHANGELOG.md
new file mode 120000
index 000000000000..04c99a55caae
--- /dev/null
+++ b/packages/google-cloud-os-config/docs/CHANGELOG.md
@@ -0,0 +1 @@
+../CHANGELOG.md
\ No newline at end of file
diff --git a/packages/google-cloud-os-config/docs/README.rst b/packages/google-cloud-os-config/docs/README.rst
new file mode 120000
index 000000000000..89a0106941ff
--- /dev/null
+++ b/packages/google-cloud-os-config/docs/README.rst
@@ -0,0 +1 @@
+../README.rst
\ No newline at end of file
diff --git a/packages/google-cloud-os-config/docs/_static/custom.css b/packages/google-cloud-os-config/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/packages/google-cloud-os-config/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/packages/google-cloud-os-config/docs/_templates/layout.html b/packages/google-cloud-os-config/docs/_templates/layout.html
new file mode 100644
index 000000000000..6316a537f72b
--- /dev/null
+++ b/packages/google-cloud-os-config/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-os-config/docs/conf.py b/packages/google-cloud-os-config/docs/conf.py new file mode 100644 index 000000000000..81b0a50e25a5 --- /dev/null +++ b/packages/google-cloud-os-config/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-os-config documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-os-config" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-os-config", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-os-config-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-os-config.tex", + "google-cloud-os-config Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-os-config", + "google-cloud-os-config Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-os-config", + "google-cloud-os-config Documentation", + author, + "google-cloud-os-config", + "google-cloud-os-config Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-os-config/docs/index.rst b/packages/google-cloud-os-config/docs/index.rst new file mode 100644 index 000000000000..bbeaa3788725 --- /dev/null +++ b/packages/google-cloud-os-config/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of OS Config. +By default, you will get version ``osconfig_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + osconfig_v1/services + osconfig_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + osconfig_v1alpha/services + osconfig_v1alpha/types + + +Changelog +--------- + +For a list of all ``google-cloud-os-config`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-os-config/docs/multiprocessing.rst b/packages/google-cloud-os-config/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-os-config/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads.
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-os-config/docs/osconfig_v1/os_config_service.rst b/packages/google-cloud-os-config/docs/osconfig_v1/os_config_service.rst new file mode 100644 index 000000000000..e082a4b65651 --- /dev/null +++ b/packages/google-cloud-os-config/docs/osconfig_v1/os_config_service.rst @@ -0,0 +1,10 @@ +OsConfigService +--------------------------------- + +.. automodule:: google.cloud.osconfig_v1.services.os_config_service + :members: + :inherited-members: + +.. automodule:: google.cloud.osconfig_v1.services.os_config_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-os-config/docs/osconfig_v1/os_config_zonal_service.rst b/packages/google-cloud-os-config/docs/osconfig_v1/os_config_zonal_service.rst new file mode 100644 index 000000000000..fb0b5d1b30cd --- /dev/null +++ b/packages/google-cloud-os-config/docs/osconfig_v1/os_config_zonal_service.rst @@ -0,0 +1,10 @@ +OsConfigZonalService +-------------------------------------- + +.. automodule:: google.cloud.osconfig_v1.services.os_config_zonal_service + :members: + :inherited-members: + +.. automodule:: google.cloud.osconfig_v1.services.os_config_zonal_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-os-config/docs/osconfig_v1/services.rst b/packages/google-cloud-os-config/docs/osconfig_v1/services.rst new file mode 100644 index 000000000000..8c1733aaa8de --- /dev/null +++ b/packages/google-cloud-os-config/docs/osconfig_v1/services.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Osconfig v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + os_config_service + os_config_zonal_service diff --git a/packages/google-cloud-os-config/docs/osconfig_v1/types.rst b/packages/google-cloud-os-config/docs/osconfig_v1/types.rst new file mode 100644 index 000000000000..2fdbc307d8b2 --- /dev/null +++ b/packages/google-cloud-os-config/docs/osconfig_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Osconfig v1 API +====================================== + +.. automodule:: google.cloud.osconfig_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-os-config/docs/osconfig_v1alpha/os_config_zonal_service.rst b/packages/google-cloud-os-config/docs/osconfig_v1alpha/os_config_zonal_service.rst new file mode 100644 index 000000000000..c4cbbeec7f4d --- /dev/null +++ b/packages/google-cloud-os-config/docs/osconfig_v1alpha/os_config_zonal_service.rst @@ -0,0 +1,10 @@ +OsConfigZonalService +-------------------------------------- + +.. automodule:: google.cloud.osconfig_v1alpha.services.os_config_zonal_service + :members: + :inherited-members: + +.. automodule:: google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-os-config/docs/osconfig_v1alpha/services.rst b/packages/google-cloud-os-config/docs/osconfig_v1alpha/services.rst new file mode 100644 index 000000000000..2f77a4cfedda --- /dev/null +++ b/packages/google-cloud-os-config/docs/osconfig_v1alpha/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Osconfig v1alpha API +============================================== +.. 
toctree:: + :maxdepth: 2 + + os_config_zonal_service diff --git a/packages/google-cloud-os-config/docs/osconfig_v1alpha/types.rst b/packages/google-cloud-os-config/docs/osconfig_v1alpha/types.rst new file mode 100644 index 000000000000..e1f371b4bc3c --- /dev/null +++ b/packages/google-cloud-os-config/docs/osconfig_v1alpha/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Osconfig v1alpha API +=========================================== + +.. automodule:: google.cloud.osconfig_v1alpha.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-os-config/google/cloud/osconfig/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig/__init__.py new file mode 100644 index 000000000000..944e5bbb1ddb --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig/__init__.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.osconfig import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.osconfig_v1.services.os_config_service.async_client import ( + OsConfigServiceAsyncClient, +) +from google.cloud.osconfig_v1.services.os_config_service.client import ( + OsConfigServiceClient, +) +from google.cloud.osconfig_v1.services.os_config_zonal_service.async_client import ( + OsConfigZonalServiceAsyncClient, +) +from google.cloud.osconfig_v1.services.os_config_zonal_service.client import ( + OsConfigZonalServiceClient, +) +from google.cloud.osconfig_v1.types.inventory import ( + GetInventoryRequest, + Inventory, + InventoryView, + ListInventoriesRequest, + ListInventoriesResponse, +) +from google.cloud.osconfig_v1.types.os_policy import OSPolicy +from google.cloud.osconfig_v1.types.os_policy_assignment_reports import ( + GetOSPolicyAssignmentReportRequest, + ListOSPolicyAssignmentReportsRequest, + ListOSPolicyAssignmentReportsResponse, + OSPolicyAssignmentReport, +) +from google.cloud.osconfig_v1.types.os_policy_assignments import ( + CreateOSPolicyAssignmentRequest, + DeleteOSPolicyAssignmentRequest, + GetOSPolicyAssignmentRequest, + ListOSPolicyAssignmentRevisionsRequest, + ListOSPolicyAssignmentRevisionsResponse, + ListOSPolicyAssignmentsRequest, + ListOSPolicyAssignmentsResponse, + OSPolicyAssignment, + OSPolicyAssignmentOperationMetadata, + UpdateOSPolicyAssignmentRequest, +) +from google.cloud.osconfig_v1.types.osconfig_common import FixedOrPercent +from google.cloud.osconfig_v1.types.patch_deployments import ( + CreatePatchDeploymentRequest, + DeletePatchDeploymentRequest, + GetPatchDeploymentRequest, + ListPatchDeploymentsRequest, + ListPatchDeploymentsResponse, + MonthlySchedule, + OneTimeSchedule, + PatchDeployment, + PausePatchDeploymentRequest, + RecurringSchedule, + ResumePatchDeploymentRequest, + UpdatePatchDeploymentRequest, + WeekDayOfMonth, + WeeklySchedule, +) +from google.cloud.osconfig_v1.types.patch_jobs import ( + AptSettings, + CancelPatchJobRequest, + ExecStep, + ExecStepConfig, + ExecutePatchJobRequest, + 
GcsObject, + GetPatchJobRequest, + GooSettings, + Instance, + ListPatchJobInstanceDetailsRequest, + ListPatchJobInstanceDetailsResponse, + ListPatchJobsRequest, + ListPatchJobsResponse, + PatchConfig, + PatchInstanceFilter, + PatchJob, + PatchJobInstanceDetails, + PatchRollout, + WindowsUpdateSettings, + YumSettings, + ZypperSettings, +) +from google.cloud.osconfig_v1.types.vulnerability import ( + CVSSv3, + GetVulnerabilityReportRequest, + ListVulnerabilityReportsRequest, + ListVulnerabilityReportsResponse, + VulnerabilityReport, +) + +__all__ = ( + "OsConfigServiceClient", + "OsConfigServiceAsyncClient", + "OsConfigZonalServiceClient", + "OsConfigZonalServiceAsyncClient", + "GetInventoryRequest", + "Inventory", + "ListInventoriesRequest", + "ListInventoriesResponse", + "InventoryView", + "OSPolicy", + "GetOSPolicyAssignmentReportRequest", + "ListOSPolicyAssignmentReportsRequest", + "ListOSPolicyAssignmentReportsResponse", + "OSPolicyAssignmentReport", + "CreateOSPolicyAssignmentRequest", + "DeleteOSPolicyAssignmentRequest", + "GetOSPolicyAssignmentRequest", + "ListOSPolicyAssignmentRevisionsRequest", + "ListOSPolicyAssignmentRevisionsResponse", + "ListOSPolicyAssignmentsRequest", + "ListOSPolicyAssignmentsResponse", + "OSPolicyAssignment", + "OSPolicyAssignmentOperationMetadata", + "UpdateOSPolicyAssignmentRequest", + "FixedOrPercent", + "CreatePatchDeploymentRequest", + "DeletePatchDeploymentRequest", + "GetPatchDeploymentRequest", + "ListPatchDeploymentsRequest", + "ListPatchDeploymentsResponse", + "MonthlySchedule", + "OneTimeSchedule", + "PatchDeployment", + "PausePatchDeploymentRequest", + "RecurringSchedule", + "ResumePatchDeploymentRequest", + "UpdatePatchDeploymentRequest", + "WeekDayOfMonth", + "WeeklySchedule", + "AptSettings", + "CancelPatchJobRequest", + "ExecStep", + "ExecStepConfig", + "ExecutePatchJobRequest", + "GcsObject", + "GetPatchJobRequest", + "GooSettings", + "Instance", + "ListPatchJobInstanceDetailsRequest", + "ListPatchJobInstanceDetailsResponse", + "ListPatchJobsRequest", + "ListPatchJobsResponse", + "PatchConfig", + "PatchInstanceFilter", + "PatchJob", + "PatchJobInstanceDetails", + "PatchRollout", + "WindowsUpdateSettings", + "YumSettings", + "ZypperSettings", + "CVSSv3", + "GetVulnerabilityReportRequest", + "ListVulnerabilityReportsRequest", + "ListVulnerabilityReportsResponse", + "VulnerabilityReport", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig/gapic_version.py b/packages/google-cloud-os-config/google/cloud/osconfig/gapic_version.py new file mode 100644 index 000000000000..45be81c3fdee --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
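+# Note: the trailing `# {x-release-please-version}` marker on the version line
+# below lets release-please locate and bump this string automatically on each
+# release, so it normally should not be edited by hand.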
+# +__version__ = "1.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-os-config/google/cloud/osconfig/py.typed b/packages/google-cloud-os-config/google/cloud/osconfig/py.typed new file mode 100644 index 000000000000..ebf4fbd316fb --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-os-config package uses inline types. diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/__init__.py new file mode 100644 index 000000000000..01b7f580a120 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/__init__.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.osconfig_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.os_config_service import ( + OsConfigServiceAsyncClient, + OsConfigServiceClient, +) +from .services.os_config_zonal_service import ( + OsConfigZonalServiceAsyncClient, + OsConfigZonalServiceClient, +) +from .types.inventory import ( + GetInventoryRequest, + Inventory, + InventoryView, + ListInventoriesRequest, + ListInventoriesResponse, +) +from .types.os_policy import OSPolicy +from .types.os_policy_assignment_reports import ( + GetOSPolicyAssignmentReportRequest, + ListOSPolicyAssignmentReportsRequest, + ListOSPolicyAssignmentReportsResponse, + OSPolicyAssignmentReport, +) +from .types.os_policy_assignments import ( + CreateOSPolicyAssignmentRequest, + DeleteOSPolicyAssignmentRequest, + GetOSPolicyAssignmentRequest, + ListOSPolicyAssignmentRevisionsRequest, + ListOSPolicyAssignmentRevisionsResponse, + ListOSPolicyAssignmentsRequest, + ListOSPolicyAssignmentsResponse, + OSPolicyAssignment, + OSPolicyAssignmentOperationMetadata, + UpdateOSPolicyAssignmentRequest, +) +from .types.osconfig_common import FixedOrPercent +from .types.patch_deployments import ( + CreatePatchDeploymentRequest, + DeletePatchDeploymentRequest, + GetPatchDeploymentRequest, + ListPatchDeploymentsRequest, + ListPatchDeploymentsResponse, + MonthlySchedule, + OneTimeSchedule, + PatchDeployment, + PausePatchDeploymentRequest, + RecurringSchedule, + ResumePatchDeploymentRequest, + UpdatePatchDeploymentRequest, + WeekDayOfMonth, + WeeklySchedule, +) +from .types.patch_jobs import ( + AptSettings, + CancelPatchJobRequest, + ExecStep, + ExecStepConfig, + ExecutePatchJobRequest, + GcsObject, + GetPatchJobRequest, + GooSettings, + Instance, + ListPatchJobInstanceDetailsRequest, + ListPatchJobInstanceDetailsResponse, + ListPatchJobsRequest, + ListPatchJobsResponse, + PatchConfig, + PatchInstanceFilter, + PatchJob, + PatchJobInstanceDetails, + PatchRollout, + WindowsUpdateSettings, + YumSettings, + ZypperSettings, +) +from .types.vulnerability import ( + CVSSv3, + GetVulnerabilityReportRequest, + ListVulnerabilityReportsRequest, + 
ListVulnerabilityReportsResponse, + VulnerabilityReport, +) + +__all__ = ( + "OsConfigServiceAsyncClient", + "OsConfigZonalServiceAsyncClient", + "AptSettings", + "CVSSv3", + "CancelPatchJobRequest", + "CreateOSPolicyAssignmentRequest", + "CreatePatchDeploymentRequest", + "DeleteOSPolicyAssignmentRequest", + "DeletePatchDeploymentRequest", + "ExecStep", + "ExecStepConfig", + "ExecutePatchJobRequest", + "FixedOrPercent", + "GcsObject", + "GetInventoryRequest", + "GetOSPolicyAssignmentReportRequest", + "GetOSPolicyAssignmentRequest", + "GetPatchDeploymentRequest", + "GetPatchJobRequest", + "GetVulnerabilityReportRequest", + "GooSettings", + "Instance", + "Inventory", + "InventoryView", + "ListInventoriesRequest", + "ListInventoriesResponse", + "ListOSPolicyAssignmentReportsRequest", + "ListOSPolicyAssignmentReportsResponse", + "ListOSPolicyAssignmentRevisionsRequest", + "ListOSPolicyAssignmentRevisionsResponse", + "ListOSPolicyAssignmentsRequest", + "ListOSPolicyAssignmentsResponse", + "ListPatchDeploymentsRequest", + "ListPatchDeploymentsResponse", + "ListPatchJobInstanceDetailsRequest", + "ListPatchJobInstanceDetailsResponse", + "ListPatchJobsRequest", + "ListPatchJobsResponse", + "ListVulnerabilityReportsRequest", + "ListVulnerabilityReportsResponse", + "MonthlySchedule", + "OSPolicy", + "OSPolicyAssignment", + "OSPolicyAssignmentOperationMetadata", + "OSPolicyAssignmentReport", + "OneTimeSchedule", + "OsConfigServiceClient", + "OsConfigZonalServiceClient", + "PatchConfig", + "PatchDeployment", + "PatchInstanceFilter", + "PatchJob", + "PatchJobInstanceDetails", + "PatchRollout", + "PausePatchDeploymentRequest", + "RecurringSchedule", + "ResumePatchDeploymentRequest", + "UpdateOSPolicyAssignmentRequest", + "UpdatePatchDeploymentRequest", + "VulnerabilityReport", + "WeekDayOfMonth", + "WeeklySchedule", + "WindowsUpdateSettings", + "YumSettings", + "ZypperSettings", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_metadata.json b/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_metadata.json new file mode 100644 index 000000000000..b4916a8658ce --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_metadata.json @@ -0,0 +1,407 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.osconfig_v1", + "protoPackage": "google.cloud.osconfig.v1", + "schema": "1.0", + "services": { + "OsConfigService": { + "clients": { + "grpc": { + "libraryClient": "OsConfigServiceClient", + "rpcs": { + "CancelPatchJob": { + "methods": [ + "cancel_patch_job" + ] + }, + "CreatePatchDeployment": { + "methods": [ + "create_patch_deployment" + ] + }, + "DeletePatchDeployment": { + "methods": [ + "delete_patch_deployment" + ] + }, + "ExecutePatchJob": { + "methods": [ + "execute_patch_job" + ] + }, + "GetPatchDeployment": { + "methods": [ + "get_patch_deployment" + ] + }, + "GetPatchJob": { + "methods": [ + "get_patch_job" + ] + }, + "ListPatchDeployments": { + "methods": [ + "list_patch_deployments" + ] + }, + "ListPatchJobInstanceDetails": { + "methods": [ + "list_patch_job_instance_details" + ] + }, + "ListPatchJobs": { + "methods": [ + "list_patch_jobs" + ] + }, + "PausePatchDeployment": { + "methods": [ + "pause_patch_deployment" + ] + }, + "ResumePatchDeployment": { + "methods": [ + "resume_patch_deployment" + ] + }, + "UpdatePatchDeployment": { + "methods": [ + "update_patch_deployment" + ] + } + } + }, + "grpc-async": { + 
"libraryClient": "OsConfigServiceAsyncClient", + "rpcs": { + "CancelPatchJob": { + "methods": [ + "cancel_patch_job" + ] + }, + "CreatePatchDeployment": { + "methods": [ + "create_patch_deployment" + ] + }, + "DeletePatchDeployment": { + "methods": [ + "delete_patch_deployment" + ] + }, + "ExecutePatchJob": { + "methods": [ + "execute_patch_job" + ] + }, + "GetPatchDeployment": { + "methods": [ + "get_patch_deployment" + ] + }, + "GetPatchJob": { + "methods": [ + "get_patch_job" + ] + }, + "ListPatchDeployments": { + "methods": [ + "list_patch_deployments" + ] + }, + "ListPatchJobInstanceDetails": { + "methods": [ + "list_patch_job_instance_details" + ] + }, + "ListPatchJobs": { + "methods": [ + "list_patch_jobs" + ] + }, + "PausePatchDeployment": { + "methods": [ + "pause_patch_deployment" + ] + }, + "ResumePatchDeployment": { + "methods": [ + "resume_patch_deployment" + ] + }, + "UpdatePatchDeployment": { + "methods": [ + "update_patch_deployment" + ] + } + } + }, + "rest": { + "libraryClient": "OsConfigServiceClient", + "rpcs": { + "CancelPatchJob": { + "methods": [ + "cancel_patch_job" + ] + }, + "CreatePatchDeployment": { + "methods": [ + "create_patch_deployment" + ] + }, + "DeletePatchDeployment": { + "methods": [ + "delete_patch_deployment" + ] + }, + "ExecutePatchJob": { + "methods": [ + "execute_patch_job" + ] + }, + "GetPatchDeployment": { + "methods": [ + "get_patch_deployment" + ] + }, + "GetPatchJob": { + "methods": [ + "get_patch_job" + ] + }, + "ListPatchDeployments": { + "methods": [ + "list_patch_deployments" + ] + }, + "ListPatchJobInstanceDetails": { + "methods": [ + "list_patch_job_instance_details" + ] + }, + "ListPatchJobs": { + "methods": [ + "list_patch_jobs" + ] + }, + "PausePatchDeployment": { + "methods": [ + "pause_patch_deployment" + ] + }, + "ResumePatchDeployment": { + "methods": [ + "resume_patch_deployment" + ] + }, + "UpdatePatchDeployment": { + "methods": [ + "update_patch_deployment" + ] + } + } + } + } + }, + "OsConfigZonalService": { + "clients": { + "grpc": { + "libraryClient": "OsConfigZonalServiceClient", + "rpcs": { + "CreateOSPolicyAssignment": { + "methods": [ + "create_os_policy_assignment" + ] + }, + "DeleteOSPolicyAssignment": { + "methods": [ + "delete_os_policy_assignment" + ] + }, + "GetInventory": { + "methods": [ + "get_inventory" + ] + }, + "GetOSPolicyAssignment": { + "methods": [ + "get_os_policy_assignment" + ] + }, + "GetOSPolicyAssignmentReport": { + "methods": [ + "get_os_policy_assignment_report" + ] + }, + "GetVulnerabilityReport": { + "methods": [ + "get_vulnerability_report" + ] + }, + "ListInventories": { + "methods": [ + "list_inventories" + ] + }, + "ListOSPolicyAssignmentReports": { + "methods": [ + "list_os_policy_assignment_reports" + ] + }, + "ListOSPolicyAssignmentRevisions": { + "methods": [ + "list_os_policy_assignment_revisions" + ] + }, + "ListOSPolicyAssignments": { + "methods": [ + "list_os_policy_assignments" + ] + }, + "ListVulnerabilityReports": { + "methods": [ + "list_vulnerability_reports" + ] + }, + "UpdateOSPolicyAssignment": { + "methods": [ + "update_os_policy_assignment" + ] + } + } + }, + "grpc-async": { + "libraryClient": "OsConfigZonalServiceAsyncClient", + "rpcs": { + "CreateOSPolicyAssignment": { + "methods": [ + "create_os_policy_assignment" + ] + }, + "DeleteOSPolicyAssignment": { + "methods": [ + "delete_os_policy_assignment" + ] + }, + "GetInventory": { + "methods": [ + "get_inventory" + ] + }, + "GetOSPolicyAssignment": { + "methods": [ + "get_os_policy_assignment" + ] + }, + 
"GetOSPolicyAssignmentReport": { + "methods": [ + "get_os_policy_assignment_report" + ] + }, + "GetVulnerabilityReport": { + "methods": [ + "get_vulnerability_report" + ] + }, + "ListInventories": { + "methods": [ + "list_inventories" + ] + }, + "ListOSPolicyAssignmentReports": { + "methods": [ + "list_os_policy_assignment_reports" + ] + }, + "ListOSPolicyAssignmentRevisions": { + "methods": [ + "list_os_policy_assignment_revisions" + ] + }, + "ListOSPolicyAssignments": { + "methods": [ + "list_os_policy_assignments" + ] + }, + "ListVulnerabilityReports": { + "methods": [ + "list_vulnerability_reports" + ] + }, + "UpdateOSPolicyAssignment": { + "methods": [ + "update_os_policy_assignment" + ] + } + } + }, + "rest": { + "libraryClient": "OsConfigZonalServiceClient", + "rpcs": { + "CreateOSPolicyAssignment": { + "methods": [ + "create_os_policy_assignment" + ] + }, + "DeleteOSPolicyAssignment": { + "methods": [ + "delete_os_policy_assignment" + ] + }, + "GetInventory": { + "methods": [ + "get_inventory" + ] + }, + "GetOSPolicyAssignment": { + "methods": [ + "get_os_policy_assignment" + ] + }, + "GetOSPolicyAssignmentReport": { + "methods": [ + "get_os_policy_assignment_report" + ] + }, + "GetVulnerabilityReport": { + "methods": [ + "get_vulnerability_report" + ] + }, + "ListInventories": { + "methods": [ + "list_inventories" + ] + }, + "ListOSPolicyAssignmentReports": { + "methods": [ + "list_os_policy_assignment_reports" + ] + }, + "ListOSPolicyAssignmentRevisions": { + "methods": [ + "list_os_policy_assignment_revisions" + ] + }, + "ListOSPolicyAssignments": { + "methods": [ + "list_os_policy_assignments" + ] + }, + "ListVulnerabilityReports": { + "methods": [ + "list_vulnerability_reports" + ] + }, + "UpdateOSPolicyAssignment": { + "methods": [ + "update_os_policy_assignment" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_version.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_version.py new file mode 100644 index 000000000000..45be81c3fdee --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/py.typed b/packages/google-cloud-os-config/google/cloud/osconfig_v1/py.typed new file mode 100644 index 000000000000..ebf4fbd316fb --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-os-config package uses inline types. 
diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/__init__.py new file mode 100644 index 000000000000..de8dfc0bd197 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import OsConfigServiceAsyncClient +from .client import OsConfigServiceClient + +__all__ = ( + "OsConfigServiceClient", + "OsConfigServiceAsyncClient", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/async_client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/async_client.py new file mode 100644 index 000000000000..8c1f6a16fa77 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/async_client.py @@ -0,0 +1,1557 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.osconfig_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.osconfig_v1.services.os_config_service import pagers +from google.cloud.osconfig_v1.types import patch_deployments, patch_jobs + +from .client import OsConfigServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, OsConfigServiceTransport +from .transports.grpc_asyncio import OsConfigServiceGrpcAsyncIOTransport + + +class OsConfigServiceAsyncClient: + """OS Config API + + The OS Config service is a server-side component that you can + use to manage package installations and patch jobs for virtual + machine instances. + """ + + _client: OsConfigServiceClient + + DEFAULT_ENDPOINT = OsConfigServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = OsConfigServiceClient.DEFAULT_MTLS_ENDPOINT + + instance_path = staticmethod(OsConfigServiceClient.instance_path) + parse_instance_path = staticmethod(OsConfigServiceClient.parse_instance_path) + patch_deployment_path = staticmethod(OsConfigServiceClient.patch_deployment_path) + parse_patch_deployment_path = staticmethod( + OsConfigServiceClient.parse_patch_deployment_path + ) + patch_job_path = staticmethod(OsConfigServiceClient.patch_job_path) + parse_patch_job_path = staticmethod(OsConfigServiceClient.parse_patch_job_path) + common_billing_account_path = staticmethod( + OsConfigServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + OsConfigServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(OsConfigServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + OsConfigServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + OsConfigServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + OsConfigServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(OsConfigServiceClient.common_project_path) + parse_common_project_path = staticmethod( + OsConfigServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(OsConfigServiceClient.common_location_path) + parse_common_location_path = staticmethod( + OsConfigServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OsConfigServiceAsyncClient: The constructed client. 
+ """ + return OsConfigServiceClient.from_service_account_info.__func__(OsConfigServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OsConfigServiceAsyncClient: The constructed client. + """ + return OsConfigServiceClient.from_service_account_file.__func__(OsConfigServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return OsConfigServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> OsConfigServiceTransport: + """Returns the transport used by the client instance. + + Returns: + OsConfigServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(OsConfigServiceClient).get_transport_class, type(OsConfigServiceClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, OsConfigServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the os config service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.OsConfigServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = OsConfigServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def execute_patch_job( + self, + request: Optional[Union[patch_jobs.ExecutePatchJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Patch VM instances by creating and running a patch + job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_execute_patch_job(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.ExecutePatchJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.execute_patch_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.ExecutePatchJobRequest, dict]]): + The request object. A request message to initiate + patching across Compute Engine + instances. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). + + """ + # Create or coerce a protobuf request object. + request = patch_jobs.ExecutePatchJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.execute_patch_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_patch_job( + self, + request: Optional[Union[patch_jobs.GetPatchJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Get the patch job. This can be used to track the + progress of an ongoing patch job or review the details + of completed jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_get_patch_job(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.GetPatchJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_patch_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.GetPatchJobRequest, dict]]): + The request object. Request to get an active or completed + patch job. + name (:class:`str`): + Required. Name of the patch in the form + ``projects/*/patchJobs/*`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_jobs.GetPatchJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
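+ # Note: `default_timeout=None` below applies no client-side deadline of its
+ # own; a deadline is enforced only when the caller passes `timeout` explicitly.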
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_patch_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_patch_job( + self, + request: Optional[Union[patch_jobs.CancelPatchJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Cancel a patch job. The patch job must be active. + Canceled patch jobs cannot be restarted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_cancel_patch_job(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.CancelPatchJobRequest( + name="name_value", + ) + + # Make the request + response = await client.cancel_patch_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.CancelPatchJobRequest, dict]]): + The request object. Message for canceling a patch job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). + + """ + # Create or coerce a protobuf request object. + request = patch_jobs.CancelPatchJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_patch_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_patch_jobs( + self, + request: Optional[Union[patch_jobs.ListPatchJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPatchJobsAsyncPager: + r"""Get a list of patch jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_list_patch_jobs(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.ListPatchJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_patch_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.ListPatchJobsRequest, dict]]): + The request object. A request message for listing patch + jobs. + parent (:class:`str`): + Required. In the form of ``projects/*`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchJobsAsyncPager: + A response message for listing patch + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_jobs.ListPatchJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_patch_jobs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPatchJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
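+ # Iterating the pager with `async for` lazily re-invokes the wrapped RPC
+ # with the stored request and metadata to fetch each subsequent page.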
+ return response + + async def list_patch_job_instance_details( + self, + request: Optional[ + Union[patch_jobs.ListPatchJobInstanceDetailsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPatchJobInstanceDetailsAsyncPager: + r"""Get a list of instance details for a given patch job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_list_patch_job_instance_details(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.ListPatchJobInstanceDetailsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_patch_job_instance_details(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsRequest, dict]]): + The request object. Request to list details for all + instances that are part of a patch job. + parent (:class:`str`): + Required. The parent for the instances is in the form + of ``projects/*/patchJobs/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchJobInstanceDetailsAsyncPager: + A response message for listing the + instance details for a patch job. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_jobs.ListPatchJobInstanceDetailsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_patch_job_instance_details, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request.
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPatchJobInstanceDetailsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.CreatePatchDeploymentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + patch_deployment: Optional[patch_deployments.PatchDeployment] = None, + patch_deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Create an OS Config patch deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_create_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.CreatePatchDeploymentRequest( + parent="parent_value", + patch_deployment_id="patch_deployment_id_value", + ) + + # Make the request + response = await client.create_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.CreatePatchDeploymentRequest, dict]]): + The request object. A request message for creating a + patch deployment. + parent (:class:`str`): + Required. The project to apply this patch deployment to + in the form ``projects/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + patch_deployment (:class:`google.cloud.osconfig_v1.types.PatchDeployment`): + Required. The patch deployment to + create. + + This corresponds to the ``patch_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + patch_deployment_id (:class:`str`): + Required. A name for the patch deployment in the + project. When creating a name the following rules apply: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the project. + + This corresponds to the ``patch_deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. 
For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, patch_deployment, patch_deployment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_deployments.CreatePatchDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if patch_deployment is not None: + request.patch_deployment = patch_deployment + if patch_deployment_id is not None: + request.patch_deployment_id = patch_deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_patch_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.GetPatchDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Get an OS Config patch deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_get_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.GetPatchDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.GetPatchDeploymentRequest, dict]]): + The request object. A request message for retrieving a + patch deployment. + name (:class:`str`): + Required. The resource name of the patch deployment in + the form ``projects/*/patchDeployments/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_deployments.GetPatchDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_patch_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_patch_deployments( + self, + request: Optional[ + Union[patch_deployments.ListPatchDeploymentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPatchDeploymentsAsyncPager: + r"""Get a page of OS Config patch deployments. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_list_patch_deployments(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.ListPatchDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_patch_deployments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.ListPatchDeploymentsRequest, dict]]): + The request object. A request message for listing patch + deployments. + parent (:class:`str`): + Required. The resource name of the parent in the form + ``projects/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchDeploymentsAsyncPager: + A response message for listing patch + deployments. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_deployments.ListPatchDeploymentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_patch_deployments, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPatchDeploymentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.DeletePatchDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an OS Config patch deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_delete_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.DeletePatchDeploymentRequest( + name="name_value", + ) + + # Make the request + await client.delete_patch_deployment(request=request) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.DeletePatchDeploymentRequest, dict]]): + The request object. A request message for deleting a + patch deployment. + name (:class:`str`): + Required. The resource name of the patch deployment in + the form ``projects/*/patchDeployments/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_deployments.DeletePatchDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_patch_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.UpdatePatchDeploymentRequest, dict] + ] = None, + *, + patch_deployment: Optional[patch_deployments.PatchDeployment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Update an OS Config patch deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_update_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.UpdatePatchDeploymentRequest( + ) + + # Make the request + response = await client.update_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.UpdatePatchDeploymentRequest, dict]]): + The request object. A request message for updating a + patch deployment. + patch_deployment (:class:`google.cloud.osconfig_v1.types.PatchDeployment`): + Required. The patch deployment to + Update. + + This corresponds to the ``patch_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask that controls + which fields of the patch deployment + should be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([patch_deployment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_deployments.UpdatePatchDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if patch_deployment is not None: + request.patch_deployment = patch_deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_patch_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("patch_deployment.name", request.patch_deployment.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def pause_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.PausePatchDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Change state of patch deployment to "PAUSED". + Patch deployment in paused state doesn't generate patch + jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_pause_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.PausePatchDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.pause_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.PausePatchDeploymentRequest, dict]]): + The request object. A request message for pausing a patch + deployment. + name (:class:`str`): + Required. The resource name of the patch deployment in + the form ``projects/*/patchDeployments/*``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_deployments.PausePatchDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_patch_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def resume_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.ResumePatchDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Change state of patch deployment back to "ACTIVE". + Patch deployment in active state continues to generate + patch jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_resume_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.ResumePatchDeploymentRequest( + name="name_value", + ) + + # Make the request + response = await client.resume_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.ResumePatchDeploymentRequest, dict]]): + The request object. A request message for resuming a + patch deployment. 
+ name (:class:`str`): + Required. The resource name of the patch deployment in + the form ``projects/*/patchDeployments/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = patch_deployments.ResumePatchDeploymentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_patch_deployment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "OsConfigServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OsConfigServiceAsyncClient",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/client.py new file mode 100644 index 000000000000..8dd31e20bd91 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/client.py @@ -0,0 +1,1828 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
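+# Editorial note: a minimal usage sketch of the synchronous client that this
+# module defines (assumptions: application default credentials are available,
+# and "my-project" is a placeholder project ID):
+#
+#     from google.cloud import osconfig_v1
+#
+#     client = osconfig_v1.OsConfigServiceClient()
+#     # Iterate all patch jobs in the project; the pager fetches pages lazily.
+#     for job in client.list_patch_jobs(parent="projects/my-project"):
+#         print(job.name, job.state)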
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.osconfig_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+from google.cloud.osconfig_v1.services.os_config_service import pagers
+from google.cloud.osconfig_v1.types import patch_deployments, patch_jobs
+
+from .transports.base import DEFAULT_CLIENT_INFO, OsConfigServiceTransport
+from .transports.grpc import OsConfigServiceGrpcTransport
+from .transports.grpc_asyncio import OsConfigServiceGrpcAsyncIOTransport
+from .transports.rest import OsConfigServiceRestTransport
+
+
+class OsConfigServiceClientMeta(type):
+    """Metaclass for the OsConfigService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[OsConfigServiceTransport]]
+    _transport_registry["grpc"] = OsConfigServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = OsConfigServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = OsConfigServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[OsConfigServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class OsConfigServiceClient(metaclass=OsConfigServiceClientMeta):
+    """OS Config API
+
+    The OS Config service is a server-side component that you can
+    use to manage package installations and patch jobs for virtual
+    machine instances.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
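+            # The named groups above (name, mtls, sandbox, googledomain) are
+            # unpacked positionally via m.groups() below.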
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "osconfig.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OsConfigServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OsConfigServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> OsConfigServiceTransport: + """Returns the transport used by the client instance. + + Returns: + OsConfigServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def instance_path( + project: str, + zone: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/zones/{zone}/instances/{instance}".format( + project=project, + zone=zone, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/zones/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def patch_deployment_path( + project: str, + patch_deployment: str, + ) -> str: + """Returns a fully-qualified patch_deployment string.""" + return "projects/{project}/patchDeployments/{patch_deployment}".format( + project=project, + patch_deployment=patch_deployment, + ) + + @staticmethod + def parse_patch_deployment_path(path: str) -> Dict[str, str]: + """Parses a patch_deployment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/patchDeployments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def patch_job_path( + project: str, + patch_job: str, + ) -> str: + """Returns a fully-qualified patch_job string.""" + return "projects/{project}/patchJobs/{patch_job}".format( + project=project, + patch_job=patch_job, + ) + + @staticmethod + def parse_patch_job_path(path: str) -> Dict[str, str]: + """Parses a patch_job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/patchJobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + 
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, OsConfigServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the os config service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests.
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, OsConfigServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, OsConfigServiceTransport): + # transport is a OsConfigServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def execute_patch_job( + self, + request: Optional[Union[patch_jobs.ExecutePatchJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Patch VM instances by creating and running a patch + job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_execute_patch_job(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ExecutePatchJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.execute_patch_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ExecutePatchJobRequest, dict]): + The request object. A request message to initiate + patching across Compute Engine + instances. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a patch_jobs.ExecutePatchJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_jobs.ExecutePatchJobRequest): + request = patch_jobs.ExecutePatchJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_patch_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
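+        # The call below returns a PatchJob resource describing the newly
+        # started job; progress can then be tracked with get_patch_job.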
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_patch_job( + self, + request: Optional[Union[patch_jobs.GetPatchJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Get the patch job. This can be used to track the + progress of an ongoing patch job or review the details + of completed jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_get_patch_job(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.GetPatchJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_patch_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.GetPatchJobRequest, dict]): + The request object. Request to get an active or completed + patch job. + name (str): + Required. Name of the patch in the form + ``projects/*/patchJobs/*`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_jobs.GetPatchJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_jobs.GetPatchJobRequest): + request = patch_jobs.GetPatchJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_patch_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
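+        # to_grpc_metadata() renders these pairs into the
+        # `x-goog-request-params` routing header expected by the backend.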
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_patch_job( + self, + request: Optional[Union[patch_jobs.CancelPatchJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Cancel a patch job. The patch job must be active. + Canceled patch jobs cannot be restarted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_cancel_patch_job(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.CancelPatchJobRequest( + name="name_value", + ) + + # Make the request + response = client.cancel_patch_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.CancelPatchJobRequest, dict]): + The request object. Message for canceling a patch job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a patch_jobs.CancelPatchJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_jobs.CancelPatchJobRequest): + request = patch_jobs.CancelPatchJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_patch_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
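+        # (CancelPatchJob returns the updated PatchJob resource, not Empty.)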
+ return response + + def list_patch_jobs( + self, + request: Optional[Union[patch_jobs.ListPatchJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPatchJobsPager: + r"""Get a list of patch jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_list_patch_jobs(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ListPatchJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_patch_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ListPatchJobsRequest, dict]): + The request object. A request message for listing patch + jobs. + parent (str): + Required. In the form of ``projects/*`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchJobsPager: + A response message for listing patch + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_jobs.ListPatchJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_jobs.ListPatchJobsRequest): + request = patch_jobs.ListPatchJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_patch_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
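+        # Additional pages are fetched lazily, by re-invoking `rpc` with the
+        # next_page_token of the previous response.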
+ response = pagers.ListPatchJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_patch_job_instance_details( + self, + request: Optional[ + Union[patch_jobs.ListPatchJobInstanceDetailsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPatchJobInstanceDetailsPager: + r"""Get a list of instance details for a given patch job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_list_patch_job_instance_details(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ListPatchJobInstanceDetailsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_patch_job_instance_details(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsRequest, dict]): + The request object. Request to list details for all + instances that are part of a patch job. + parent (str): + Required. The parent for the instances are in the form + of ``projects/*/patchJobs/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchJobInstanceDetailsPager: + A response message for listing the + instances details for a patch job. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_jobs.ListPatchJobInstanceDetailsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_jobs.ListPatchJobInstanceDetailsRequest): + request = patch_jobs.ListPatchJobInstanceDetailsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
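+        # Unlike the async client, which wraps methods inline, the sync client
+        # looks up a pre-wrapped method carrying the transport's default
+        # retry/timeout configuration.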
+ rpc = self._transport._wrapped_methods[ + self._transport.list_patch_job_instance_details + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPatchJobInstanceDetailsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.CreatePatchDeploymentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + patch_deployment: Optional[patch_deployments.PatchDeployment] = None, + patch_deployment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Create an OS Config patch deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_create_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.CreatePatchDeploymentRequest( + parent="parent_value", + patch_deployment_id="patch_deployment_id_value", + ) + + # Make the request + response = client.create_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.CreatePatchDeploymentRequest, dict]): + The request object. A request message for creating a + patch deployment. + parent (str): + Required. The project to apply this patch deployment to + in the form ``projects/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + patch_deployment (google.cloud.osconfig_v1.types.PatchDeployment): + Required. The patch deployment to + create. + + This corresponds to the ``patch_deployment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + patch_deployment_id (str): + Required. A name for the patch deployment in the + project. When creating a name the following rules apply: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the project. + + This corresponds to the ``patch_deployment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, patch_deployment, patch_deployment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_deployments.CreatePatchDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_deployments.CreatePatchDeploymentRequest): + request = patch_deployments.CreatePatchDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if patch_deployment is not None: + request.patch_deployment = patch_deployment + if patch_deployment_id is not None: + request.patch_deployment_id = patch_deployment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_patch_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.GetPatchDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Get an OS Config patch deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_get_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.GetPatchDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.get_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.GetPatchDeploymentRequest, dict]): + The request object. A request message for retrieving a + patch deployment. + name (str): + Required. 
The resource name of the patch deployment in + the form ``projects/*/patchDeployments/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_deployments.GetPatchDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_deployments.GetPatchDeploymentRequest): + request = patch_deployments.GetPatchDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_patch_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_patch_deployments( + self, + request: Optional[ + Union[patch_deployments.ListPatchDeploymentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPatchDeploymentsPager: + r"""Get a page of OS Config patch deployments. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_list_patch_deployments(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ListPatchDeploymentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_patch_deployments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ListPatchDeploymentsRequest, dict]): + The request object. A request message for listing patch + deployments. + parent (str): + Required. The resource name of the parent in the form + ``projects/*``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchDeploymentsPager: + A response message for listing patch + deployments. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_deployments.ListPatchDeploymentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_deployments.ListPatchDeploymentsRequest): + request = patch_deployments.ListPatchDeploymentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_patch_deployments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPatchDeploymentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
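+        # (Note: no further ListPatchDeployments calls happen at this point;
+        # the pager fetches subsequent pages lazily, only when iteration
+        # crosses a page boundary; see pagers.py later in this diff.)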
+ return response + + def delete_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.DeletePatchDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete an OS Config patch deployment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_delete_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.DeletePatchDeploymentRequest( + name="name_value", + ) + + # Make the request + client.delete_patch_deployment(request=request) + + Args: + request (Union[google.cloud.osconfig_v1.types.DeletePatchDeploymentRequest, dict]): + The request object. A request message for deleting a + patch deployment. + name (str): + Required. The resource name of the patch deployment in + the form ``projects/*/patchDeployments/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_deployments.DeletePatchDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_deployments.DeletePatchDeploymentRequest): + request = patch_deployments.DeletePatchDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_patch_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
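+        # Note: the routing header built above travels as gRPC metadata,
+        # conventionally under the "x-goog-request-params" key, with a
+        # URL-encoded value such as
+        # "name=projects%2Fmy-project%2FpatchDeployments%2Fmy-deployment"
+        # (placeholder shown); the backend uses it to route the request.
+        # DeletePatchDeployment returns google.protobuf.Empty on the wire
+        # (see the transport typing in transports/base.py), so the result of
+        # the call below is intentionally discarded and this method returns None.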
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def update_patch_deployment(
+        self,
+        request: Optional[
+            Union[patch_deployments.UpdatePatchDeploymentRequest, dict]
+        ] = None,
+        *,
+        patch_deployment: Optional[patch_deployments.PatchDeployment] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> patch_deployments.PatchDeployment:
+        r"""Update an OS Config patch deployment.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import osconfig_v1
+
+            def sample_update_patch_deployment():
+                # Create a client
+                client = osconfig_v1.OsConfigServiceClient()
+
+                # Initialize request argument(s)
+                request = osconfig_v1.UpdatePatchDeploymentRequest(
+                )
+
+                # Make the request
+                response = client.update_patch_deployment(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.osconfig_v1.types.UpdatePatchDeploymentRequest, dict]):
+                The request object. A request message for updating a
+                patch deployment.
+            patch_deployment (google.cloud.osconfig_v1.types.PatchDeployment):
+                Required. The patch deployment to
+                update.
+
+                This corresponds to the ``patch_deployment`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Optional. Field mask that controls
+                which fields of the patch deployment
+                should be updated.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.osconfig_v1.types.PatchDeployment:
+                Patch deployments are configurations that individual patch jobs use to
+                complete a patch. These configurations include
+                instance filter, package repository settings, and a
+                schedule. For more information about creating and
+                managing patch deployments, see [Scheduling patch
+                jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs).
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([patch_deployment, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a patch_deployments.UpdatePatchDeploymentRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
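+        # Illustrative sketch (values are placeholders): a partial update
+        # typically pairs the resource with an explicit field mask,
+        #
+        #     from google.protobuf import field_mask_pb2
+        #
+        #     client.update_patch_deployment(
+        #         patch_deployment=deployment,  # a PatchDeployment with new values
+        #         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
+        #     )
+        #
+        # leaving fields outside the mask untouched on the server.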
+ if not isinstance(request, patch_deployments.UpdatePatchDeploymentRequest): + request = patch_deployments.UpdatePatchDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if patch_deployment is not None: + request.patch_deployment = patch_deployment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_patch_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("patch_deployment.name", request.patch_deployment.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def pause_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.PausePatchDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Change state of patch deployment to "PAUSED". + Patch deployment in paused state doesn't generate patch + jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_pause_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.PausePatchDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.pause_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.PausePatchDeploymentRequest, dict]): + The request object. A request message for pausing a patch + deployment. + name (str): + Required. The resource name of the patch deployment in + the form ``projects/*/patchDeployments/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
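+        # Note: ``request`` and the flattened ``name`` argument are mutually
+        # exclusive; callers use one style or the other, e.g. (placeholder
+        # resource name):
+        #
+        #     client.pause_patch_deployment(
+        #         name="projects/my-project/patchDeployments/my-deployment"
+        #     )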
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_deployments.PausePatchDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_deployments.PausePatchDeploymentRequest): + request = patch_deployments.PausePatchDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_patch_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resume_patch_deployment( + self, + request: Optional[ + Union[patch_deployments.ResumePatchDeploymentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Change state of patch deployment back to "ACTIVE". + Patch deployment in active state continues to generate + patch jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_resume_patch_deployment(): + # Create a client + client = osconfig_v1.OsConfigServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ResumePatchDeploymentRequest( + name="name_value", + ) + + # Make the request + response = client.resume_patch_deployment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ResumePatchDeploymentRequest, dict]): + The request object. A request message for resuming a + patch deployment. + name (str): + Required. The resource name of the patch deployment in + the form ``projects/*/patchDeployments/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. 
For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a patch_deployments.ResumePatchDeploymentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, patch_deployments.ResumePatchDeploymentRequest): + request = patch_deployments.ResumePatchDeploymentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_patch_deployment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "OsConfigServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OsConfigServiceClient",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/pagers.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/pagers.py new file mode 100644 index 000000000000..9405c486135b --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/pagers.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.osconfig_v1.types import patch_deployments, patch_jobs + + +class ListPatchJobsPager: + """A pager for iterating through ``list_patch_jobs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListPatchJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``patch_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPatchJobs`` requests and continue to iterate + through the ``patch_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., patch_jobs.ListPatchJobsResponse], + request: patch_jobs.ListPatchJobsRequest, + response: patch_jobs.ListPatchJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListPatchJobsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListPatchJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = patch_jobs.ListPatchJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[patch_jobs.ListPatchJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[patch_jobs.PatchJob]: + for page in self.pages: + yield from page.patch_jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPatchJobsAsyncPager: + """A pager for iterating through ``list_patch_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListPatchJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``patch_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPatchJobs`` requests and continue to iterate + through the ``patch_jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[patch_jobs.ListPatchJobsResponse]], + request: patch_jobs.ListPatchJobsRequest, + response: patch_jobs.ListPatchJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListPatchJobsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListPatchJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = patch_jobs.ListPatchJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[patch_jobs.ListPatchJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[patch_jobs.PatchJob]: + async def async_generator(): + async for page in self.pages: + for response in page.patch_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPatchJobInstanceDetailsPager: + """A pager for iterating through ``list_patch_job_instance_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``patch_job_instance_details`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPatchJobInstanceDetails`` requests and continue to iterate + through the ``patch_job_instance_details`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., patch_jobs.ListPatchJobInstanceDetailsResponse], + request: patch_jobs.ListPatchJobInstanceDetailsRequest, + response: patch_jobs.ListPatchJobInstanceDetailsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = patch_jobs.ListPatchJobInstanceDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[patch_jobs.ListPatchJobInstanceDetailsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[patch_jobs.PatchJobInstanceDetails]: + for page in self.pages: + yield from page.patch_job_instance_details + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPatchJobInstanceDetailsAsyncPager: + """A pager for iterating through ``list_patch_job_instance_details`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``patch_job_instance_details`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPatchJobInstanceDetails`` requests and continue to iterate + through the ``patch_job_instance_details`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[patch_jobs.ListPatchJobInstanceDetailsResponse] + ], + request: patch_jobs.ListPatchJobInstanceDetailsRequest, + response: patch_jobs.ListPatchJobInstanceDetailsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = patch_jobs.ListPatchJobInstanceDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[patch_jobs.ListPatchJobInstanceDetailsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[patch_jobs.PatchJobInstanceDetails]: + async def async_generator(): + async for page in self.pages: + for response in page.patch_job_instance_details: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPatchDeploymentsPager: + """A pager for iterating through ``list_patch_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``patch_deployments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPatchDeployments`` requests and continue to iterate + through the ``patch_deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., patch_deployments.ListPatchDeploymentsResponse], + request: patch_deployments.ListPatchDeploymentsRequest, + response: patch_deployments.ListPatchDeploymentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListPatchDeploymentsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = patch_deployments.ListPatchDeploymentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[patch_deployments.ListPatchDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[patch_deployments.PatchDeployment]: + for page in self.pages: + yield from page.patch_deployments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPatchDeploymentsAsyncPager: + """A pager for iterating through ``list_patch_deployments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``patch_deployments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPatchDeployments`` requests and continue to iterate + through the ``patch_deployments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[patch_deployments.ListPatchDeploymentsResponse] + ], + request: patch_deployments.ListPatchDeploymentsRequest, + response: patch_deployments.ListPatchDeploymentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListPatchDeploymentsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = patch_deployments.ListPatchDeploymentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[patch_deployments.ListPatchDeploymentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[patch_deployments.PatchDeployment]: + async def async_generator(): + async for page in self.pages: + for response in page.patch_deployments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/__init__.py new file mode 100644 index 000000000000..87ffdec3bd71 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OsConfigServiceTransport +from .grpc import OsConfigServiceGrpcTransport +from .grpc_asyncio import OsConfigServiceGrpcAsyncIOTransport +from .rest import OsConfigServiceRestInterceptor, OsConfigServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[OsConfigServiceTransport]] +_transport_registry["grpc"] = OsConfigServiceGrpcTransport +_transport_registry["grpc_asyncio"] = OsConfigServiceGrpcAsyncIOTransport +_transport_registry["rest"] = OsConfigServiceRestTransport + +__all__ = ( + "OsConfigServiceTransport", + "OsConfigServiceGrpcTransport", + "OsConfigServiceGrpcAsyncIOTransport", + "OsConfigServiceRestTransport", + "OsConfigServiceRestInterceptor", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/base.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/base.py new file mode 100644 index 000000000000..368eb882159c --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/base.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.osconfig_v1 import gapic_version as package_version +from google.cloud.osconfig_v1.types import patch_deployments, patch_jobs + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class OsConfigServiceTransport(abc.ABC): + """Abstract transport class for OsConfigService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "osconfig.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
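+        # Note on the resolution order implemented below: explicit
+        # ``credentials`` win; otherwise ``credentials_file`` is loaded via
+        # google.auth.load_credentials_from_file; otherwise Application
+        # Default Credentials are resolved via google.auth.default(). Passing
+        # both ``credentials`` and ``credentials_file`` raises
+        # DuplicateCredentialArgs.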
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.execute_patch_job: gapic_v1.method.wrap_method( + self.execute_patch_job, + default_timeout=None, + client_info=client_info, + ), + self.get_patch_job: gapic_v1.method.wrap_method( + self.get_patch_job, + default_timeout=None, + client_info=client_info, + ), + self.cancel_patch_job: gapic_v1.method.wrap_method( + self.cancel_patch_job, + default_timeout=None, + client_info=client_info, + ), + self.list_patch_jobs: gapic_v1.method.wrap_method( + self.list_patch_jobs, + default_timeout=None, + client_info=client_info, + ), + self.list_patch_job_instance_details: gapic_v1.method.wrap_method( + self.list_patch_job_instance_details, + default_timeout=None, + client_info=client_info, + ), + self.create_patch_deployment: gapic_v1.method.wrap_method( + self.create_patch_deployment, + default_timeout=None, + client_info=client_info, + ), + self.get_patch_deployment: gapic_v1.method.wrap_method( + self.get_patch_deployment, + default_timeout=None, + client_info=client_info, + ), + self.list_patch_deployments: gapic_v1.method.wrap_method( + self.list_patch_deployments, + default_timeout=None, + client_info=client_info, + ), + self.delete_patch_deployment: gapic_v1.method.wrap_method( + self.delete_patch_deployment, + default_timeout=None, + client_info=client_info, + ), + self.update_patch_deployment: gapic_v1.method.wrap_method( + self.update_patch_deployment, + default_timeout=None, + client_info=client_info, + ), + self.pause_patch_deployment: gapic_v1.method.wrap_method( + self.pause_patch_deployment, + default_timeout=None, + client_info=client_info, + ), + self.resume_patch_deployment: gapic_v1.method.wrap_method( + self.resume_patch_deployment, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def execute_patch_job( + self, + ) -> Callable[ + [patch_jobs.ExecutePatchJobRequest], + Union[patch_jobs.PatchJob, Awaitable[patch_jobs.PatchJob]], + ]: + raise NotImplementedError() + + @property + def get_patch_job( + self, + ) -> Callable[ + [patch_jobs.GetPatchJobRequest], + Union[patch_jobs.PatchJob, Awaitable[patch_jobs.PatchJob]], + ]: + raise NotImplementedError() + + @property + def cancel_patch_job( + self, + ) -> Callable[ + [patch_jobs.CancelPatchJobRequest], + Union[patch_jobs.PatchJob, Awaitable[patch_jobs.PatchJob]], + ]: + raise NotImplementedError() + + @property + def list_patch_jobs( + self, + ) -> Callable[ + [patch_jobs.ListPatchJobsRequest], + Union[ + patch_jobs.ListPatchJobsResponse, + Awaitable[patch_jobs.ListPatchJobsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_patch_job_instance_details( + self, + ) -> Callable[ + [patch_jobs.ListPatchJobInstanceDetailsRequest], + Union[ + patch_jobs.ListPatchJobInstanceDetailsResponse, + Awaitable[patch_jobs.ListPatchJobInstanceDetailsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.CreatePatchDeploymentRequest], + Union[ + patch_deployments.PatchDeployment, + Awaitable[patch_deployments.PatchDeployment], + ], + ]: + raise NotImplementedError() + + @property + def get_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.GetPatchDeploymentRequest], + Union[ + patch_deployments.PatchDeployment, + Awaitable[patch_deployments.PatchDeployment], + ], + ]: + raise NotImplementedError() + + @property + def list_patch_deployments( + self, + ) -> Callable[ + [patch_deployments.ListPatchDeploymentsRequest], + Union[ + patch_deployments.ListPatchDeploymentsResponse, + Awaitable[patch_deployments.ListPatchDeploymentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.DeletePatchDeploymentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def update_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.UpdatePatchDeploymentRequest], + Union[ + patch_deployments.PatchDeployment, + Awaitable[patch_deployments.PatchDeployment], + ], + ]: + raise NotImplementedError() + + @property + def pause_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.PausePatchDeploymentRequest], + Union[ + patch_deployments.PatchDeployment, + Awaitable[patch_deployments.PatchDeployment], + ], + ]: + raise NotImplementedError() + + @property + def resume_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.ResumePatchDeploymentRequest], + Union[ + patch_deployments.PatchDeployment, + Awaitable[patch_deployments.PatchDeployment], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("OsConfigServiceTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/grpc.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/grpc.py new file mode 100644 index 000000000000..cd722e47de2e --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/grpc.py @@ -0,0 +1,587 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.osconfig_v1.types import patch_deployments, patch_jobs + +from .base import DEFAULT_CLIENT_INFO, OsConfigServiceTransport + + +class OsConfigServiceGrpcTransport(OsConfigServiceTransport): + """gRPC backend transport for OsConfigService. + + OS Config API + + The OS Config service is a server-side component that you can + use to manage package installations and patch jobs for virtual + machine instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "osconfig.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. 
A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "osconfig.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def execute_patch_job( + self, + ) -> Callable[[patch_jobs.ExecutePatchJobRequest], patch_jobs.PatchJob]: + r"""Return a callable for the execute patch job method over gRPC. + + Patch VM instances by creating and running a patch + job. + + Returns: + Callable[[~.ExecutePatchJobRequest], + ~.PatchJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_patch_job" not in self._stubs: + self._stubs["execute_patch_job"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ExecutePatchJob", + request_serializer=patch_jobs.ExecutePatchJobRequest.serialize, + response_deserializer=patch_jobs.PatchJob.deserialize, + ) + return self._stubs["execute_patch_job"] + + @property + def get_patch_job( + self, + ) -> Callable[[patch_jobs.GetPatchJobRequest], patch_jobs.PatchJob]: + r"""Return a callable for the get patch job method over gRPC. + + Get the patch job. This can be used to track the + progress of an ongoing patch job or review the details + of completed jobs. + + Returns: + Callable[[~.GetPatchJobRequest], + ~.PatchJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
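+ # NOTE: the stub created below is cached in ``self._stubs``, so repeated + # accesses to this property reuse the same channel-bound callable rather + # than registering a new one. A caller holding this transport can invoke + # it directly, for example (the resource name is a hypothetical placeholder): + # response = transport.get_patch_job( + # patch_jobs.GetPatchJobRequest(name="projects/my-project/patchJobs/123") + # )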
+ if "get_patch_job" not in self._stubs: + self._stubs["get_patch_job"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/GetPatchJob", + request_serializer=patch_jobs.GetPatchJobRequest.serialize, + response_deserializer=patch_jobs.PatchJob.deserialize, + ) + return self._stubs["get_patch_job"] + + @property + def cancel_patch_job( + self, + ) -> Callable[[patch_jobs.CancelPatchJobRequest], patch_jobs.PatchJob]: + r"""Return a callable for the cancel patch job method over gRPC. + + Cancel a patch job. The patch job must be active. + Canceled patch jobs cannot be restarted. + + Returns: + Callable[[~.CancelPatchJobRequest], + ~.PatchJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_patch_job" not in self._stubs: + self._stubs["cancel_patch_job"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/CancelPatchJob", + request_serializer=patch_jobs.CancelPatchJobRequest.serialize, + response_deserializer=patch_jobs.PatchJob.deserialize, + ) + return self._stubs["cancel_patch_job"] + + @property + def list_patch_jobs( + self, + ) -> Callable[[patch_jobs.ListPatchJobsRequest], patch_jobs.ListPatchJobsResponse]: + r"""Return a callable for the list patch jobs method over gRPC. + + Get a list of patch jobs. + + Returns: + Callable[[~.ListPatchJobsRequest], + ~.ListPatchJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_patch_jobs" not in self._stubs: + self._stubs["list_patch_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ListPatchJobs", + request_serializer=patch_jobs.ListPatchJobsRequest.serialize, + response_deserializer=patch_jobs.ListPatchJobsResponse.deserialize, + ) + return self._stubs["list_patch_jobs"] + + @property + def list_patch_job_instance_details( + self, + ) -> Callable[ + [patch_jobs.ListPatchJobInstanceDetailsRequest], + patch_jobs.ListPatchJobInstanceDetailsResponse, + ]: + r"""Return a callable for the list patch job instance + details method over gRPC. + + Get a list of instance details for a given patch job. + + Returns: + Callable[[~.ListPatchJobInstanceDetailsRequest], + ~.ListPatchJobInstanceDetailsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
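+ # NOTE: this transport-level callable returns one raw + # ListPatchJobInstanceDetailsResponse page at a time; iterating across + # pages (the pager objects) is layered on top of this transport by the client.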
+ if "list_patch_job_instance_details" not in self._stubs: + self._stubs[ + "list_patch_job_instance_details" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ListPatchJobInstanceDetails", + request_serializer=patch_jobs.ListPatchJobInstanceDetailsRequest.serialize, + response_deserializer=patch_jobs.ListPatchJobInstanceDetailsResponse.deserialize, + ) + return self._stubs["list_patch_job_instance_details"] + + @property + def create_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.CreatePatchDeploymentRequest], + patch_deployments.PatchDeployment, + ]: + r"""Return a callable for the create patch deployment method over gRPC. + + Create an OS Config patch deployment. + + Returns: + Callable[[~.CreatePatchDeploymentRequest], + ~.PatchDeployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_patch_deployment" not in self._stubs: + self._stubs["create_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/CreatePatchDeployment", + request_serializer=patch_deployments.CreatePatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["create_patch_deployment"] + + @property + def get_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.GetPatchDeploymentRequest], patch_deployments.PatchDeployment + ]: + r"""Return a callable for the get patch deployment method over gRPC. + + Get an OS Config patch deployment. + + Returns: + Callable[[~.GetPatchDeploymentRequest], + ~.PatchDeployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_patch_deployment" not in self._stubs: + self._stubs["get_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/GetPatchDeployment", + request_serializer=patch_deployments.GetPatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["get_patch_deployment"] + + @property + def list_patch_deployments( + self, + ) -> Callable[ + [patch_deployments.ListPatchDeploymentsRequest], + patch_deployments.ListPatchDeploymentsResponse, + ]: + r"""Return a callable for the list patch deployments method over gRPC. + + Get a page of OS Config patch deployments. + + Returns: + Callable[[~.ListPatchDeploymentsRequest], + ~.ListPatchDeploymentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_patch_deployments" not in self._stubs: + self._stubs["list_patch_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ListPatchDeployments", + request_serializer=patch_deployments.ListPatchDeploymentsRequest.serialize, + response_deserializer=patch_deployments.ListPatchDeploymentsResponse.deserialize, + ) + return self._stubs["list_patch_deployments"] + + @property + def delete_patch_deployment( + self, + ) -> Callable[[patch_deployments.DeletePatchDeploymentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete patch deployment method over gRPC. + + Delete an OS Config patch deployment. + + Returns: + Callable[[~.DeletePatchDeploymentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_patch_deployment" not in self._stubs: + self._stubs["delete_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/DeletePatchDeployment", + request_serializer=patch_deployments.DeletePatchDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_patch_deployment"] + + @property + def update_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.UpdatePatchDeploymentRequest], + patch_deployments.PatchDeployment, + ]: + r"""Return a callable for the update patch deployment method over gRPC. + + Update an OS Config patch deployment. + + Returns: + Callable[[~.UpdatePatchDeploymentRequest], + ~.PatchDeployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_patch_deployment" not in self._stubs: + self._stubs["update_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/UpdatePatchDeployment", + request_serializer=patch_deployments.UpdatePatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["update_patch_deployment"] + + @property + def pause_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.PausePatchDeploymentRequest], + patch_deployments.PatchDeployment, + ]: + r"""Return a callable for the pause patch deployment method over gRPC. + + Change state of patch deployment to "PAUSED". + Patch deployment in paused state doesn't generate patch + jobs. + + Returns: + Callable[[~.PausePatchDeploymentRequest], + ~.PatchDeployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "pause_patch_deployment" not in self._stubs: + self._stubs["pause_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/PausePatchDeployment", + request_serializer=patch_deployments.PausePatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["pause_patch_deployment"] + + @property + def resume_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.ResumePatchDeploymentRequest], + patch_deployments.PatchDeployment, + ]: + r"""Return a callable for the resume patch deployment method over gRPC. + + Change state of patch deployment back to "ACTIVE". + Patch deployment in active state continues to generate + patch jobs. + + Returns: + Callable[[~.ResumePatchDeploymentRequest], + ~.PatchDeployment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_patch_deployment" not in self._stubs: + self._stubs["resume_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ResumePatchDeployment", + request_serializer=patch_deployments.ResumePatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["resume_patch_deployment"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("OsConfigServiceGrpcTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/grpc_asyncio.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..cc8eefc26957 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/grpc_asyncio.py @@ -0,0 +1,591 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.osconfig_v1.types import patch_deployments, patch_jobs + +from .base import DEFAULT_CLIENT_INFO, OsConfigServiceTransport +from .grpc import OsConfigServiceGrpcTransport + + +class OsConfigServiceGrpcAsyncIOTransport(OsConfigServiceTransport): + """gRPC AsyncIO backend transport for OsConfigService. 
+ + OS Config API + + The OS Config service is a server-side component that you can + use to manage package installations and patch jobs for virtual + machine instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "osconfig.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "osconfig.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service.
These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
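+ # NOTE: an explicit ``client_cert_source`` callback takes precedence; + # otherwise the application-default SSL credentials resolved by + # ``SslCredentials()`` are used.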
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def execute_patch_job( + self, + ) -> Callable[[patch_jobs.ExecutePatchJobRequest], Awaitable[patch_jobs.PatchJob]]: + r"""Return a callable for the execute patch job method over gRPC. + + Patch VM instances by creating and running a patch + job. + + Returns: + Callable[[~.ExecutePatchJobRequest], + Awaitable[~.PatchJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_patch_job" not in self._stubs: + self._stubs["execute_patch_job"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ExecutePatchJob", + request_serializer=patch_jobs.ExecutePatchJobRequest.serialize, + response_deserializer=patch_jobs.PatchJob.deserialize, + ) + return self._stubs["execute_patch_job"] + + @property + def get_patch_job( + self, + ) -> Callable[[patch_jobs.GetPatchJobRequest], Awaitable[patch_jobs.PatchJob]]: + r"""Return a callable for the get patch job method over gRPC. + + Get the patch job. This can be used to track the + progress of an ongoing patch job or review the details + of completed jobs. + + Returns: + Callable[[~.GetPatchJobRequest], + Awaitable[~.PatchJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
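+ # NOTE: the AsyncIO stubs below use the same fully-qualified gRPC method + # paths and message (de)serializers as the synchronous transport; only the + # channel type (``aio.Channel``) differs.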
+ if "get_patch_job" not in self._stubs: + self._stubs["get_patch_job"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/GetPatchJob", + request_serializer=patch_jobs.GetPatchJobRequest.serialize, + response_deserializer=patch_jobs.PatchJob.deserialize, + ) + return self._stubs["get_patch_job"] + + @property + def cancel_patch_job( + self, + ) -> Callable[[patch_jobs.CancelPatchJobRequest], Awaitable[patch_jobs.PatchJob]]: + r"""Return a callable for the cancel patch job method over gRPC. + + Cancel a patch job. The patch job must be active. + Canceled patch jobs cannot be restarted. + + Returns: + Callable[[~.CancelPatchJobRequest], + Awaitable[~.PatchJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_patch_job" not in self._stubs: + self._stubs["cancel_patch_job"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/CancelPatchJob", + request_serializer=patch_jobs.CancelPatchJobRequest.serialize, + response_deserializer=patch_jobs.PatchJob.deserialize, + ) + return self._stubs["cancel_patch_job"] + + @property + def list_patch_jobs( + self, + ) -> Callable[ + [patch_jobs.ListPatchJobsRequest], Awaitable[patch_jobs.ListPatchJobsResponse] + ]: + r"""Return a callable for the list patch jobs method over gRPC. + + Get a list of patch jobs. + + Returns: + Callable[[~.ListPatchJobsRequest], + Awaitable[~.ListPatchJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_patch_jobs" not in self._stubs: + self._stubs["list_patch_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ListPatchJobs", + request_serializer=patch_jobs.ListPatchJobsRequest.serialize, + response_deserializer=patch_jobs.ListPatchJobsResponse.deserialize, + ) + return self._stubs["list_patch_jobs"] + + @property + def list_patch_job_instance_details( + self, + ) -> Callable[ + [patch_jobs.ListPatchJobInstanceDetailsRequest], + Awaitable[patch_jobs.ListPatchJobInstanceDetailsResponse], + ]: + r"""Return a callable for the list patch job instance + details method over gRPC. + + Get a list of instance details for a given patch job. + + Returns: + Callable[[~.ListPatchJobInstanceDetailsRequest], + Awaitable[~.ListPatchJobInstanceDetailsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
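+ # NOTE: on the aio channel the returned callable produces an awaitable call + # object, so the RPC result must be awaited, for example (illustrative only): + # response = await transport.list_patch_job_instance_details(request)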
+ if "list_patch_job_instance_details" not in self._stubs: + self._stubs[ + "list_patch_job_instance_details" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ListPatchJobInstanceDetails", + request_serializer=patch_jobs.ListPatchJobInstanceDetailsRequest.serialize, + response_deserializer=patch_jobs.ListPatchJobInstanceDetailsResponse.deserialize, + ) + return self._stubs["list_patch_job_instance_details"] + + @property + def create_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.CreatePatchDeploymentRequest], + Awaitable[patch_deployments.PatchDeployment], + ]: + r"""Return a callable for the create patch deployment method over gRPC. + + Create an OS Config patch deployment. + + Returns: + Callable[[~.CreatePatchDeploymentRequest], + Awaitable[~.PatchDeployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_patch_deployment" not in self._stubs: + self._stubs["create_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/CreatePatchDeployment", + request_serializer=patch_deployments.CreatePatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["create_patch_deployment"] + + @property + def get_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.GetPatchDeploymentRequest], + Awaitable[patch_deployments.PatchDeployment], + ]: + r"""Return a callable for the get patch deployment method over gRPC. + + Get an OS Config patch deployment. + + Returns: + Callable[[~.GetPatchDeploymentRequest], + Awaitable[~.PatchDeployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_patch_deployment" not in self._stubs: + self._stubs["get_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/GetPatchDeployment", + request_serializer=patch_deployments.GetPatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["get_patch_deployment"] + + @property + def list_patch_deployments( + self, + ) -> Callable[ + [patch_deployments.ListPatchDeploymentsRequest], + Awaitable[patch_deployments.ListPatchDeploymentsResponse], + ]: + r"""Return a callable for the list patch deployments method over gRPC. + + Get a page of OS Config patch deployments. + + Returns: + Callable[[~.ListPatchDeploymentsRequest], + Awaitable[~.ListPatchDeploymentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_patch_deployments" not in self._stubs: + self._stubs["list_patch_deployments"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ListPatchDeployments", + request_serializer=patch_deployments.ListPatchDeploymentsRequest.serialize, + response_deserializer=patch_deployments.ListPatchDeploymentsResponse.deserialize, + ) + return self._stubs["list_patch_deployments"] + + @property + def delete_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.DeletePatchDeploymentRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete patch deployment method over gRPC. + + Delete an OS Config patch deployment. + + Returns: + Callable[[~.DeletePatchDeploymentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_patch_deployment" not in self._stubs: + self._stubs["delete_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/DeletePatchDeployment", + request_serializer=patch_deployments.DeletePatchDeploymentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_patch_deployment"] + + @property + def update_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.UpdatePatchDeploymentRequest], + Awaitable[patch_deployments.PatchDeployment], + ]: + r"""Return a callable for the update patch deployment method over gRPC. + + Update an OS Config patch deployment. + + Returns: + Callable[[~.UpdatePatchDeploymentRequest], + Awaitable[~.PatchDeployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_patch_deployment" not in self._stubs: + self._stubs["update_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/UpdatePatchDeployment", + request_serializer=patch_deployments.UpdatePatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["update_patch_deployment"] + + @property + def pause_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.PausePatchDeploymentRequest], + Awaitable[patch_deployments.PatchDeployment], + ]: + r"""Return a callable for the pause patch deployment method over gRPC. + + Change state of patch deployment to "PAUSED". + Patch deployment in paused state doesn't generate patch + jobs. + + Returns: + Callable[[~.PausePatchDeploymentRequest], + Awaitable[~.PatchDeployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "pause_patch_deployment" not in self._stubs: + self._stubs["pause_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/PausePatchDeployment", + request_serializer=patch_deployments.PausePatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["pause_patch_deployment"] + + @property + def resume_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.ResumePatchDeploymentRequest], + Awaitable[patch_deployments.PatchDeployment], + ]: + r"""Return a callable for the resume patch deployment method over gRPC. + + Change state of patch deployment back to "ACTIVE". + Patch deployment in active state continues to generate + patch jobs. + + Returns: + Callable[[~.ResumePatchDeploymentRequest], + Awaitable[~.PatchDeployment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_patch_deployment" not in self._stubs: + self._stubs["resume_patch_deployment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigService/ResumePatchDeployment", + request_serializer=patch_deployments.ResumePatchDeploymentRequest.serialize, + response_deserializer=patch_deployments.PatchDeployment.deserialize, + ) + return self._stubs["resume_patch_deployment"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("OsConfigServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/rest.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/rest.py new file mode 100644 index 000000000000..8abc50fd2f12 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_service/transports/rest.py @@ -0,0 +1,1843 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.osconfig_v1.types import patch_deployments, patch_jobs + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import OsConfigServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class OsConfigServiceRestInterceptor: + """Interceptor for OsConfigService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OsConfigServiceRestTransport. + + .. 
code-block:: python + class MyCustomOsConfigServiceInterceptor(OsConfigServiceRestInterceptor): + def pre_cancel_patch_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_patch_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_patch_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_patch_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_patch_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_execute_patch_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_patch_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_patch_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_patch_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_patch_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_patch_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_patch_deployments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_patch_deployments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_patch_job_instance_details(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_patch_job_instance_details(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_patch_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_patch_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_pause_patch_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_pause_patch_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resume_patch_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_resume_patch_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_patch_deployment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_patch_deployment(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OsConfigServiceRestTransport(interceptor=MyCustomOsConfigServiceInterceptor()) + client = OsConfigServiceClient(transport=transport) + + + """ + + def pre_cancel_patch_job( + self, + request: patch_jobs.CancelPatchJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[patch_jobs.CancelPatchJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_patch_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. 
+ """ + return request, metadata + + def post_cancel_patch_job( + self, response: patch_jobs.PatchJob + ) -> patch_jobs.PatchJob: + """Post-rpc interceptor for cancel_patch_job + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_create_patch_deployment( + self, + request: patch_deployments.CreatePatchDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + patch_deployments.CreatePatchDeploymentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_patch_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_create_patch_deployment( + self, response: patch_deployments.PatchDeployment + ) -> patch_deployments.PatchDeployment: + """Post-rpc interceptor for create_patch_deployment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_delete_patch_deployment( + self, + request: patch_deployments.DeletePatchDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + patch_deployments.DeletePatchDeploymentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_patch_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def pre_execute_patch_job( + self, + request: patch_jobs.ExecutePatchJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[patch_jobs.ExecutePatchJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for execute_patch_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_execute_patch_job( + self, response: patch_jobs.PatchJob + ) -> patch_jobs.PatchJob: + """Post-rpc interceptor for execute_patch_job + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_get_patch_deployment( + self, + request: patch_deployments.GetPatchDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[patch_deployments.GetPatchDeploymentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_patch_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_get_patch_deployment( + self, response: patch_deployments.PatchDeployment + ) -> patch_deployments.PatchDeployment: + """Post-rpc interceptor for get_patch_deployment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_get_patch_job( + self, + request: patch_jobs.GetPatchJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[patch_jobs.GetPatchJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_patch_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. 
+ """ + return request, metadata + + def post_get_patch_job(self, response: patch_jobs.PatchJob) -> patch_jobs.PatchJob: + """Post-rpc interceptor for get_patch_job + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_list_patch_deployments( + self, + request: patch_deployments.ListPatchDeploymentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + patch_deployments.ListPatchDeploymentsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_patch_deployments + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_list_patch_deployments( + self, response: patch_deployments.ListPatchDeploymentsResponse + ) -> patch_deployments.ListPatchDeploymentsResponse: + """Post-rpc interceptor for list_patch_deployments + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_list_patch_job_instance_details( + self, + request: patch_jobs.ListPatchJobInstanceDetailsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + patch_jobs.ListPatchJobInstanceDetailsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_patch_job_instance_details + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_list_patch_job_instance_details( + self, response: patch_jobs.ListPatchJobInstanceDetailsResponse + ) -> patch_jobs.ListPatchJobInstanceDetailsResponse: + """Post-rpc interceptor for list_patch_job_instance_details + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_list_patch_jobs( + self, + request: patch_jobs.ListPatchJobsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[patch_jobs.ListPatchJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_patch_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_list_patch_jobs( + self, response: patch_jobs.ListPatchJobsResponse + ) -> patch_jobs.ListPatchJobsResponse: + """Post-rpc interceptor for list_patch_jobs + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_pause_patch_deployment( + self, + request: patch_deployments.PausePatchDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + patch_deployments.PausePatchDeploymentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for pause_patch_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_pause_patch_deployment( + self, response: patch_deployments.PatchDeployment + ) -> patch_deployments.PatchDeployment: + """Post-rpc interceptor for pause_patch_deployment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. 
+ """ + return response + + def pre_resume_patch_deployment( + self, + request: patch_deployments.ResumePatchDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + patch_deployments.ResumePatchDeploymentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for resume_patch_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_resume_patch_deployment( + self, response: patch_deployments.PatchDeployment + ) -> patch_deployments.PatchDeployment: + """Post-rpc interceptor for resume_patch_deployment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + def pre_update_patch_deployment( + self, + request: patch_deployments.UpdatePatchDeploymentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + patch_deployments.UpdatePatchDeploymentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_patch_deployment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigService server. + """ + return request, metadata + + def post_update_patch_deployment( + self, response: patch_deployments.PatchDeployment + ) -> patch_deployments.PatchDeployment: + """Post-rpc interceptor for update_patch_deployment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OsConfigServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OsConfigServiceRestInterceptor + + +class OsConfigServiceRestTransport(OsConfigServiceTransport): + """REST backend transport for OsConfigService. + + OS Config API + + The OS Config service is a server-side component that you can + use to manage package installations and patch jobs for virtual + machine instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "osconfig.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[OsConfigServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. 
This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or OsConfigServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CancelPatchJob(OsConfigServiceRestStub): + def __hash__(self): + return hash("CancelPatchJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_jobs.CancelPatchJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Call the cancel patch job method over HTTP. + + Args: + request (~.patch_jobs.CancelPatchJobRequest): + The request object. Message for canceling a patch job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_jobs.PatchJob: + A high level representation of a patch job that is + either in progress or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see `Creating + patch + jobs <https://cloud.google.com/compute/docs/os-patch-management/create-patch-job>`__.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/patchJobs/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_patch_job( + request, metadata + ) + pb_request = patch_jobs.CancelPatchJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_jobs.PatchJob() + pb_resp = patch_jobs.PatchJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_patch_job(resp) + return resp + + class _CreatePatchDeployment(OsConfigServiceRestStub): + def __hash__(self): + return hash("CreatePatchDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "patchDeploymentId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_deployments.CreatePatchDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Call the create patch deployment method over HTTP. + + Args: + request (~.patch_deployments.CreatePatchDeploymentRequest): + The request object. A request message for creating a + patch deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_deployments.PatchDeployment: + Patch deployments are configurations that individual + patch jobs use to complete a patch. These configurations + include instance filter, package repository settings, + and a schedule. For more information about creating and + managing patch deployments, see `Scheduling patch + jobs `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/patchDeployments", + "body": "patch_deployment", + }, + ] + request, metadata = self._interceptor.pre_create_patch_deployment( + request, metadata + ) + pb_request = patch_deployments.CreatePatchDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_deployments.PatchDeployment() + pb_resp = patch_deployments.PatchDeployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_patch_deployment(resp) + return resp + + class _DeletePatchDeployment(OsConfigServiceRestStub): + def __hash__(self): + return hash("DeletePatchDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_deployments.DeletePatchDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete patch deployment method over HTTP. + + Args: + request (~.patch_deployments.DeletePatchDeploymentRequest): + The request object. A request message for deleting a + patch deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/patchDeployments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_patch_deployment( + request, metadata + ) + pb_request = patch_deployments.DeletePatchDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ExecutePatchJob(OsConfigServiceRestStub): + def __hash__(self): + return hash("ExecutePatchJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_jobs.ExecutePatchJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Call the execute patch job method over HTTP. + + Args: + request (~.patch_jobs.ExecutePatchJobRequest): + The request object. A request message to initiate + patching across Compute Engine + instances. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_jobs.PatchJob: + A high level representation of a patch job that is + either in progress or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see `Creating + patch + jobs `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/patchJobs:execute", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_execute_patch_job( + request, metadata + ) + pb_request = patch_jobs.ExecutePatchJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_jobs.PatchJob() + pb_resp = patch_jobs.PatchJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_execute_patch_job(resp) + return resp + + class _GetPatchDeployment(OsConfigServiceRestStub): + def __hash__(self): + return hash("GetPatchDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_deployments.GetPatchDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Call the get patch deployment method over HTTP. + + Args: + request (~.patch_deployments.GetPatchDeploymentRequest): + The request object. A request message for retrieving a + patch deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_deployments.PatchDeployment: + Patch deployments are configurations that individual + patch jobs use to complete a patch. These configurations + include instance filter, package repository settings, + and a schedule. For more information about creating and + managing patch deployments, see `Scheduling patch + jobs `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/patchDeployments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_patch_deployment( + request, metadata + ) + pb_request = patch_deployments.GetPatchDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_deployments.PatchDeployment() + pb_resp = patch_deployments.PatchDeployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_patch_deployment(resp) + return resp + + class _GetPatchJob(OsConfigServiceRestStub): + def __hash__(self): + return hash("GetPatchJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_jobs.GetPatchJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.PatchJob: + r"""Call the get patch job method over HTTP. + + Args: + request (~.patch_jobs.GetPatchJobRequest): + The request object. Request to get an active or completed + patch job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_jobs.PatchJob: + A high level representation of a patch job that is + either in progress or has completed. + + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. + + For more information about patch jobs, see `Creating + patch + jobs `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/patchJobs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_patch_job(request, metadata) + pb_request = patch_jobs.GetPatchJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_jobs.PatchJob() + pb_resp = patch_jobs.PatchJob.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_patch_job(resp) + return resp + + class _ListPatchDeployments(OsConfigServiceRestStub): + def __hash__(self): + return hash("ListPatchDeployments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_deployments.ListPatchDeploymentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.ListPatchDeploymentsResponse: + r"""Call the list patch deployments method over HTTP. + + Args: + request (~.patch_deployments.ListPatchDeploymentsRequest): + The request object. A request message for listing patch + deployments. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_deployments.ListPatchDeploymentsResponse: + A response message for listing patch + deployments. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/patchDeployments", + }, + ] + request, metadata = self._interceptor.pre_list_patch_deployments( + request, metadata + ) + pb_request = patch_deployments.ListPatchDeploymentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_deployments.ListPatchDeploymentsResponse() + pb_resp = patch_deployments.ListPatchDeploymentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_patch_deployments(resp) + return resp + + class _ListPatchJobInstanceDetails(OsConfigServiceRestStub): + def __hash__(self): + return hash("ListPatchJobInstanceDetails") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_jobs.ListPatchJobInstanceDetailsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.ListPatchJobInstanceDetailsResponse: + r"""Call the list patch job instance + details method over HTTP. + + Args: + request (~.patch_jobs.ListPatchJobInstanceDetailsRequest): + The request object. Request to list details for all + instances that are part of a patch job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_jobs.ListPatchJobInstanceDetailsResponse: + A response message for listing the + instances details for a patch job. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/patchJobs/*}/instanceDetails", + }, + ] + request, metadata = self._interceptor.pre_list_patch_job_instance_details( + request, metadata + ) + pb_request = patch_jobs.ListPatchJobInstanceDetailsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_jobs.ListPatchJobInstanceDetailsResponse() + pb_resp = patch_jobs.ListPatchJobInstanceDetailsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_patch_job_instance_details(resp) + return resp + + class _ListPatchJobs(OsConfigServiceRestStub): + def __hash__(self): + return hash("ListPatchJobs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_jobs.ListPatchJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_jobs.ListPatchJobsResponse: + r"""Call the list patch jobs method over HTTP. + + Args: + request (~.patch_jobs.ListPatchJobsRequest): + The request object. A request message for listing patch + jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_jobs.ListPatchJobsResponse: + A response message for listing patch + jobs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/patchJobs", + }, + ] + request, metadata = self._interceptor.pre_list_patch_jobs(request, metadata) + pb_request = patch_jobs.ListPatchJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_jobs.ListPatchJobsResponse() + pb_resp = patch_jobs.ListPatchJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_patch_jobs(resp) + return resp + + class _PausePatchDeployment(OsConfigServiceRestStub): + def __hash__(self): + return hash("PausePatchDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_deployments.PausePatchDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Call the pause patch deployment method over HTTP. + + Args: + request (~.patch_deployments.PausePatchDeploymentRequest): + The request object. A request message for pausing a patch + deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_deployments.PatchDeployment: + Patch deployments are configurations that individual + patch jobs use to complete a patch. These configurations + include instance filter, package repository settings, + and a schedule. For more information about creating and + managing patch deployments, see `Scheduling patch + jobs `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/patchDeployments/*}:pause", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_pause_patch_deployment( + request, metadata + ) + pb_request = patch_deployments.PausePatchDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_deployments.PatchDeployment() + pb_resp = patch_deployments.PatchDeployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pause_patch_deployment(resp) + return resp + + class _ResumePatchDeployment(OsConfigServiceRestStub): + def __hash__(self): + return hash("ResumePatchDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_deployments.ResumePatchDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Call the resume patch deployment method over HTTP. + + Args: + request (~.patch_deployments.ResumePatchDeploymentRequest): + The request object. A request message for resuming a + patch deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_deployments.PatchDeployment: + Patch deployments are configurations that individual + patch jobs use to complete a patch. These configurations + include instance filter, package repository settings, + and a schedule. For more information about creating and + managing patch deployments, see `Scheduling patch + jobs `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/patchDeployments/*}:resume", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resume_patch_deployment( + request, metadata + ) + pb_request = patch_deployments.ResumePatchDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_deployments.PatchDeployment() + pb_resp = patch_deployments.PatchDeployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume_patch_deployment(resp) + return resp + + class _UpdatePatchDeployment(OsConfigServiceRestStub): + def __hash__(self): + return hash("UpdatePatchDeployment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: patch_deployments.UpdatePatchDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> patch_deployments.PatchDeployment: + r"""Call the update patch deployment method over HTTP. + + Args: + request (~.patch_deployments.UpdatePatchDeploymentRequest): + The request object. A request message for updating a + patch deployment. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.patch_deployments.PatchDeployment: + Patch deployments are configurations that individual + patch jobs use to complete a patch. These configurations + include instance filter, package repository settings, + and a schedule. For more information about creating and + managing patch deployments, see `Scheduling patch + jobs `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{patch_deployment.name=projects/*/patchDeployments/*}", + "body": "patch_deployment", + }, + ] + request, metadata = self._interceptor.pre_update_patch_deployment( + request, metadata + ) + pb_request = patch_deployments.UpdatePatchDeploymentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = patch_deployments.PatchDeployment() + pb_resp = patch_deployments.PatchDeployment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_patch_deployment(resp) + return resp + + @property + def cancel_patch_job( + self, + ) -> Callable[[patch_jobs.CancelPatchJobRequest], patch_jobs.PatchJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelPatchJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.CreatePatchDeploymentRequest], + patch_deployments.PatchDeployment, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreatePatchDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_patch_deployment( + self, + ) -> Callable[[patch_deployments.DeletePatchDeploymentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePatchDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def execute_patch_job( + self, + ) -> Callable[[patch_jobs.ExecutePatchJobRequest], patch_jobs.PatchJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecutePatchJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.GetPatchDeploymentRequest], patch_deployments.PatchDeployment + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetPatchDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_patch_job( + self, + ) -> Callable[[patch_jobs.GetPatchJobRequest], patch_jobs.PatchJob]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPatchJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_patch_deployments( + self, + ) -> Callable[ + [patch_deployments.ListPatchDeploymentsRequest], + patch_deployments.ListPatchDeploymentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPatchDeployments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_patch_job_instance_details( + self, + ) -> Callable[ + [patch_jobs.ListPatchJobInstanceDetailsRequest], + patch_jobs.ListPatchJobInstanceDetailsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPatchJobInstanceDetails(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_patch_jobs( + self, + ) -> Callable[[patch_jobs.ListPatchJobsRequest], patch_jobs.ListPatchJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPatchJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def pause_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.PausePatchDeploymentRequest], + patch_deployments.PatchDeployment, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PausePatchDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def resume_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.ResumePatchDeploymentRequest], + patch_deployments.PatchDeployment, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResumePatchDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_patch_deployment( + self, + ) -> Callable[ + [patch_deployments.UpdatePatchDeploymentRequest], + patch_deployments.PatchDeployment, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdatePatchDeployment(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("OsConfigServiceRestTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/__init__.py new file mode 100644 index 000000000000..39d10f171f77 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import OsConfigZonalServiceAsyncClient +from .client import OsConfigZonalServiceClient + +__all__ = ( + "OsConfigZonalServiceClient", + "OsConfigZonalServiceAsyncClient", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/async_client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/async_client.py new file mode 100644 index 000000000000..5782efe52b92 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/async_client.py @@ -0,0 +1,1854 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.osconfig_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.osconfig_v1.services.os_config_zonal_service import pagers +from google.cloud.osconfig_v1.types import ( + inventory, + os_policy, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +from .client import OsConfigZonalServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, OsConfigZonalServiceTransport +from .transports.grpc_asyncio import OsConfigZonalServiceGrpcAsyncIOTransport + + +class OsConfigZonalServiceAsyncClient: + """Zonal OS Config API + + The OS Config service is the server-side component that allows + users to manage package installations and patch jobs for Compute + Engine VM instances. + """ + + _client: OsConfigZonalServiceClient + + DEFAULT_ENDPOINT = OsConfigZonalServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = OsConfigZonalServiceClient.DEFAULT_MTLS_ENDPOINT + + instance_path = staticmethod(OsConfigZonalServiceClient.instance_path) + parse_instance_path = staticmethod(OsConfigZonalServiceClient.parse_instance_path) + instance_os_policy_assignment_path = staticmethod( + OsConfigZonalServiceClient.instance_os_policy_assignment_path + ) + parse_instance_os_policy_assignment_path = staticmethod( + OsConfigZonalServiceClient.parse_instance_os_policy_assignment_path + ) + inventory_path = staticmethod(OsConfigZonalServiceClient.inventory_path) + parse_inventory_path = staticmethod(OsConfigZonalServiceClient.parse_inventory_path) + os_policy_assignment_path = staticmethod( + OsConfigZonalServiceClient.os_policy_assignment_path + ) + parse_os_policy_assignment_path = staticmethod( + OsConfigZonalServiceClient.parse_os_policy_assignment_path + ) + os_policy_assignment_report_path = staticmethod( + OsConfigZonalServiceClient.os_policy_assignment_report_path + ) + parse_os_policy_assignment_report_path = staticmethod( + OsConfigZonalServiceClient.parse_os_policy_assignment_report_path + ) + vulnerability_report_path = staticmethod( + OsConfigZonalServiceClient.vulnerability_report_path + ) + parse_vulnerability_report_path = staticmethod( + OsConfigZonalServiceClient.parse_vulnerability_report_path + ) + common_billing_account_path = staticmethod( + OsConfigZonalServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + OsConfigZonalServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(OsConfigZonalServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + 
OsConfigZonalServiceClient.parse_common_folder_path
+    )
+    common_organization_path = staticmethod(
+        OsConfigZonalServiceClient.common_organization_path
+    )
+    parse_common_organization_path = staticmethod(
+        OsConfigZonalServiceClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(OsConfigZonalServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(
+        OsConfigZonalServiceClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(OsConfigZonalServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        OsConfigZonalServiceClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OsConfigZonalServiceAsyncClient: The constructed client.
+        """
+        return OsConfigZonalServiceClient.from_service_account_info.__func__(OsConfigZonalServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OsConfigZonalServiceAsyncClient: The constructed client.
+        """
+        return OsConfigZonalServiceClient.from_service_account_file.__func__(OsConfigZonalServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return OsConfigZonalServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> OsConfigZonalServiceTransport:
+        """Returns the transport used by the client instance.
+ + Returns: + OsConfigZonalServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(OsConfigZonalServiceClient).get_transport_class, + type(OsConfigZonalServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, OsConfigZonalServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the os config zonal service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.OsConfigZonalServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = OsConfigZonalServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.CreateOSPolicyAssignmentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + os_policy_assignment: Optional[os_policy_assignments.OSPolicyAssignment] = None, + os_policy_assignment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create an OS policy assignment. + + This method also creates the first revision of the OS policy + assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_create_os_policy_assignment(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + os_policy_assignment = osconfig_v1.OSPolicyAssignment() + os_policy_assignment.os_policies.id = "id_value" + os_policy_assignment.os_policies.mode = "ENFORCEMENT" + os_policy_assignment.os_policies.resource_groups.resources.pkg.apt.name = "name_value" + os_policy_assignment.os_policies.resource_groups.resources.pkg.desired_state = "REMOVED" + os_policy_assignment.os_policies.resource_groups.resources.id = "id_value" + os_policy_assignment.rollout.disruption_budget.fixed = 528 + + request = osconfig_v1.CreateOSPolicyAssignmentRequest( + parent="parent_value", + os_policy_assignment=os_policy_assignment, + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + # Make the request + operation = client.create_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.CreateOSPolicyAssignmentRequest, dict]]): + The request object. A request message to create an OS + policy assignment + parent (:class:`str`): + Required. The parent resource name in + the form: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + os_policy_assignment (:class:`google.cloud.osconfig_v1.types.OSPolicyAssignment`): + Required. The OS policy assignment to + be created. + + This corresponds to the ``os_policy_assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + os_policy_assignment_id (:class:`str`): + Required. The logical name of the OS policy assignment + in the project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the project. + + This corresponds to the ``os_policy_assignment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.osconfig_v1.types.OSPolicyAssignment` OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). 
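+
+        As the note above says, the rollout can be cancelled by cancelling the
+        returned long-running operation. A hedged sketch (for illustration
+        only, continuing the sample above):
+
+        .. code-block:: python
+
+            # Instead of waiting for the rollout to finish, cancel the LRO.
+            operation = await client.create_os_policy_assignment(request=request)
+            await operation.cancel()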
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, os_policy_assignment, os_policy_assignment_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = os_policy_assignments.CreateOSPolicyAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if os_policy_assignment is not None: + request.os_policy_assignment = os_policy_assignment + if os_policy_assignment_id is not None: + request.os_policy_assignment_id = os_policy_assignment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_os_policy_assignment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + os_policy_assignments.OSPolicyAssignment, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + async def update_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.UpdateOSPolicyAssignmentRequest, dict] + ] = None, + *, + os_policy_assignment: Optional[os_policy_assignments.OSPolicyAssignment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update an existing OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_update_os_policy_assignment(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + os_policy_assignment = osconfig_v1.OSPolicyAssignment() + os_policy_assignment.os_policies.id = "id_value" + os_policy_assignment.os_policies.mode = "ENFORCEMENT" + os_policy_assignment.os_policies.resource_groups.resources.pkg.apt.name = "name_value" + os_policy_assignment.os_policies.resource_groups.resources.pkg.desired_state = "REMOVED" + os_policy_assignment.os_policies.resource_groups.resources.id = "id_value" + os_policy_assignment.rollout.disruption_budget.fixed = 528 + + request = osconfig_v1.UpdateOSPolicyAssignmentRequest( + os_policy_assignment=os_policy_assignment, + ) + + # Make the request + operation = client.update_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.UpdateOSPolicyAssignmentRequest, dict]]): + The request object. A request message to update an OS + policy assignment + os_policy_assignment (:class:`google.cloud.osconfig_v1.types.OSPolicyAssignment`): + Required. The updated OS policy + assignment. + + This corresponds to the ``os_policy_assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask that controls + which fields of the assignment should be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.osconfig_v1.types.OSPolicyAssignment` OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([os_policy_assignment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if os_policy_assignment is not None: + request.os_policy_assignment = os_policy_assignment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_os_policy_assignment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("os_policy_assignment.name", request.os_policy_assignment.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + os_policy_assignments.OSPolicyAssignment, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.GetOSPolicyAssignmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignments.OSPolicyAssignment: + r"""Retrieve an existing OS policy assignment. + + This method always returns the latest revision. In order to + retrieve a previous revision of the assignment, also provide the + revision ID in the ``name`` parameter. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_get_os_policy_assignment(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.GetOSPolicyAssignmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_os_policy_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.GetOSPolicyAssignmentRequest, dict]]): + The request object. A request message to get an OS policy + assignment + name (:class:`str`): + Required. The resource name of OS policy assignment. + + Format: + ``projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}@{revisionId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.osconfig_v1.types.OSPolicyAssignment: + OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = os_policy_assignments.GetOSPolicyAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_os_policy_assignment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_os_policy_assignments( + self, + request: Optional[ + Union[os_policy_assignments.ListOSPolicyAssignmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOSPolicyAssignmentsAsyncPager: + r"""List the OS policy assignments under the parent + resource. + For each OS policy assignment, the latest revision is + returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_list_os_policy_assignments(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.ListOSPolicyAssignmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_os_policy_assignments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsRequest, dict]]): + The request object. A request message to list OS policy + assignments for a parent resource + parent (:class:`str`): + Required. The parent resource name. 
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentsAsyncPager:
+ A response message for listing all
+ assignments under the given parent.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = os_policy_assignments.ListOSPolicyAssignmentsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_os_policy_assignments,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListOSPolicyAssignmentsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
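Note on the pattern above, which repeats for every method in this generated client: a call may pass either a fully-populated request object or the flattened convenience fields, never both; the `has_flattened_params` guard raises `ValueError` otherwise. A minimal sketch of the two equivalent styles (the parent value is a hypothetical placeholder, not taken from this diff):

.. code-block:: python

    from google.cloud import osconfig_v1

    async def list_assignments() -> None:
        client = osconfig_v1.OsConfigZonalServiceAsyncClient()
        parent = "projects/my-project/locations/us-central1-a"  # hypothetical

        # Style 1: a fully-populated request object.
        request = osconfig_v1.ListOSPolicyAssignmentsRequest(parent=parent)
        pager = await client.list_os_policy_assignments(request=request)

        # Style 2: the flattened field. Passing both styles at once
        # raises ValueError, per the guard in the method body above.
        pager = await client.list_os_policy_assignments(parent=parent)

        async for assignment in pager:
            print(assignment.name)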
+
+ async def list_os_policy_assignment_revisions(
+ self,
+ request: Optional[
+ Union[os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOSPolicyAssignmentRevisionsAsyncPager:
+ r"""List the OS policy assignment revisions for a given
+ OS policy assignment.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import osconfig_v1
+
+ async def sample_list_os_policy_assignment_revisions():
+ # Create a client
+ client = osconfig_v1.OsConfigZonalServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = osconfig_v1.ListOSPolicyAssignmentRevisionsRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ page_result = client.list_os_policy_assignment_revisions(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsRequest, dict]]):
+ The request object. A request message to list revisions
+ for an OS policy assignment
+ name (:class:`str`):
+ Required. The name of the OS policy
+ assignment to list revisions for.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentRevisionsAsyncPager:
+ A response message for listing all
+ revisions for an OS policy assignment.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_os_policy_assignment_revisions,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListOSPolicyAssignmentRevisionsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
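Both list methods above return async pagers that resolve additional pages transparently; when page-level access is needed (for example, to read the raw response message), the pager also exposes a `pages` iterator. A sketch, assuming a valid assignment name:

.. code-block:: python

    from google.cloud import osconfig_v1

    async def walk_revision_pages(name: str) -> None:
        client = osconfig_v1.OsConfigZonalServiceAsyncClient()
        pager = await client.list_os_policy_assignment_revisions(name=name)

        # Iterate whole pages instead of flattened items; each page wraps
        # one ListOSPolicyAssignmentRevisionsResponse.
        async for page in pager.pages:
            for assignment in page.os_policy_assignments:
                print(assignment.name, assignment.revision_id)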
+
+ async def delete_os_policy_assignment(
+ self,
+ request: Optional[
+ Union[os_policy_assignments.DeleteOSPolicyAssignmentRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Delete the OS policy assignment.
+
+ This method creates a new revision of the OS policy assignment.
+
+ This method returns a long running operation (LRO) that contains
+ the rollout details. The rollout can be cancelled by cancelling
+ the LRO.
+
+ If the LRO completes and is not cancelled, all revisions
+ associated with the OS policy assignment are deleted.
+
+ For more information, see `Method:
+ projects.locations.osPolicyAssignments.operations.cancel <https://cloud.google.com/compute/docs/osconfig/rest/v1/projects.locations.osPolicyAssignments.operations/cancel>`__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import osconfig_v1
+
+ async def sample_delete_os_policy_assignment():
+ # Create a client
+ client = osconfig_v1.OsConfigZonalServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = osconfig_v1.DeleteOSPolicyAssignmentRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ operation = client.delete_os_policy_assignment(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.osconfig_v1.types.DeleteOSPolicyAssignmentRequest, dict]]):
+ The request object. A request message for deleting an OS
+ policy assignment.
+ name (:class:`str`):
+ Required. The name of the OS policy
+ assignment to be deleted
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
+
+ }
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = os_policy_assignments.DeleteOSPolicyAssignmentRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_os_policy_assignment,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ empty_pb2.Empty,
+ metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata,
+ )
+
+ # Done; return the response.
+ return response
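The delete docstring above notes that the rollout can be cancelled by cancelling the LRO. A sketch of what that looks like with the `AsyncOperation` future this method returns (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import osconfig_v1

    async def cancel_delete_rollout(name: str) -> None:
        client = osconfig_v1.OsConfigZonalServiceAsyncClient()
        operation = await client.delete_os_policy_assignment(name=name)

        # Cancelling the long-running operation cancels the rollout itself.
        await operation.cancel()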
+
+ async def get_os_policy_assignment_report(
+ self,
+ request: Optional[
+ Union[os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> os_policy_assignment_reports.OSPolicyAssignmentReport:
+ r"""Get the OS policy assignment report for the
+ specified Compute Engine VM instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import osconfig_v1
+
+ async def sample_get_os_policy_assignment_report():
+ # Create a client
+ client = osconfig_v1.OsConfigZonalServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = osconfig_v1.GetOSPolicyAssignmentReportRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = await client.get_os_policy_assignment_report(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.osconfig_v1.types.GetOSPolicyAssignmentReportRequest, dict]]):
+ The request object. Get a report of the OS policy
+ assignment for a VM instance.
+ name (:class:`str`):
+ Required. API resource name for OS policy assignment
+ report.
+
+ Format:
+ ``/projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report``
+
+ For ``{project}``, either ``project-number`` or
+ ``project-id`` can be provided. For ``{instance_id}``,
+ either Compute Engine ``instance-id`` or
+ ``instance-name`` can be provided. For
+ ``{assignment_id}``, the OSPolicyAssignment id must be
+ provided.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.osconfig_v1.types.OSPolicyAssignmentReport:
+ A report of the OS policy assignment
+ status for a given instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(
+ request
+ )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_os_policy_assignment_report,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
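For orientation, a sketch of calling the method above with the flattened `name` argument; the path segments are hypothetical values, and the `os_policy_compliances` iteration reflects the OSPolicyAssignmentReport message as I understand it, so verify the field names against the types module:

.. code-block:: python

    from google.cloud import osconfig_v1

    async def fetch_report() -> None:
        client = osconfig_v1.OsConfigZonalServiceAsyncClient()
        name = (
            "projects/my-project/locations/us-central1-a/instances/"
            "my-instance/osPolicyAssignments/my-assignment/report"
        )  # hypothetical values
        report = await client.get_os_policy_assignment_report(name=name)
        for compliance in report.os_policy_compliances:
            print(compliance.os_policy_id, compliance.compliance_state)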
+
+ async def list_os_policy_assignment_reports(
+ self,
+ request: Optional[
+ Union[
+ os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, dict
+ ]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOSPolicyAssignmentReportsAsyncPager:
+ r"""List OS policy assignment reports for all Compute
+ Engine VM instances in the specified zone.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import osconfig_v1
+
+ async def sample_list_os_policy_assignment_reports():
+ # Create a client
+ client = osconfig_v1.OsConfigZonalServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = osconfig_v1.ListOSPolicyAssignmentReportsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_os_policy_assignment_reports(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsRequest, dict]]):
+ The request object. List the OS policy assignment reports
+ for VM instances.
+ parent (:class:`str`):
+ Required. The parent resource name.
+
+ Format:
+ ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/reports``
+
+ For ``{project}``, either ``project-number`` or
+ ``project-id`` can be provided. For ``{instance}``,
+ either ``instance-name``, ``instance-id``, or ``-`` can
+ be provided. If '-' is provided, the response will
+ include OSPolicyAssignmentReports for all instances in
+ the project/location. For ``{assignment}``, either
+ ``assignment-id`` or ``-`` can be provided. If '-' is
+ provided, the response will include
+ OSPolicyAssignmentReports for all OSPolicyAssignments in
+ the project/location.
Either {instance} or {assignment} + must be ``-``. + + For example: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/-/reports`` + returns all reports for the instance + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/{assignment-id}/reports`` + returns all the reports for the given assignment across + all instances. + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/-/reports`` + returns all the reports for all assignments across all + instances. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentReportsAsyncPager: + A response message for listing OS + Policy assignment reports including the + page of results and page token. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_os_policy_assignment_reports, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOSPolicyAssignmentReportsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_inventory( + self, + request: Optional[Union[inventory.GetInventoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> inventory.Inventory: + r"""Get inventory data for the specified VM instance. If the VM has + no associated inventory, the message ``NOT_FOUND`` is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_get_inventory(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.GetInventoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inventory(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.GetInventoryRequest, dict]]): + The request object. A request message for getting + inventory data for the specified VM. + name (:class:`str`): + Required. API resource name for inventory resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/inventory`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either Compute Engine ``instance-id`` or + ``instance-name`` can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.Inventory: + This API resource represents the available inventory data for a + Compute Engine virtual machine (VM) instance at a + given point in time. + + You can use this API resource to determine the + inventory data of your VM. + + For more information, see [Information provided by OS + inventory + management](\ https://cloud.google.com/compute/docs/instances/os-inventory-management#data-collected). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = inventory.GetInventoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_inventory, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_inventories( + self, + request: Optional[Union[inventory.ListInventoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInventoriesAsyncPager: + r"""List inventory data for all VM instances in the + specified zone. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_list_inventories(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.ListInventoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inventories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.ListInventoriesRequest, dict]]): + The request object. A request message for listing + inventory data for all VMs in the + specified location. + parent (:class:`str`): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListInventoriesAsyncPager: + A response message for listing + inventory data for all VMs in a + specified location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = inventory.ListInventoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_inventories, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInventoriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_vulnerability_report( + self, + request: Optional[ + Union[vulnerability.GetVulnerabilityReportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vulnerability.VulnerabilityReport: + r"""Gets the vulnerability report for the specified VM + instance. Only VMs with inventory data have + vulnerability reports associated with them. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_get_vulnerability_report(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.GetVulnerabilityReportRequest( + name="name_value", + ) + + # Make the request + response = await client.get_vulnerability_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.GetVulnerabilityReportRequest, dict]]): + The request object. A request message for getting the + vulnerability report for the specified + VM. + name (:class:`str`): + Required. API resource name for vulnerability resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either Compute Engine ``instance-id`` or + ``instance-name`` can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.VulnerabilityReport: + This API resource represents the vulnerability report for a specified + Compute Engine virtual machine (VM) instance at a + given point in time. + + For more information, see [Vulnerability + reports](\ https://cloud.google.com/compute/docs/instances/os-inventory-management#vulnerability-reports). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vulnerability.GetVulnerabilityReportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_vulnerability_report, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_vulnerability_reports( + self, + request: Optional[ + Union[vulnerability.ListVulnerabilityReportsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVulnerabilityReportsAsyncPager: + r"""List vulnerability reports for all VM instances in + the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + async def sample_list_vulnerability_reports(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1.ListVulnerabilityReportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vulnerability_reports(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1.types.ListVulnerabilityReportsRequest, dict]]): + The request object. A request message for listing + vulnerability reports for all VM + instances in the specified location. + parent (:class:`str`): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListVulnerabilityReportsAsyncPager: + A response message for listing + vulnerability reports for all VM + instances in the specified location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = vulnerability.ListVulnerabilityReportsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_vulnerability_reports, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListVulnerabilityReportsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "OsConfigZonalServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OsConfigZonalServiceAsyncClient",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/client.py new file mode 100644 index 000000000000..fa7ca3141eeb --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/client.py @@ -0,0 +1,2209 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.osconfig_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.osconfig_v1.services.os_config_zonal_service import pagers +from google.cloud.osconfig_v1.types import ( + inventory, + os_policy, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +from .transports.base import DEFAULT_CLIENT_INFO, OsConfigZonalServiceTransport +from .transports.grpc import OsConfigZonalServiceGrpcTransport +from .transports.grpc_asyncio import OsConfigZonalServiceGrpcAsyncIOTransport +from .transports.rest import OsConfigZonalServiceRestTransport + + +class OsConfigZonalServiceClientMeta(type): + """Metaclass for the OsConfigZonalService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[OsConfigZonalServiceTransport]] + _transport_registry["grpc"] = OsConfigZonalServiceGrpcTransport + _transport_registry["grpc_asyncio"] = OsConfigZonalServiceGrpcAsyncIOTransport + _transport_registry["rest"] = OsConfigZonalServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[OsConfigZonalServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class OsConfigZonalServiceClient(metaclass=OsConfigZonalServiceClientMeta): + """Zonal OS Config API + + The OS Config service is the server-side component that allows + users to manage package installations and patch jobs for Compute + Engine VM instances. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "osconfig.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ OsConfigZonalServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ OsConfigZonalServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> OsConfigZonalServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ OsConfigZonalServiceTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
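A quick usage sketch for the credential helpers and `transport` property defined above (the key-file path is hypothetical):

.. code-block:: python

    from google.cloud import osconfig_v1

    # from_service_account_json is an alias of from_service_account_file.
    client = osconfig_v1.OsConfigZonalServiceClient.from_service_account_file(
        "service-account.json"  # hypothetical key file
    )
    print(client.transport)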
+ """ + return self._transport + + @staticmethod + def instance_path( + project: str, + zone: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/zones/{zone}/instances/{instance}".format( + project=project, + zone=zone, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/zones/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_os_policy_assignment_path( + project: str, + location: str, + instance: str, + assignment: str, + ) -> str: + """Returns a fully-qualified instance_os_policy_assignment string.""" + return "projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}".format( + project=project, + location=location, + instance=instance, + assignment=assignment, + ) + + @staticmethod + def parse_instance_os_policy_assignment_path(path: str) -> Dict[str, str]: + """Parses a instance_os_policy_assignment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/osPolicyAssignments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def inventory_path( + project: str, + location: str, + instance: str, + ) -> str: + """Returns a fully-qualified inventory string.""" + return "projects/{project}/locations/{location}/instances/{instance}/inventory".format( + project=project, + location=location, + instance=instance, + ) + + @staticmethod + def parse_inventory_path(path: str) -> Dict[str, str]: + """Parses a inventory path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/inventory$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def os_policy_assignment_path( + project: str, + location: str, + os_policy_assignment: str, + ) -> str: + """Returns a fully-qualified os_policy_assignment string.""" + return "projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}".format( + project=project, + location=location, + os_policy_assignment=os_policy_assignment, + ) + + @staticmethod + def parse_os_policy_assignment_path(path: str) -> Dict[str, str]: + """Parses a os_policy_assignment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/osPolicyAssignments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def os_policy_assignment_report_path( + project: str, + location: str, + instance: str, + assignment: str, + ) -> str: + """Returns a fully-qualified os_policy_assignment_report string.""" + return "projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report".format( + project=project, + location=location, + instance=instance, + assignment=assignment, + ) + + @staticmethod + def parse_os_policy_assignment_report_path(path: str) -> Dict[str, str]: + """Parses a os_policy_assignment_report path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/osPolicyAssignments/(?P.+?)/report$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def vulnerability_report_path( + project: str, + location: str, + instance: str, + ) -> str: + """Returns a fully-qualified vulnerability_report string.""" + return 
"projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport".format( + project=project, + location=location, + instance=instance, + ) + + @staticmethod + def parse_vulnerability_report_path(path: str) -> Dict[str, str]: + """Parses a vulnerability_report path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/vulnerabilityReport$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
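+
+    # Illustrative usage (a sketch; the resolved endpoint depends on the
+    # caller's environment):
+    #
+    #   endpoint, cert_source = (
+    #       OsConfigZonalServiceClient.get_mtls_endpoint_and_cert_source()
+    #   )
+    #   # With GOOGLE_API_USE_MTLS_ENDPOINT=never, `endpoint` is the default
+    #   # API endpoint and, unless GOOGLE_API_USE_CLIENT_CERTIFICATE=true,
+    #   # `cert_source` is None.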
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, OsConfigZonalServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the os config zonal service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, OsConfigZonalServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = client_options_lib.from_dict(client_options)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        client_options = cast(client_options_lib.ClientOptions, client_options)
+
+        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
+            client_options
+        )
+
+        api_key_value = getattr(client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError(
+                "client_options.api_key and credentials are mutually exclusive"
+            )
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        if isinstance(transport, OsConfigZonalServiceTransport):
+            # transport is a OsConfigZonalServiceTransport instance.
+            if credentials or client_options.credentials_file or api_key_value:
+                raise ValueError(
+                    "When providing a transport instance, "
+                    "provide its credentials directly."
+                )
+            if client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = transport
+        else:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(
+                google.auth._default, "get_api_key_credentials"
+            ):
+                credentials = google.auth._default.get_api_key_credentials(
+                    api_key_value
+                )
+
+            Transport = type(self).get_transport_class(transport)
+            self._transport = Transport(
+                credentials=credentials,
+                credentials_file=client_options.credentials_file,
+                host=api_endpoint,
+                scopes=client_options.scopes,
+                client_cert_source_for_mtls=client_cert_source_func,
+                quota_project_id=client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=True,
+                api_audience=client_options.api_audience,
+            )
+
+    def create_os_policy_assignment(
+        self,
+        request: Optional[
+            Union[os_policy_assignments.CreateOSPolicyAssignmentRequest, dict]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        os_policy_assignment: Optional[os_policy_assignments.OSPolicyAssignment] = None,
+        os_policy_assignment_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Create an OS policy assignment.
+
+        This method also creates the first revision of the OS policy
+        assignment.
+
+        This method returns a long running operation (LRO) that contains
+        the rollout details. The rollout can be cancelled by cancelling
+        the LRO.
+
+        For more information, see `Method:
+        projects.locations.osPolicyAssignments.operations.cancel <https://cloud.google.com/compute/docs/osconfig/rest/v1/projects.locations.osPolicyAssignments.operations/cancel>`__.
+
+        ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_create_os_policy_assignment(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + os_policy_assignment = osconfig_v1.OSPolicyAssignment() + os_policy_assignment.os_policies.id = "id_value" + os_policy_assignment.os_policies.mode = "ENFORCEMENT" + os_policy_assignment.os_policies.resource_groups.resources.pkg.apt.name = "name_value" + os_policy_assignment.os_policies.resource_groups.resources.pkg.desired_state = "REMOVED" + os_policy_assignment.os_policies.resource_groups.resources.id = "id_value" + os_policy_assignment.rollout.disruption_budget.fixed = 528 + + request = osconfig_v1.CreateOSPolicyAssignmentRequest( + parent="parent_value", + os_policy_assignment=os_policy_assignment, + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + # Make the request + operation = client.create_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.CreateOSPolicyAssignmentRequest, dict]): + The request object. A request message to create an OS + policy assignment + parent (str): + Required. The parent resource name in + the form: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + os_policy_assignment (google.cloud.osconfig_v1.types.OSPolicyAssignment): + Required. The OS policy assignment to + be created. + + This corresponds to the ``os_policy_assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + os_policy_assignment_id (str): + Required. The logical name of the OS policy assignment + in the project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the project. + + This corresponds to the ``os_policy_assignment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.osconfig_v1.types.OSPolicyAssignment` OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. 
+
+                For more information, see [OS policy and OS policy
+                assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies).
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any(
+            [parent, os_policy_assignment, os_policy_assignment_id]
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a os_policy_assignments.CreateOSPolicyAssignmentRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(
+            request, os_policy_assignments.CreateOSPolicyAssignmentRequest
+        ):
+            request = os_policy_assignments.CreateOSPolicyAssignmentRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if os_policy_assignment is not None:
+            request.os_policy_assignment = os_policy_assignment
+        if os_policy_assignment_id is not None:
+            request.os_policy_assignment_id = os_policy_assignment_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.create_os_policy_assignment
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            os_policy_assignments.OSPolicyAssignment,
+            metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_os_policy_assignment(
+        self,
+        request: Optional[
+            Union[os_policy_assignments.UpdateOSPolicyAssignmentRequest, dict]
+        ] = None,
+        *,
+        os_policy_assignment: Optional[os_policy_assignments.OSPolicyAssignment] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Update an existing OS policy assignment.
+
+        This method creates a new revision of the OS policy assignment.
+
+        This method returns a long running operation (LRO) that contains
+        the rollout details. The rollout can be cancelled by cancelling
+        the LRO.
+
+        For more information, see `Method:
+        projects.locations.osPolicyAssignments.operations.cancel <https://cloud.google.com/compute/docs/osconfig/rest/v1/projects.locations.osPolicyAssignments.operations/cancel>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_update_os_policy_assignment(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + os_policy_assignment = osconfig_v1.OSPolicyAssignment() + os_policy_assignment.os_policies.id = "id_value" + os_policy_assignment.os_policies.mode = "ENFORCEMENT" + os_policy_assignment.os_policies.resource_groups.resources.pkg.apt.name = "name_value" + os_policy_assignment.os_policies.resource_groups.resources.pkg.desired_state = "REMOVED" + os_policy_assignment.os_policies.resource_groups.resources.id = "id_value" + os_policy_assignment.rollout.disruption_budget.fixed = 528 + + request = osconfig_v1.UpdateOSPolicyAssignmentRequest( + os_policy_assignment=os_policy_assignment, + ) + + # Make the request + operation = client.update_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.UpdateOSPolicyAssignmentRequest, dict]): + The request object. A request message to update an OS + policy assignment + os_policy_assignment (google.cloud.osconfig_v1.types.OSPolicyAssignment): + Required. The updated OS policy + assignment. + + This corresponds to the ``os_policy_assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that controls + which fields of the assignment should be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.osconfig_v1.types.OSPolicyAssignment` OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([os_policy_assignment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.UpdateOSPolicyAssignmentRequest. 
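+        # (A plain dict is accepted here as well; the proto-plus constructor
+        #  below coerces it into an UpdateOSPolicyAssignmentRequest.)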
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, os_policy_assignments.UpdateOSPolicyAssignmentRequest + ): + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if os_policy_assignment is not None: + request.os_policy_assignment = os_policy_assignment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_os_policy_assignment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("os_policy_assignment.name", request.os_policy_assignment.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + os_policy_assignments.OSPolicyAssignment, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + def get_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.GetOSPolicyAssignmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignments.OSPolicyAssignment: + r"""Retrieve an existing OS policy assignment. + + This method always returns the latest revision. In order to + retrieve a previous revision of the assignment, also provide the + revision ID in the ``name`` parameter. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_get_os_policy_assignment(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.GetOSPolicyAssignmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_os_policy_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.GetOSPolicyAssignmentRequest, dict]): + The request object. A request message to get an OS policy + assignment + name (str): + Required. The resource name of OS policy assignment. + + Format: + ``projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}@{revisionId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.osconfig_v1.types.OSPolicyAssignment: + OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.GetOSPolicyAssignmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, os_policy_assignments.GetOSPolicyAssignmentRequest): + request = os_policy_assignments.GetOSPolicyAssignmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_os_policy_assignment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_os_policy_assignments( + self, + request: Optional[ + Union[os_policy_assignments.ListOSPolicyAssignmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOSPolicyAssignmentsPager: + r"""List the OS policy assignments under the parent + resource. + For each OS policy assignment, the latest revision is + returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_list_os_policy_assignments(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ListOSPolicyAssignmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_os_policy_assignments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsRequest, dict]): + The request object. A request message to list OS policy + assignments for a parent resource + parent (str): + Required. The parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentsPager: + A response message for listing all + assignments under given parent. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.ListOSPolicyAssignmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, os_policy_assignments.ListOSPolicyAssignmentsRequest + ): + request = os_policy_assignments.ListOSPolicyAssignmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_os_policy_assignments + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOSPolicyAssignmentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
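+        # The pager above fetches lazily: iterating it yields results from the
+        # current page and issues further ListOSPolicyAssignments requests only
+        # as more pages are needed (pages can also be stepped through via the
+        # pager's `pages` property).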
+        return response
+
+    def list_os_policy_assignment_revisions(
+        self,
+        request: Optional[
+            Union[os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListOSPolicyAssignmentRevisionsPager:
+        r"""List the OS policy assignment revisions for a given
+        OS policy assignment.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import osconfig_v1
+
+            def sample_list_os_policy_assignment_revisions():
+                # Create a client
+                client = osconfig_v1.OsConfigZonalServiceClient()
+
+                # Initialize request argument(s)
+                request = osconfig_v1.ListOSPolicyAssignmentRevisionsRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                page_result = client.list_os_policy_assignment_revisions(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsRequest, dict]):
+                The request object. A request message to list revisions
+                for an OS policy assignment
+            name (str):
+                Required. The name of the OS policy
+                assignment to list revisions for.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentRevisionsPager:
+                A response message for listing all
+                revisions for an OS policy assignment.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(
+            request, os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest
+        ):
+            request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(
+                request
+            )
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
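+        # (The wrapped method already carries this RPC's default retry and
+        #  timeout policy; gapic_v1.method.DEFAULT above means "defer to those
+        #  defaults" rather than "no retry/timeout".)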
+        rpc = self._transport._wrapped_methods[
+            self._transport.list_os_policy_assignment_revisions
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListOSPolicyAssignmentRevisionsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_os_policy_assignment(
+        self,
+        request: Optional[
+            Union[os_policy_assignments.DeleteOSPolicyAssignmentRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation.Operation:
+        r"""Delete the OS policy assignment.
+
+        This method creates a new revision of the OS policy assignment.
+
+        This method returns a long running operation (LRO) that contains
+        the rollout details. The rollout can be cancelled by cancelling
+        the LRO.
+
+        If the LRO completes and is not cancelled, all revisions
+        associated with the OS policy assignment are deleted.
+
+        For more information, see `Method:
+        projects.locations.osPolicyAssignments.operations.cancel <https://cloud.google.com/compute/docs/osconfig/rest/v1/projects.locations.osPolicyAssignments.operations/cancel>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import osconfig_v1
+
+            def sample_delete_os_policy_assignment():
+                # Create a client
+                client = osconfig_v1.OsConfigZonalServiceClient()
+
+                # Initialize request argument(s)
+                request = osconfig_v1.DeleteOSPolicyAssignmentRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                operation = client.delete_os_policy_assignment(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.osconfig_v1.types.DeleteOSPolicyAssignmentRequest, dict]):
+                The request object. A request message for deleting an OS
+                policy assignment.
+            name (str):
+                Required. The name of the OS policy
+                assignment to be deleted
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+                   empty messages in your APIs. A typical example is to
+                   use it as the request or the response type of an API
+                   method. For instance:
+
+                      service Foo {
+                         rpc Bar(google.protobuf.Empty) returns
+                         (google.protobuf.Empty);
+
+                      }
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a os_policy_assignments.DeleteOSPolicyAssignmentRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(
+            request, os_policy_assignments.DeleteOSPolicyAssignmentRequest
+        ):
+            request = os_policy_assignments.DeleteOSPolicyAssignmentRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.delete_os_policy_assignment
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            empty_pb2.Empty,
+            metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_os_policy_assignment_report(
+        self,
+        request: Optional[
+            Union[os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> os_policy_assignment_reports.OSPolicyAssignmentReport:
+        r"""Get the OS policy assignment report for the
+        specified Compute Engine VM instance.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import osconfig_v1
+
+            def sample_get_os_policy_assignment_report():
+                # Create a client
+                client = osconfig_v1.OsConfigZonalServiceClient()
+
+                # Initialize request argument(s)
+                request = osconfig_v1.GetOSPolicyAssignmentReportRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_os_policy_assignment_report(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.osconfig_v1.types.GetOSPolicyAssignmentReportRequest, dict]):
+                The request object. Get a report of the OS policy
+                assignment for a VM instance.
+            name (str):
+                Required. API resource name for OS policy assignment
+                report.
+
+                Format:
+                ``/projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report``
+
+                For ``{project}``, either ``project-number`` or
+                ``project-id`` can be provided. For ``{instance_id}``,
+                either Compute Engine ``instance-id`` or
+                ``instance-name`` can be provided. For
+                ``{assignment_id}``, the OSPolicyAssignment id must be
+                provided.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.osconfig_v1.types.OSPolicyAssignmentReport:
+                A report of the OS policy assignment
+                status for a given instance.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(
+            request, os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest
+        ):
+            request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(
+                request
+            )
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.get_os_policy_assignment_report
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_os_policy_assignment_reports(
+        self,
+        request: Optional[
+            Union[
+                os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, dict
+            ]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListOSPolicyAssignmentReportsPager:
+        r"""List OS policy assignment reports for all Compute
+        Engine VM instances in the specified zone.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_list_os_policy_assignment_reports(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ListOSPolicyAssignmentReportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_os_policy_assignment_reports(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsRequest, dict]): + The request object. List the OS policy assignment reports + for VM instances. + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/reports`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either ``instance-name``, ``instance-id``, or ``-`` can + be provided. If '-' is provided, the response will + include OSPolicyAssignmentReports for all instances in + the project/location. For ``{assignment}``, either + ``assignment-id`` or ``-`` can be provided. If '-' is + provided, the response will include + OSPolicyAssignmentReports for all OSPolicyAssignments in + the project/location. Either {instance} or {assignment} + must be ``-``. + + For example: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/-/reports`` + returns all reports for the instance + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/{assignment-id}/reports`` + returns all the reports for the given assignment across + all instances. + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/-/reports`` + returns all the reports for all assignments across all + instances. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentReportsPager: + A response message for listing OS + Policy assignment reports including the + page of results and page token. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
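+        # Illustrative parent values (placeholders): per the docstring above,
+        # "-" may stand in for the instance or assignment segment, e.g.
+        #   projects/my-project/locations/us-central1-a/instances/-/osPolicyAssignments/my-assignment/reports
+        # lists that assignment's reports across all instances in the zone.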
+ if not isinstance( + request, os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest + ): + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_os_policy_assignment_reports + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOSPolicyAssignmentReportsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_inventory( + self, + request: Optional[Union[inventory.GetInventoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> inventory.Inventory: + r"""Get inventory data for the specified VM instance. If the VM has + no associated inventory, the message ``NOT_FOUND`` is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_get_inventory(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.GetInventoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_inventory(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.GetInventoryRequest, dict]): + The request object. A request message for getting + inventory data for the specified VM. + name (str): + Required. API resource name for inventory resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/inventory`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either Compute Engine ``instance-id`` or + ``instance-name`` can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.Inventory: + This API resource represents the available inventory data for a + Compute Engine virtual machine (VM) instance at a + given point in time. 
+ + You can use this API resource to determine the + inventory data of your VM. + + For more information, see [Information provided by OS + inventory + management](\ https://cloud.google.com/compute/docs/instances/os-inventory-management#data-collected). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a inventory.GetInventoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, inventory.GetInventoryRequest): + request = inventory.GetInventoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inventory] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_inventories( + self, + request: Optional[Union[inventory.ListInventoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInventoriesPager: + r"""List inventory data for all VM instances in the + specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_list_inventories(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ListInventoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inventories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ListInventoriesRequest, dict]): + The request object. A request message for listing + inventory data for all VMs in the + specified location. + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListInventoriesPager: + A response message for listing + inventory data for all VMs in a + specified location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a inventory.ListInventoriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, inventory.ListInventoriesRequest): + request = inventory.ListInventoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_inventories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInventoriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_vulnerability_report( + self, + request: Optional[ + Union[vulnerability.GetVulnerabilityReportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vulnerability.VulnerabilityReport: + r"""Gets the vulnerability report for the specified VM + instance. Only VMs with inventory data have + vulnerability reports associated with them. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_get_vulnerability_report(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.GetVulnerabilityReportRequest( + name="name_value", + ) + + # Make the request + response = client.get_vulnerability_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.GetVulnerabilityReportRequest, dict]): + The request object. 
A request message for getting the + vulnerability report for the specified + VM. + name (str): + Required. API resource name for vulnerability resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either Compute Engine ``instance-id`` or + ``instance-name`` can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.types.VulnerabilityReport: + This API resource represents the vulnerability report for a specified + Compute Engine virtual machine (VM) instance at a + given point in time. + + For more information, see [Vulnerability + reports](\ https://cloud.google.com/compute/docs/instances/os-inventory-management#vulnerability-reports). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vulnerability.GetVulnerabilityReportRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vulnerability.GetVulnerabilityReportRequest): + request = vulnerability.GetVulnerabilityReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_vulnerability_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_vulnerability_reports( + self, + request: Optional[ + Union[vulnerability.ListVulnerabilityReportsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVulnerabilityReportsPager: + r"""List vulnerability reports for all VM instances in + the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1 + + def sample_list_vulnerability_reports(): + # Create a client + client = osconfig_v1.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1.ListVulnerabilityReportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vulnerability_reports(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1.types.ListVulnerabilityReportsRequest, dict]): + The request object. A request message for listing + vulnerability reports for all VM + instances in the specified location. + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1.services.os_config_zonal_service.pagers.ListVulnerabilityReportsPager: + A response message for listing + vulnerability reports for all VM + instances in the specified location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vulnerability.ListVulnerabilityReportsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vulnerability.ListVulnerabilityReportsRequest): + request = vulnerability.ListVulnerabilityReportsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_vulnerability_reports + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListVulnerabilityReportsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "OsConfigZonalServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OsConfigZonalServiceClient",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/pagers.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/pagers.py new file mode 100644 index 000000000000..16fd145d37b7 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/pagers.py @@ -0,0 +1,714 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.osconfig_v1.types import ( + inventory, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + + +class ListOSPolicyAssignmentsPager: + """A pager for iterating through ``list_os_policy_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``os_policy_assignments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOSPolicyAssignments`` requests and continue to iterate + through the ``os_policy_assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., os_policy_assignments.ListOSPolicyAssignmentsResponse], + request: os_policy_assignments.ListOSPolicyAssignmentsRequest, + response: os_policy_assignments.ListOSPolicyAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
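The ``__enter__``/``__exit__`` pair defined above makes the client usable as a context manager, which is the simplest way to guarantee the transport is closed exactly once. A minimal usage sketch, assuming default credentials and a placeholder resource name:

.. code-block:: python

    from google.cloud import osconfig_v1

    with osconfig_v1.OsConfigZonalServiceClient() as client:
        report = client.get_vulnerability_report(
            name="projects/my-project/locations/us-central1-a/instances/my-vm/vulnerabilityReport"
        )
        print(report.name)
    # The transport (and its channel) is closed here; do not reuse the client.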
+ """ + self._method = method + self._request = os_policy_assignments.ListOSPolicyAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[os_policy_assignments.ListOSPolicyAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[os_policy_assignments.OSPolicyAssignment]: + for page in self.pages: + yield from page.os_policy_assignments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentsAsyncPager: + """A pager for iterating through ``list_os_policy_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``os_policy_assignments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOSPolicyAssignments`` requests and continue to iterate + through the ``os_policy_assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[os_policy_assignments.ListOSPolicyAssignmentsResponse] + ], + request: os_policy_assignments.ListOSPolicyAssignmentsRequest, + response: os_policy_assignments.ListOSPolicyAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = os_policy_assignments.ListOSPolicyAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[os_policy_assignments.ListOSPolicyAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[os_policy_assignments.OSPolicyAssignment]: + async def async_generator(): + async for page in self.pages: + for response in page.os_policy_assignments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentRevisionsPager: + """A pager for iterating through ``list_os_policy_assignment_revisions`` requests. 
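Every pager in this module repeats the page-token loop shown in the ``pages`` property above. Stripped of the GAPIC types, it reduces to the following sketch (``method`` and ``request`` are hypothetical stand-ins):

.. code-block:: python

    def iterate_pages(method, request):
        # Yield response pages until the server stops returning a token.
        response = method(request)
        yield response
        while response.next_page_token:
            request.page_token = response.next_page_token
            response = method(request)
            yield response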
+ + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``os_policy_assignments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOSPolicyAssignmentRevisions`` requests and continue to iterate + through the ``os_policy_assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse + ], + request: os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + response: os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[os_policy_assignments.OSPolicyAssignment]: + for page in self.pages: + yield from page.os_policy_assignments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentRevisionsAsyncPager: + """A pager for iterating through ``list_os_policy_assignment_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``os_policy_assignments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOSPolicyAssignmentRevisions`` requests and continue to iterate + through the ``os_policy_assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse], + ], + request: os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + response: os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentRevisionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[os_policy_assignments.OSPolicyAssignment]: + async def async_generator(): + async for page in self.pages: + for response in page.os_policy_assignments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentReportsPager: + """A pager for iterating through ``list_os_policy_assignment_reports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``os_policy_assignment_reports`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOSPolicyAssignmentReports`` requests and continue to iterate + through the ``os_policy_assignment_reports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse + ], + request: os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + response: os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(request) + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__( + self, + ) -> Iterator[os_policy_assignment_reports.OSPolicyAssignmentReport]: + for page in self.pages: + yield from page.os_policy_assignment_reports + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentReportsAsyncPager: + """A pager for iterating through ``list_os_policy_assignment_reports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``os_policy_assignment_reports`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOSPolicyAssignmentReports`` requests and continue to iterate + through the ``os_policy_assignment_reports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse + ], + ], + request: os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + response: os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListOSPolicyAssignmentReportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(request) + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[os_policy_assignment_reports.OSPolicyAssignmentReport]: + async def async_generator(): + async for page in self.pages: + for response in page.os_policy_assignment_reports: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInventoriesPager: + """A pager for iterating through ``list_inventories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListInventoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``inventories`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInventories`` requests and continue to iterate + through the ``inventories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListInventoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., inventory.ListInventoriesResponse], + request: inventory.ListInventoriesRequest, + response: inventory.ListInventoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListInventoriesRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListInventoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = inventory.ListInventoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[inventory.ListInventoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[inventory.Inventory]: + for page in self.pages: + yield from page.inventories + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInventoriesAsyncPager: + """A pager for iterating through ``list_inventories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListInventoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``inventories`` field. 
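In practice, ``ListInventoriesPager`` supports both iteration styles described above. A short sketch, assuming default credentials and a placeholder parent:

.. code-block:: python

    from google.cloud import osconfig_v1

    client = osconfig_v1.OsConfigZonalServiceClient()
    parent = "projects/my-project/locations/us-central1-a/instances/-"

    # Item-level iteration: additional pages are fetched transparently.
    for inventory in client.list_inventories(parent=parent):
        print(inventory.name)

    # Page-level iteration: useful for inspecting raw responses and tokens.
    for page in client.list_inventories(parent=parent).pages:
        print(len(page.inventories), page.next_page_token)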
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListInventories`` requests and continue to iterate + through the ``inventories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListInventoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[inventory.ListInventoriesResponse]], + request: inventory.ListInventoriesRequest, + response: inventory.ListInventoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListInventoriesRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListInventoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = inventory.ListInventoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[inventory.ListInventoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[inventory.Inventory]: + async def async_generator(): + async for page in self.pages: + for response in page.inventories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVulnerabilityReportsPager: + """A pager for iterating through ``list_vulnerability_reports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListVulnerabilityReportsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``vulnerability_reports`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListVulnerabilityReports`` requests and continue to iterate + through the ``vulnerability_reports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListVulnerabilityReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vulnerability.ListVulnerabilityReportsResponse], + request: vulnerability.ListVulnerabilityReportsRequest, + response: vulnerability.ListVulnerabilityReportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListVulnerabilityReportsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListVulnerabilityReportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
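The async variant is consumed with ``async for``; note that the async client's ``list_inventories`` call itself must be awaited before the pager can be iterated. A sketch, assuming default credentials:

.. code-block:: python

    import asyncio

    from google.cloud import osconfig_v1

    async def main():
        client = osconfig_v1.OsConfigZonalServiceAsyncClient()
        pager = await client.list_inventories(
            parent="projects/my-project/locations/us-central1-a/instances/-"
        )
        async for inventory in pager:  # pages are awaited transparently
            print(inventory.name)

    asyncio.run(main())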
+ """ + self._method = method + self._request = vulnerability.ListVulnerabilityReportsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vulnerability.ListVulnerabilityReportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vulnerability.VulnerabilityReport]: + for page in self.pages: + yield from page.vulnerability_reports + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVulnerabilityReportsAsyncPager: + """A pager for iterating through ``list_vulnerability_reports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1.types.ListVulnerabilityReportsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``vulnerability_reports`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListVulnerabilityReports`` requests and continue to iterate + through the ``vulnerability_reports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1.types.ListVulnerabilityReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[vulnerability.ListVulnerabilityReportsResponse] + ], + request: vulnerability.ListVulnerabilityReportsRequest, + response: vulnerability.ListVulnerabilityReportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1.types.ListVulnerabilityReportsRequest): + The initial request object. + response (google.cloud.osconfig_v1.types.ListVulnerabilityReportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = vulnerability.ListVulnerabilityReportsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[vulnerability.ListVulnerabilityReportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vulnerability.VulnerabilityReport]: + async def async_generator(): + async for page in self.pages: + for response in page.vulnerability_reports: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/__init__.py new file mode 100644 index 000000000000..77d28e750fe7 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OsConfigZonalServiceTransport +from .grpc import OsConfigZonalServiceGrpcTransport +from .grpc_asyncio import OsConfigZonalServiceGrpcAsyncIOTransport +from .rest import OsConfigZonalServiceRestInterceptor, OsConfigZonalServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[OsConfigZonalServiceTransport]] +_transport_registry["grpc"] = OsConfigZonalServiceGrpcTransport +_transport_registry["grpc_asyncio"] = OsConfigZonalServiceGrpcAsyncIOTransport +_transport_registry["rest"] = OsConfigZonalServiceRestTransport + +__all__ = ( + "OsConfigZonalServiceTransport", + "OsConfigZonalServiceGrpcTransport", + "OsConfigZonalServiceGrpcAsyncIOTransport", + "OsConfigZonalServiceRestTransport", + "OsConfigZonalServiceRestInterceptor", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/base.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/base.py new file mode 100644 index 000000000000..ece8be532a28 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/base.py @@ -0,0 +1,346 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
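The ``_transport_registry`` built in ``transports/__init__.py`` above is what lets callers select a transport by name when constructing the client; a transport instance can also be built directly and passed in. A sketch of both styles:

.. code-block:: python

    from google.cloud import osconfig_v1
    from google.cloud.osconfig_v1.services.os_config_zonal_service.transports import (
        OsConfigZonalServiceGrpcTransport,
    )

    # Select a registered transport by its name ("grpc", "grpc_asyncio", "rest").
    rest_client = osconfig_v1.OsConfigZonalServiceClient(transport="rest")

    # Or construct a transport explicitly and hand the instance to the client.
    transport = OsConfigZonalServiceGrpcTransport()
    grpc_client = osconfig_v1.OsConfigZonalServiceClient(transport=transport)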
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.osconfig_v1 import gapic_version as package_version +from google.cloud.osconfig_v1.types import ( + inventory, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class OsConfigZonalServiceTransport(abc.ABC): + """Abstract transport class for OsConfigZonalService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "osconfig.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ # Don't apply the audience if a credentials file was passed by the user.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(
+ api_audience if api_audience else host
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.create_os_policy_assignment: gapic_v1.method.wrap_method(
+ self.create_os_policy_assignment,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.update_os_policy_assignment: gapic_v1.method.wrap_method(
+ self.update_os_policy_assignment,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_os_policy_assignment: gapic_v1.method.wrap_method(
+ self.get_os_policy_assignment,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_os_policy_assignments: gapic_v1.method.wrap_method(
+ self.list_os_policy_assignments,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_os_policy_assignment_revisions: gapic_v1.method.wrap_method(
+ self.list_os_policy_assignment_revisions,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_os_policy_assignment: gapic_v1.method.wrap_method(
+ self.delete_os_policy_assignment,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_os_policy_assignment_report: gapic_v1.method.wrap_method(
+ self.get_os_policy_assignment_report,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_os_policy_assignment_reports: gapic_v1.method.wrap_method(
+ self.list_os_policy_assignment_reports,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_inventory: gapic_v1.method.wrap_method(
+ self.get_inventory,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_inventories: gapic_v1.method.wrap_method(
+ self.list_inventories,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_vulnerability_report: gapic_v1.method.wrap_method(
+ self.get_vulnerability_report,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_vulnerability_reports: gapic_v1.method.wrap_method(
+ self.list_vulnerability_reports,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
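The branch above resolves credentials in a fixed order: an explicit ``credentials`` object wins, then ``credentials_file``, and finally Application Default Credentials via ``google.auth.default``. The same precedence, sketched outside the transport (the helper is hypothetical; requires ``google-auth``):

.. code-block:: python

    import google.auth

    SCOPES = ["https://www.googleapis.com/auth/cloud-platform"]

    def resolve_credentials(credentials=None, credentials_file=None):
        if credentials and credentials_file:
            raise ValueError("'credentials_file' and 'credentials' are mutually exclusive")
        if credentials_file is not None:
            return google.auth.load_credentials_from_file(credentials_file, scopes=SCOPES)
        if credentials is None:
            return google.auth.default(scopes=SCOPES)
        return credentials, None

    creds, project_id = resolve_credentials()  # falls back to Application Default Credentials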
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.CreateOSPolicyAssignmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.UpdateOSPolicyAssignmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.GetOSPolicyAssignmentRequest], + Union[ + os_policy_assignments.OSPolicyAssignment, + Awaitable[os_policy_assignments.OSPolicyAssignment], + ], + ]: + raise NotImplementedError() + + @property + def list_os_policy_assignments( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentsRequest], + Union[ + os_policy_assignments.ListOSPolicyAssignmentsResponse, + Awaitable[os_policy_assignments.ListOSPolicyAssignmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_os_policy_assignment_revisions( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest], + Union[ + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + Awaitable[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.DeleteOSPolicyAssignmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_os_policy_assignment_report( + self, + ) -> Callable[ + [os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest], + Union[ + os_policy_assignment_reports.OSPolicyAssignmentReport, + Awaitable[os_policy_assignment_reports.OSPolicyAssignmentReport], + ], + ]: + raise NotImplementedError() + + @property + def list_os_policy_assignment_reports( + self, + ) -> Callable[ + [os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest], + Union[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + Awaitable[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def get_inventory( + self, + ) -> Callable[ + [inventory.GetInventoryRequest], + Union[inventory.Inventory, Awaitable[inventory.Inventory]], + ]: + raise NotImplementedError() + + @property + def list_inventories( + self, + ) -> Callable[ + [inventory.ListInventoriesRequest], + Union[ + inventory.ListInventoriesResponse, + Awaitable[inventory.ListInventoriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_vulnerability_report( + self, + ) -> Callable[ + [vulnerability.GetVulnerabilityReportRequest], + Union[ + vulnerability.VulnerabilityReport, + Awaitable[vulnerability.VulnerabilityReport], + ], + ]: + raise NotImplementedError() + + @property + def list_vulnerability_reports( + self, + ) -> Callable[ + [vulnerability.ListVulnerabilityReportsRequest], + Union[ + vulnerability.ListVulnerabilityReportsResponse, + Awaitable[vulnerability.ListVulnerabilityReportsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = 
("OsConfigZonalServiceTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/grpc.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/grpc.py new file mode 100644 index 000000000000..75553caa0b98 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/grpc.py @@ -0,0 +1,662 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.osconfig_v1.types import ( + inventory, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +from .base import DEFAULT_CLIENT_INFO, OsConfigZonalServiceTransport + + +class OsConfigZonalServiceGrpcTransport(OsConfigZonalServiceTransport): + """gRPC backend transport for OsConfigZonalService. + + Zonal OS Config API + + The OS Config service is the server-side component that allows + users to manage package installations and patch jobs for Compute + Engine VM instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "osconfig.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client: Optional[operations_v1.OperationsClient] = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
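Of the TLS options documented above, ``client_cert_source_for_mtls`` is the non-deprecated way to supply a client certificate, and it is only consulted when neither ``channel`` nor ``ssl_channel_credentials`` is given. A sketch with placeholder PEM paths:

.. code-block:: python

    def my_cert_source():
        # Hypothetical helper returning (cert_bytes, key_bytes), both PEM-encoded.
        with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
            return cert.read(), key.read()

    transport = OsConfigZonalServiceGrpcTransport(
        client_cert_source_for_mtls=my_cert_source,
    )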
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ # Set ``credentials_file`` to ``None`` here as
+ # the credentials that we saved earlier should be used.
+ credentials_file=None,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "osconfig.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service."""
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Quick check: Only create a new client if we do not already have one.
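``create_channel`` above can also be used on its own: build the channel first (for example, to tune channel options) and hand it to the transport, in which case the credentials arguments are ignored, as documented. A sketch:

.. code-block:: python

    channel = OsConfigZonalServiceGrpcTransport.create_channel(
        "osconfig.googleapis.com",
        options=[("grpc.max_receive_message_length", -1)],
    )
    transport = OsConfigZonalServiceGrpcTransport(channel=channel)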
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+
+ # Return the client from cache.
+ return self._operations_client
+
+ @property
+ def create_os_policy_assignment(
+ self,
+ ) -> Callable[
+ [os_policy_assignments.CreateOSPolicyAssignmentRequest],
+ operations_pb2.Operation,
+ ]:
+ r"""Return a callable for the create os policy assignment method over gRPC.
+
+ Create an OS policy assignment.
+
+ This method also creates the first revision of the OS policy
+ assignment.
+
+ This method returns a long running operation (LRO) that contains
+ the rollout details. The rollout can be cancelled by cancelling
+ the LRO.
+
+ For more information, see `Method:
+ projects.locations.osPolicyAssignments.operations.cancel <https://cloud.google.com/compute/docs/osconfig/rest/v1/projects.locations.osPolicyAssignments.operations/cancel>`__.
+
+ Returns:
+ Callable[[~.CreateOSPolicyAssignmentRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_os_policy_assignment" not in self._stubs:
+ self._stubs["create_os_policy_assignment"] = self.grpc_channel.unary_unary(
+ "/google.cloud.osconfig.v1.OsConfigZonalService/CreateOSPolicyAssignment",
+ request_serializer=os_policy_assignments.CreateOSPolicyAssignmentRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs["create_os_policy_assignment"]
+
+ @property
+ def update_os_policy_assignment(
+ self,
+ ) -> Callable[
+ [os_policy_assignments.UpdateOSPolicyAssignmentRequest],
+ operations_pb2.Operation,
+ ]:
+ r"""Return a callable for the update os policy assignment method over gRPC.
+
+ Update an existing OS policy assignment.
+
+ This method creates a new revision of the OS policy assignment.
+
+ This method returns a long running operation (LRO) that contains
+ the rollout details. The rollout can be cancelled by cancelling
+ the LRO.
+
+ For more information, see `Method:
+ projects.locations.osPolicyAssignments.operations.cancel <https://cloud.google.com/compute/docs/osconfig/rest/v1/projects.locations.osPolicyAssignments.operations/cancel>`__.
+
+ Returns:
+ Callable[[~.UpdateOSPolicyAssignmentRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_os_policy_assignment" not in self._stubs:
+ self._stubs["update_os_policy_assignment"] = self.grpc_channel.unary_unary(
+ "/google.cloud.osconfig.v1.OsConfigZonalService/UpdateOSPolicyAssignment",
+ request_serializer=os_policy_assignments.UpdateOSPolicyAssignmentRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs["update_os_policy_assignment"]
+
+ @property
+ def get_os_policy_assignment(
+ self,
+ ) -> Callable[
+ [os_policy_assignments.GetOSPolicyAssignmentRequest],
+ os_policy_assignments.OSPolicyAssignment,
+ ]:
+ r"""Return a callable for the get os policy assignment method over gRPC.
+
+ Retrieve an existing OS policy assignment.
+
+ This method always returns the latest revision. In order to
+ retrieve a previous revision of the assignment, also provide the
+ revision ID in the ``name`` parameter.
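Each RPC property in this transport repeats the lazy, cached stub creation shown above; reduced to its essentials it is just the following sketch (the helper name is hypothetical; ``unary_unary`` is the real ``grpc.Channel`` API):

.. code-block:: python

    def _cached_stub(self, name, path, request_serializer, response_deserializer):
        # Create the stub on first use, then serve it from the cache.
        if name not in self._stubs:
            self._stubs[name] = self.grpc_channel.unary_unary(
                path,
                request_serializer=request_serializer,
                response_deserializer=response_deserializer,
            )
        return self._stubs[name]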
+ + Returns: + Callable[[~.GetOSPolicyAssignmentRequest], + ~.OSPolicyAssignment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_os_policy_assignment" not in self._stubs: + self._stubs["get_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/GetOSPolicyAssignment", + request_serializer=os_policy_assignments.GetOSPolicyAssignmentRequest.serialize, + response_deserializer=os_policy_assignments.OSPolicyAssignment.deserialize, + ) + return self._stubs["get_os_policy_assignment"] + + @property + def list_os_policy_assignments( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentsRequest], + os_policy_assignments.ListOSPolicyAssignmentsResponse, + ]: + r"""Return a callable for the list os policy assignments method over gRPC. + + List the OS policy assignments under the parent + resource. + For each OS policy assignment, the latest revision is + returned. + + Returns: + Callable[[~.ListOSPolicyAssignmentsRequest], + ~.ListOSPolicyAssignmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignments" not in self._stubs: + self._stubs["list_os_policy_assignments"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListOSPolicyAssignments", + request_serializer=os_policy_assignments.ListOSPolicyAssignmentsRequest.serialize, + response_deserializer=os_policy_assignments.ListOSPolicyAssignmentsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignments"] + + @property + def list_os_policy_assignment_revisions( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest], + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + ]: + r"""Return a callable for the list os policy assignment + revisions method over gRPC. + + List the OS policy assignment revisions for a given + OS policy assignment. + + Returns: + Callable[[~.ListOSPolicyAssignmentRevisionsRequest], + ~.ListOSPolicyAssignmentRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignment_revisions" not in self._stubs: + self._stubs[ + "list_os_policy_assignment_revisions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListOSPolicyAssignmentRevisions", + request_serializer=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.serialize, + response_deserializer=os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignment_revisions"] + + @property + def delete_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.DeleteOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete os policy assignment method over gRPC. + + Delete the OS policy assignment. 
+
+        This method creates a new revision of the OS policy assignment.
+
+        This method returns a long running operation (LRO) that contains
+        the rollout details. The rollout can be cancelled by cancelling
+        the LRO.
+
+        If the LRO completes and is not cancelled, all revisions
+        associated with the OS policy assignment are deleted.
+
+        For more information, see `Method:
+        projects.locations.osPolicyAssignments.operations.cancel `__.
+
+        Returns:
+            Callable[[~.DeleteOSPolicyAssignmentRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_os_policy_assignment" not in self._stubs:
+            self._stubs["delete_os_policy_assignment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1.OsConfigZonalService/DeleteOSPolicyAssignment",
+                request_serializer=os_policy_assignments.DeleteOSPolicyAssignmentRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["delete_os_policy_assignment"]
+
+    @property
+    def get_os_policy_assignment_report(
+        self,
+    ) -> Callable[
+        [os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest],
+        os_policy_assignment_reports.OSPolicyAssignmentReport,
+    ]:
+        r"""Return a callable for the get os policy assignment
+        report method over gRPC.
+
+        Get the OS policy assignment report for the
+        specified Compute Engine VM instance.
+
+        Returns:
+            Callable[[~.GetOSPolicyAssignmentReportRequest],
+                    ~.OSPolicyAssignmentReport]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_os_policy_assignment_report" not in self._stubs:
+            self._stubs[
+                "get_os_policy_assignment_report"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1.OsConfigZonalService/GetOSPolicyAssignmentReport",
+                request_serializer=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.serialize,
+                response_deserializer=os_policy_assignment_reports.OSPolicyAssignmentReport.deserialize,
+            )
+        return self._stubs["get_os_policy_assignment_report"]
+
+    @property
+    def list_os_policy_assignment_reports(
+        self,
+    ) -> Callable[
+        [os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest],
+        os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse,
+    ]:
+        r"""Return a callable for the list os policy assignment
+        reports method over gRPC.
+
+        List OS policy assignment reports for all Compute
+        Engine VM instances in the specified zone.
+
+        Returns:
+            Callable[[~.ListOSPolicyAssignmentReportsRequest],
+                    ~.ListOSPolicyAssignmentReportsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
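+        # Illustrative sketch only (not part of the generated surface;
+        # ``transport`` is a hypothetical OsConfigZonalServiceGrpcTransport
+        # instance):
+        #
+        #   rpc = transport.list_os_policy_assignment_reports
+        #   response = rpc(request)  # one raw ListOSPolicyAssignmentReportsResponse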
+ if "list_os_policy_assignment_reports" not in self._stubs: + self._stubs[ + "list_os_policy_assignment_reports" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListOSPolicyAssignmentReports", + request_serializer=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest.serialize, + response_deserializer=os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignment_reports"] + + @property + def get_inventory( + self, + ) -> Callable[[inventory.GetInventoryRequest], inventory.Inventory]: + r"""Return a callable for the get inventory method over gRPC. + + Get inventory data for the specified VM instance. If the VM has + no associated inventory, the message ``NOT_FOUND`` is returned. + + Returns: + Callable[[~.GetInventoryRequest], + ~.Inventory]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_inventory" not in self._stubs: + self._stubs["get_inventory"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/GetInventory", + request_serializer=inventory.GetInventoryRequest.serialize, + response_deserializer=inventory.Inventory.deserialize, + ) + return self._stubs["get_inventory"] + + @property + def list_inventories( + self, + ) -> Callable[ + [inventory.ListInventoriesRequest], inventory.ListInventoriesResponse + ]: + r"""Return a callable for the list inventories method over gRPC. + + List inventory data for all VM instances in the + specified zone. + + Returns: + Callable[[~.ListInventoriesRequest], + ~.ListInventoriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_inventories" not in self._stubs: + self._stubs["list_inventories"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListInventories", + request_serializer=inventory.ListInventoriesRequest.serialize, + response_deserializer=inventory.ListInventoriesResponse.deserialize, + ) + return self._stubs["list_inventories"] + + @property + def get_vulnerability_report( + self, + ) -> Callable[ + [vulnerability.GetVulnerabilityReportRequest], vulnerability.VulnerabilityReport + ]: + r"""Return a callable for the get vulnerability report method over gRPC. + + Gets the vulnerability report for the specified VM + instance. Only VMs with inventory data have + vulnerability reports associated with them. + + Returns: + Callable[[~.GetVulnerabilityReportRequest], + ~.VulnerabilityReport]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
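+        # Note: self.grpc_channel.unary_unary(...) below returns a
+        # grpc.UnaryUnaryMultiCallable; caching it in self._stubs means each
+        # method name maps to a single stub for the lifetime of the transport.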
+ if "get_vulnerability_report" not in self._stubs: + self._stubs["get_vulnerability_report"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/GetVulnerabilityReport", + request_serializer=vulnerability.GetVulnerabilityReportRequest.serialize, + response_deserializer=vulnerability.VulnerabilityReport.deserialize, + ) + return self._stubs["get_vulnerability_report"] + + @property + def list_vulnerability_reports( + self, + ) -> Callable[ + [vulnerability.ListVulnerabilityReportsRequest], + vulnerability.ListVulnerabilityReportsResponse, + ]: + r"""Return a callable for the list vulnerability reports method over gRPC. + + List vulnerability reports for all VM instances in + the specified zone. + + Returns: + Callable[[~.ListVulnerabilityReportsRequest], + ~.ListVulnerabilityReportsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_vulnerability_reports" not in self._stubs: + self._stubs["list_vulnerability_reports"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListVulnerabilityReports", + request_serializer=vulnerability.ListVulnerabilityReportsRequest.serialize, + response_deserializer=vulnerability.ListVulnerabilityReportsResponse.deserialize, + ) + return self._stubs["list_vulnerability_reports"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("OsConfigZonalServiceGrpcTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/grpc_asyncio.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..bc0dc79e7a1f --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/grpc_asyncio.py @@ -0,0 +1,664 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.osconfig_v1.types import (
+    inventory,
+    os_policy_assignment_reports,
+    os_policy_assignments,
+    vulnerability,
+)
+
+from .base import DEFAULT_CLIENT_INFO, OsConfigZonalServiceTransport
+from .grpc import OsConfigZonalServiceGrpcTransport
+
+
+class OsConfigZonalServiceGrpcAsyncIOTransport(OsConfigZonalServiceTransport):
+    """gRPC AsyncIO backend transport for OsConfigZonalService.
+
+    Zonal OS Config API
+
+    The OS Config service is the server-side component that allows
+    users to manage package installations and patch jobs for Compute
+    Engine VM instances.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "osconfig.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
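+
+        For example (an illustrative sketch, not generated code; ``creds``
+        is assumed to be previously obtained application default
+        credentials)::
+
+            channel = OsConfigZonalServiceGrpcAsyncIOTransport.create_channel(
+                "osconfig.googleapis.com",
+                credentials=creds,
+            )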
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "osconfig.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
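+
+        For example (an illustrative sketch, not generated code; argument
+        values are hypothetical)::
+
+            transport = OsConfigZonalServiceGrpcAsyncIOTransport(
+                host="osconfig.googleapis.com",
+                credentials=None,  # ascertained from the environment
+            )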
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.CreateOSPolicyAssignmentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create os policy assignment method over gRPC. + + Create an OS policy assignment. + + This method also creates the first revision of the OS policy + assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. 
+ + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + Returns: + Callable[[~.CreateOSPolicyAssignmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_os_policy_assignment" not in self._stubs: + self._stubs["create_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/CreateOSPolicyAssignment", + request_serializer=os_policy_assignments.CreateOSPolicyAssignmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_os_policy_assignment"] + + @property + def update_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.UpdateOSPolicyAssignmentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update os policy assignment method over gRPC. + + Update an existing OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + Returns: + Callable[[~.UpdateOSPolicyAssignmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_os_policy_assignment" not in self._stubs: + self._stubs["update_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/UpdateOSPolicyAssignment", + request_serializer=os_policy_assignments.UpdateOSPolicyAssignmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_os_policy_assignment"] + + @property + def get_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.GetOSPolicyAssignmentRequest], + Awaitable[os_policy_assignments.OSPolicyAssignment], + ]: + r"""Return a callable for the get os policy assignment method over gRPC. + + Retrieve an existing OS policy assignment. + + This method always returns the latest revision. In order to + retrieve a previous revision of the assignment, also provide the + revision ID in the ``name`` parameter. + + Returns: + Callable[[~.GetOSPolicyAssignmentRequest], + Awaitable[~.OSPolicyAssignment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
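+        # Illustrative sketch only (``transport`` is a hypothetical instance
+        # of this class): the async stub is awaitable, e.g.
+        #
+        #   assignment = await transport.get_os_policy_assignment(request)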
+ if "get_os_policy_assignment" not in self._stubs: + self._stubs["get_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/GetOSPolicyAssignment", + request_serializer=os_policy_assignments.GetOSPolicyAssignmentRequest.serialize, + response_deserializer=os_policy_assignments.OSPolicyAssignment.deserialize, + ) + return self._stubs["get_os_policy_assignment"] + + @property + def list_os_policy_assignments( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentsRequest], + Awaitable[os_policy_assignments.ListOSPolicyAssignmentsResponse], + ]: + r"""Return a callable for the list os policy assignments method over gRPC. + + List the OS policy assignments under the parent + resource. + For each OS policy assignment, the latest revision is + returned. + + Returns: + Callable[[~.ListOSPolicyAssignmentsRequest], + Awaitable[~.ListOSPolicyAssignmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignments" not in self._stubs: + self._stubs["list_os_policy_assignments"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListOSPolicyAssignments", + request_serializer=os_policy_assignments.ListOSPolicyAssignmentsRequest.serialize, + response_deserializer=os_policy_assignments.ListOSPolicyAssignmentsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignments"] + + @property + def list_os_policy_assignment_revisions( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest], + Awaitable[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse], + ]: + r"""Return a callable for the list os policy assignment + revisions method over gRPC. + + List the OS policy assignment revisions for a given + OS policy assignment. + + Returns: + Callable[[~.ListOSPolicyAssignmentRevisionsRequest], + Awaitable[~.ListOSPolicyAssignmentRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignment_revisions" not in self._stubs: + self._stubs[ + "list_os_policy_assignment_revisions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListOSPolicyAssignmentRevisions", + request_serializer=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.serialize, + response_deserializer=os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignment_revisions"] + + @property + def delete_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.DeleteOSPolicyAssignmentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete os policy assignment method over gRPC. + + Delete the OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. 
+
+        If the LRO completes and is not cancelled, all revisions
+        associated with the OS policy assignment are deleted.
+
+        For more information, see `Method:
+        projects.locations.osPolicyAssignments.operations.cancel `__.
+
+        Returns:
+            Callable[[~.DeleteOSPolicyAssignmentRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_os_policy_assignment" not in self._stubs:
+            self._stubs["delete_os_policy_assignment"] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1.OsConfigZonalService/DeleteOSPolicyAssignment",
+                request_serializer=os_policy_assignments.DeleteOSPolicyAssignmentRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["delete_os_policy_assignment"]
+
+    @property
+    def get_os_policy_assignment_report(
+        self,
+    ) -> Callable[
+        [os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest],
+        Awaitable[os_policy_assignment_reports.OSPolicyAssignmentReport],
+    ]:
+        r"""Return a callable for the get os policy assignment
+        report method over gRPC.
+
+        Get the OS policy assignment report for the
+        specified Compute Engine VM instance.
+
+        Returns:
+            Callable[[~.GetOSPolicyAssignmentReportRequest],
+                    Awaitable[~.OSPolicyAssignmentReport]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_os_policy_assignment_report" not in self._stubs:
+            self._stubs[
+                "get_os_policy_assignment_report"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1.OsConfigZonalService/GetOSPolicyAssignmentReport",
+                request_serializer=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.serialize,
+                response_deserializer=os_policy_assignment_reports.OSPolicyAssignmentReport.deserialize,
+            )
+        return self._stubs["get_os_policy_assignment_report"]
+
+    @property
+    def list_os_policy_assignment_reports(
+        self,
+    ) -> Callable[
+        [os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest],
+        Awaitable[os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse],
+    ]:
+        r"""Return a callable for the list os policy assignment
+        reports method over gRPC.
+
+        List OS policy assignment reports for all Compute
+        Engine VM instances in the specified zone.
+
+        Returns:
+            Callable[[~.ListOSPolicyAssignmentReportsRequest],
+                    Awaitable[~.ListOSPolicyAssignmentReportsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
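+        # Note: this stub returns a single raw
+        # ListOSPolicyAssignmentReportsResponse page; follow-up requests via
+        # next_page_token are issued by the client-layer pagers, not by the
+        # transport.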
+ if "list_os_policy_assignment_reports" not in self._stubs: + self._stubs[ + "list_os_policy_assignment_reports" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListOSPolicyAssignmentReports", + request_serializer=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest.serialize, + response_deserializer=os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignment_reports"] + + @property + def get_inventory( + self, + ) -> Callable[[inventory.GetInventoryRequest], Awaitable[inventory.Inventory]]: + r"""Return a callable for the get inventory method over gRPC. + + Get inventory data for the specified VM instance. If the VM has + no associated inventory, the message ``NOT_FOUND`` is returned. + + Returns: + Callable[[~.GetInventoryRequest], + Awaitable[~.Inventory]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_inventory" not in self._stubs: + self._stubs["get_inventory"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/GetInventory", + request_serializer=inventory.GetInventoryRequest.serialize, + response_deserializer=inventory.Inventory.deserialize, + ) + return self._stubs["get_inventory"] + + @property + def list_inventories( + self, + ) -> Callable[ + [inventory.ListInventoriesRequest], Awaitable[inventory.ListInventoriesResponse] + ]: + r"""Return a callable for the list inventories method over gRPC. + + List inventory data for all VM instances in the + specified zone. + + Returns: + Callable[[~.ListInventoriesRequest], + Awaitable[~.ListInventoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_inventories" not in self._stubs: + self._stubs["list_inventories"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListInventories", + request_serializer=inventory.ListInventoriesRequest.serialize, + response_deserializer=inventory.ListInventoriesResponse.deserialize, + ) + return self._stubs["list_inventories"] + + @property + def get_vulnerability_report( + self, + ) -> Callable[ + [vulnerability.GetVulnerabilityReportRequest], + Awaitable[vulnerability.VulnerabilityReport], + ]: + r"""Return a callable for the get vulnerability report method over gRPC. + + Gets the vulnerability report for the specified VM + instance. Only VMs with inventory data have + vulnerability reports associated with them. + + Returns: + Callable[[~.GetVulnerabilityReportRequest], + Awaitable[~.VulnerabilityReport]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
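+        # Note: this mirrors the synchronous property in grpc.py; only the
+        # Awaitable return annotation differs, because the aio channel
+        # produces awaitable calls.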
+ if "get_vulnerability_report" not in self._stubs: + self._stubs["get_vulnerability_report"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/GetVulnerabilityReport", + request_serializer=vulnerability.GetVulnerabilityReportRequest.serialize, + response_deserializer=vulnerability.VulnerabilityReport.deserialize, + ) + return self._stubs["get_vulnerability_report"] + + @property + def list_vulnerability_reports( + self, + ) -> Callable[ + [vulnerability.ListVulnerabilityReportsRequest], + Awaitable[vulnerability.ListVulnerabilityReportsResponse], + ]: + r"""Return a callable for the list vulnerability reports method over gRPC. + + List vulnerability reports for all VM instances in + the specified zone. + + Returns: + Callable[[~.ListVulnerabilityReportsRequest], + Awaitable[~.ListVulnerabilityReportsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_vulnerability_reports" not in self._stubs: + self._stubs["list_vulnerability_reports"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1.OsConfigZonalService/ListVulnerabilityReports", + request_serializer=vulnerability.ListVulnerabilityReportsRequest.serialize, + response_deserializer=vulnerability.ListVulnerabilityReportsResponse.deserialize, + ) + return self._stubs["list_vulnerability_reports"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("OsConfigZonalServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/rest.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/rest.py new file mode 100644 index 000000000000..a8af97f0935c --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/services/os_config_zonal_service/transports/rest.py @@ -0,0 +1,1925 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.osconfig_v1.types import ( + inventory, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import OsConfigZonalServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class OsConfigZonalServiceRestInterceptor: + """Interceptor for OsConfigZonalService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OsConfigZonalServiceRestTransport. + + .. 
code-block:: python + class MyCustomOsConfigZonalServiceInterceptor(OsConfigZonalServiceRestInterceptor): + def pre_create_os_policy_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_os_policy_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_os_policy_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_os_policy_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_inventory(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_inventory(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_os_policy_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_os_policy_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_os_policy_assignment_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_os_policy_assignment_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_vulnerability_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_vulnerability_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_inventories(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_inventories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_os_policy_assignment_reports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_os_policy_assignment_reports(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_os_policy_assignment_revisions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_os_policy_assignment_revisions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_os_policy_assignments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_os_policy_assignments(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_vulnerability_reports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_vulnerability_reports(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_os_policy_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_os_policy_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OsConfigZonalServiceRestTransport(interceptor=MyCustomOsConfigZonalServiceInterceptor()) + client = OsConfigZonalServiceClient(transport=transport) + + + """ + + def pre_create_os_policy_assignment( + self, + request: os_policy_assignments.CreateOSPolicyAssignmentRequest, + metadata: Sequence[Tuple[str, 
str]], + ) -> Tuple[ + os_policy_assignments.CreateOSPolicyAssignmentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_os_policy_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_create_os_policy_assignment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_os_policy_assignment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_delete_os_policy_assignment( + self, + request: os_policy_assignments.DeleteOSPolicyAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.DeleteOSPolicyAssignmentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_os_policy_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_delete_os_policy_assignment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_os_policy_assignment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_get_inventory( + self, + request: inventory.GetInventoryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[inventory.GetInventoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_inventory + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_get_inventory(self, response: inventory.Inventory) -> inventory.Inventory: + """Post-rpc interceptor for get_inventory + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_get_os_policy_assignment( + self, + request: os_policy_assignments.GetOSPolicyAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.GetOSPolicyAssignmentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_os_policy_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_get_os_policy_assignment( + self, response: os_policy_assignments.OSPolicyAssignment + ) -> os_policy_assignments.OSPolicyAssignment: + """Post-rpc interceptor for get_os_policy_assignment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_get_os_policy_assignment_report( + self, + request: os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_os_policy_assignment_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. 
+ """ + return request, metadata + + def post_get_os_policy_assignment_report( + self, response: os_policy_assignment_reports.OSPolicyAssignmentReport + ) -> os_policy_assignment_reports.OSPolicyAssignmentReport: + """Post-rpc interceptor for get_os_policy_assignment_report + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_get_vulnerability_report( + self, + request: vulnerability.GetVulnerabilityReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vulnerability.GetVulnerabilityReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_vulnerability_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_get_vulnerability_report( + self, response: vulnerability.VulnerabilityReport + ) -> vulnerability.VulnerabilityReport: + """Post-rpc interceptor for get_vulnerability_report + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_inventories( + self, + request: inventory.ListInventoriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[inventory.ListInventoriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_inventories + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_inventories( + self, response: inventory.ListInventoriesResponse + ) -> inventory.ListInventoriesResponse: + """Post-rpc interceptor for list_inventories + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_os_policy_assignment_reports( + self, + request: os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_os_policy_assignment_reports + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_os_policy_assignment_reports( + self, + response: os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + ) -> os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse: + """Post-rpc interceptor for list_os_policy_assignment_reports + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_os_policy_assignment_revisions( + self, + request: os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_os_policy_assignment_revisions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. 
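+
+        For example, a subclass might append a metadata entry (an
+        illustrative sketch; the header name is hypothetical):
+
+        .. code-block:: python
+
+            def pre_list_os_policy_assignment_revisions(self, request, metadata):
+                metadata = list(metadata) + [("x-example-header", "example")]
+                return request, metadata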
+ """ + return request, metadata + + def post_list_os_policy_assignment_revisions( + self, response: os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse + ) -> os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse: + """Post-rpc interceptor for list_os_policy_assignment_revisions + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_os_policy_assignments( + self, + request: os_policy_assignments.ListOSPolicyAssignmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.ListOSPolicyAssignmentsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_os_policy_assignments + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_os_policy_assignments( + self, response: os_policy_assignments.ListOSPolicyAssignmentsResponse + ) -> os_policy_assignments.ListOSPolicyAssignmentsResponse: + """Post-rpc interceptor for list_os_policy_assignments + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_vulnerability_reports( + self, + request: vulnerability.ListVulnerabilityReportsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + vulnerability.ListVulnerabilityReportsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_vulnerability_reports + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_vulnerability_reports( + self, response: vulnerability.ListVulnerabilityReportsResponse + ) -> vulnerability.ListVulnerabilityReportsResponse: + """Post-rpc interceptor for list_vulnerability_reports + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_update_os_policy_assignment( + self, + request: os_policy_assignments.UpdateOSPolicyAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.UpdateOSPolicyAssignmentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_os_policy_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_update_os_policy_assignment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_os_policy_assignment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OsConfigZonalServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OsConfigZonalServiceRestInterceptor + + +class OsConfigZonalServiceRestTransport(OsConfigZonalServiceTransport): + """REST backend transport for OsConfigZonalService. + + Zonal OS Config API + + The OS Config service is the server-side component that allows + users to manage package installations and patch jobs for Compute + Engine VM instances. 
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "osconfig.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[OsConfigZonalServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or OsConfigZonalServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/osPolicyAssignments/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/osPolicyAssignments/*/operations/*}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateOSPolicyAssignment(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("CreateOSPolicyAssignment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "osPolicyAssignmentId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignments.CreateOSPolicyAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create os policy + assignment method over HTTP. + + Args: + request (~.os_policy_assignments.CreateOSPolicyAssignmentRequest): + The request object. A request message to create an OS + policy assignment + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*}/osPolicyAssignments",
+                    "body": "os_policy_assignment",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_os_policy_assignment(
+                request, metadata
+            )
+            pb_request = os_policy_assignments.CreateOSPolicyAssignmentRequest.pb(
+                request
+            )
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = operations_pb2.Operation()
+            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_create_os_policy_assignment(resp)
+            return resp
+
+    class _DeleteOSPolicyAssignment(OsConfigZonalServiceRestStub):
+        def __hash__(self):
+            return hash("DeleteOSPolicyAssignment")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: os_policy_assignments.DeleteOSPolicyAssignmentRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the delete os policy
+            assignment method over HTTP.
+
+            Args:
+                request (~.os_policy_assignments.DeleteOSPolicyAssignmentRequest):
+                    The request object. A request message for deleting an OS
+                    policy assignment.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/osPolicyAssignments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_os_policy_assignment( + request, metadata + ) + pb_request = os_policy_assignments.DeleteOSPolicyAssignmentRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_os_policy_assignment(resp) + return resp + + class _GetInventory(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetInventory") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: inventory.GetInventoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> inventory.Inventory: + r"""Call the get inventory method over HTTP. + + Args: + request (~.inventory.GetInventoryRequest): + The request object. A request message for getting + inventory data for the specified VM. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.inventory.Inventory: + This API resource represents the available inventory + data for a Compute Engine virtual machine (VM) instance + at a given point in time. + + You can use this API resource to determine the inventory + data of your VM. + + For more information, see `Information provided by OS + inventory + management `__. 
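A hedged usage sketch for this lookup through the public client (resource names below are hypothetical):

```python
from google.cloud import osconfig_v1

client = osconfig_v1.OsConfigZonalServiceClient(transport="rest")
inv = client.get_inventory(
    name="projects/my-project/locations/us-central1-a/instances/my-vm/inventory",
    view=osconfig_v1.InventoryView.FULL,  # BASIC (the default) omits items
)
print(inv.os_info.short_name, inv.os_info.version)
```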
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*/inventory}", + }, + ] + request, metadata = self._interceptor.pre_get_inventory(request, metadata) + pb_request = inventory.GetInventoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = inventory.Inventory() + pb_resp = inventory.Inventory.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_inventory(resp) + return resp + + class _GetOSPolicyAssignment(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetOSPolicyAssignment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignments.GetOSPolicyAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignments.OSPolicyAssignment: + r"""Call the get os policy assignment method over HTTP. + + Args: + request (~.os_policy_assignments.GetOSPolicyAssignmentRequest): + The request object. A request message to get an OS policy + assignment + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.os_policy_assignments.OSPolicyAssignment: + OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance through a + set of configuration resources that provide capabilities + such as installing or removing software packages, or + executing a script. + + For more information, see `OS policy and OS policy + assignment `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/osPolicyAssignments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_os_policy_assignment( + request, metadata + ) + pb_request = os_policy_assignments.GetOSPolicyAssignmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = os_policy_assignments.OSPolicyAssignment() + pb_resp = os_policy_assignments.OSPolicyAssignment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_os_policy_assignment(resp) + return resp + + class _GetOSPolicyAssignmentReport(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetOSPolicyAssignmentReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignment_reports.OSPolicyAssignmentReport: + r"""Call the get os policy assignment + report method over HTTP. + + Args: + request (~.os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest): + The request object. Get a report of the OS policy + assignment for a VM instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.os_policy_assignment_reports.OSPolicyAssignmentReport: + A report of the OS policy assignment + status for a given instance. 
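The report resource is addressed through both the instance and the assignment, which makes the name unusually long. A sketch with hypothetical identifiers:

```python
from google.cloud import osconfig_v1

client = osconfig_v1.OsConfigZonalServiceClient(transport="rest")
report = client.get_os_policy_assignment_report(
    name=(
        "projects/my-project/locations/us-central1-a/instances/my-vm"
        "/osPolicyAssignments/my-assignment/report"
    )
)
# Each entry reports compliance for one OS policy in the assignment.
for pc in report.os_policy_compliances:
    print(pc.os_policy_id, pc.compliance_state)
```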
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*/osPolicyAssignments/*/report}", + }, + ] + request, metadata = self._interceptor.pre_get_os_policy_assignment_report( + request, metadata + ) + pb_request = ( + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = os_policy_assignment_reports.OSPolicyAssignmentReport() + pb_resp = os_policy_assignment_reports.OSPolicyAssignmentReport.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_os_policy_assignment_report(resp) + return resp + + class _GetVulnerabilityReport(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetVulnerabilityReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vulnerability.GetVulnerabilityReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vulnerability.VulnerabilityReport: + r"""Call the get vulnerability report method over HTTP. + + Args: + request (~.vulnerability.GetVulnerabilityReportRequest): + The request object. A request message for getting the + vulnerability report for the specified + VM. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vulnerability.VulnerabilityReport: + This API resource represents the vulnerability report + for a specified Compute Engine virtual machine (VM) + instance at a given point in time. + + For more information, see `Vulnerability + reports `__. 
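A sketch of fetching the read-only vulnerability report through the public client (names are hypothetical, and the printed fields are a plausible subset of the message):

```python
from google.cloud import osconfig_v1

client = osconfig_v1.OsConfigZonalServiceClient(transport="rest")
report = client.get_vulnerability_report(
    name=(
        "projects/my-project/locations/us-central1-a"
        "/instances/my-vm/vulnerabilityReport"
    )
)
for vuln in report.vulnerabilities:
    print(vuln.details.cve, vuln.details.severity)
```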
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*/vulnerabilityReport}", + }, + ] + request, metadata = self._interceptor.pre_get_vulnerability_report( + request, metadata + ) + pb_request = vulnerability.GetVulnerabilityReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vulnerability.VulnerabilityReport() + pb_resp = vulnerability.VulnerabilityReport.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_vulnerability_report(resp) + return resp + + class _ListInventories(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListInventories") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: inventory.ListInventoriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> inventory.ListInventoriesResponse: + r"""Call the list inventories method over HTTP. + + Args: + request (~.inventory.ListInventoriesRequest): + The request object. A request message for listing + inventory data for all VMs in the + specified location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.inventory.ListInventoriesResponse: + A response message for listing + inventory data for all VMs in a + specified location. 
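The stub below returns a single page plus a ``next_page_token``; the generated client wraps that loop in a pager. A sketch, assuming hypothetical resource names (the ``instances/-`` wildcard lists across all VMs in the zone):

```python
from google.cloud import osconfig_v1

client = osconfig_v1.OsConfigZonalServiceClient(transport="rest")
pager = client.list_inventories(
    parent="projects/my-project/locations/us-central1-a/instances/-",
    view=osconfig_v1.InventoryView.BASIC,
)
for inventory in pager:  # the pager re-issues requests with next_page_token
    print(inventory.name)
```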
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/instances/*}/inventories", + }, + ] + request, metadata = self._interceptor.pre_list_inventories( + request, metadata + ) + pb_request = inventory.ListInventoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = inventory.ListInventoriesResponse() + pb_resp = inventory.ListInventoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_inventories(resp) + return resp + + class _ListOSPolicyAssignmentReports(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListOSPolicyAssignmentReports") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse: + r"""Call the list os policy assignment + reports method over HTTP. + + Args: + request (~.os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest): + The request object. List the OS policy assignment reports + for VM instances. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse: + A response message for listing OS + Policy assignment reports including the + page of results and page token. 
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{parent=projects/*/locations/*/instances/*/osPolicyAssignments/*}/reports",
+ },
+ ]
+ request, metadata = self._interceptor.pre_list_os_policy_assignment_reports(
+ request, metadata
+ )
+ pb_request = (
+ os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest.pb(
+ request
+ )
+ )
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse()
+ pb_resp = (
+ os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.pb(
+ resp
+ )
+ )
+
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_list_os_policy_assignment_reports(resp)
+ return resp
+
+ class _ListOSPolicyAssignmentRevisions(OsConfigZonalServiceRestStub):
+ def __hash__(self):
+ return hash("ListOSPolicyAssignmentRevisions")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse:
+ r"""Call the list os policy assignment
+ revisions method over HTTP.
+
+ Args:
+ request (~.os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest):
+ The request object. A request message to list revisions
+ for an OS policy assignment
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse:
+ A response message for listing all
+ revisions for an OS policy assignment.
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "get",
+ "uri": "/v1/{name=projects/*/locations/*/osPolicyAssignments/*}:listRevisions",
+ },
+ ]
+ (
+ request,
+ metadata,
+ ) = self._interceptor.pre_list_os_policy_assignment_revisions(
+ request, metadata
+ )
+ pb_request = (
+ os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.pb(request)
+ )
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse()
+ pb_resp = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.pb(
+ resp
+ )
+
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_list_os_policy_assignment_revisions(resp)
+ return resp
+
+ class _ListOSPolicyAssignments(OsConfigZonalServiceRestStub):
+ def __hash__(self):
+ return hash("ListOSPolicyAssignments")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: os_policy_assignments.ListOSPolicyAssignmentsRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> os_policy_assignments.ListOSPolicyAssignmentsResponse:
+ r"""Call the list os policy
+ assignments method over HTTP.
+
+ Args:
+ request (~.os_policy_assignments.ListOSPolicyAssignmentsRequest):
+ The request object. A request message to list OS policy
+ assignments for a parent resource
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.os_policy_assignments.ListOSPolicyAssignmentsResponse:
+ A response message for listing all
+ assignments under the given parent.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/osPolicyAssignments", + }, + ] + request, metadata = self._interceptor.pre_list_os_policy_assignments( + request, metadata + ) + pb_request = os_policy_assignments.ListOSPolicyAssignmentsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = os_policy_assignments.ListOSPolicyAssignmentsResponse() + pb_resp = os_policy_assignments.ListOSPolicyAssignmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_os_policy_assignments(resp) + return resp + + class _ListVulnerabilityReports(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListVulnerabilityReports") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vulnerability.ListVulnerabilityReportsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vulnerability.ListVulnerabilityReportsResponse: + r"""Call the list vulnerability + reports method over HTTP. + + Args: + request (~.vulnerability.ListVulnerabilityReportsRequest): + The request object. A request message for listing + vulnerability reports for all VM + instances in the specified location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vulnerability.ListVulnerabilityReportsResponse: + A response message for listing + vulnerability reports for all VM + instances in the specified location. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/instances/*}/vulnerabilityReports", + }, + ] + request, metadata = self._interceptor.pre_list_vulnerability_reports( + request, metadata + ) + pb_request = vulnerability.ListVulnerabilityReportsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vulnerability.ListVulnerabilityReportsResponse() + pb_resp = vulnerability.ListVulnerabilityReportsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_vulnerability_reports(resp) + return resp + + class _UpdateOSPolicyAssignment(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("UpdateOSPolicyAssignment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignments.UpdateOSPolicyAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update os policy + assignment method over HTTP. + + Args: + request (~.os_policy_assignments.UpdateOSPolicyAssignmentRequest): + The request object. A request message to update an OS + policy assignment + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
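Because this binding is a PATCH, a partial update is the common case; ``update_mask`` restricts which fields the server touches. A sketch with hypothetical names:

```python
from google.cloud import osconfig_v1
from google.protobuf import field_mask_pb2

client = osconfig_v1.OsConfigZonalServiceClient(transport="rest")

assignment = osconfig_v1.OSPolicyAssignment(
    name=(
        "projects/my-project/locations/us-central1-a"
        "/osPolicyAssignments/my-assignment"
    ),
    description="Updated description",
)
operation = client.update_os_policy_assignment(
    os_policy_assignment=assignment,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)
updated = operation.result(timeout=300)  # update is also a long-running op
```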
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{os_policy_assignment.name=projects/*/locations/*/osPolicyAssignments/*}", + "body": "os_policy_assignment", + }, + ] + request, metadata = self._interceptor.pre_update_os_policy_assignment( + request, metadata + ) + pb_request = os_policy_assignments.UpdateOSPolicyAssignmentRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_os_policy_assignment(resp) + return resp + + @property + def create_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.CreateOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateOSPolicyAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.DeleteOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteOSPolicyAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_inventory( + self, + ) -> Callable[[inventory.GetInventoryRequest], inventory.Inventory]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInventory(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.GetOSPolicyAssignmentRequest], + os_policy_assignments.OSPolicyAssignment, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetOSPolicyAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_os_policy_assignment_report( + self, + ) -> Callable[ + [os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest], + os_policy_assignment_reports.OSPolicyAssignmentReport, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOSPolicyAssignmentReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_vulnerability_report( + self, + ) -> Callable[ + [vulnerability.GetVulnerabilityReportRequest], vulnerability.VulnerabilityReport + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetVulnerabilityReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_inventories( + self, + ) -> Callable[ + [inventory.ListInventoriesRequest], inventory.ListInventoriesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInventories(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_os_policy_assignment_reports( + self, + ) -> Callable[ + [os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest], + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOSPolicyAssignmentReports(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_os_policy_assignment_revisions( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest], + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOSPolicyAssignmentRevisions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_os_policy_assignments( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentsRequest], + os_policy_assignments.ListOSPolicyAssignmentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOSPolicyAssignments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_vulnerability_reports( + self, + ) -> Callable[ + [vulnerability.ListVulnerabilityReportsRequest], + vulnerability.ListVulnerabilityReportsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListVulnerabilityReports(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.UpdateOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateOSPolicyAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("OsConfigZonalServiceRestTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/__init__.py new file mode 100644 index 000000000000..4f5b0ccdbda9 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/__init__.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .inventory import ( + GetInventoryRequest, + Inventory, + InventoryView, + ListInventoriesRequest, + ListInventoriesResponse, +) +from .os_policy import OSPolicy +from .os_policy_assignment_reports import ( + GetOSPolicyAssignmentReportRequest, + ListOSPolicyAssignmentReportsRequest, + ListOSPolicyAssignmentReportsResponse, + OSPolicyAssignmentReport, +) +from .os_policy_assignments import ( + CreateOSPolicyAssignmentRequest, + DeleteOSPolicyAssignmentRequest, + GetOSPolicyAssignmentRequest, + ListOSPolicyAssignmentRevisionsRequest, + ListOSPolicyAssignmentRevisionsResponse, + ListOSPolicyAssignmentsRequest, + ListOSPolicyAssignmentsResponse, + OSPolicyAssignment, + OSPolicyAssignmentOperationMetadata, + UpdateOSPolicyAssignmentRequest, +) +from .osconfig_common import FixedOrPercent +from .patch_deployments import ( + CreatePatchDeploymentRequest, + DeletePatchDeploymentRequest, + GetPatchDeploymentRequest, + ListPatchDeploymentsRequest, + ListPatchDeploymentsResponse, + MonthlySchedule, + OneTimeSchedule, + PatchDeployment, + PausePatchDeploymentRequest, + RecurringSchedule, + ResumePatchDeploymentRequest, + UpdatePatchDeploymentRequest, + WeekDayOfMonth, + WeeklySchedule, +) +from .patch_jobs import ( + AptSettings, + CancelPatchJobRequest, + ExecStep, + ExecStepConfig, + ExecutePatchJobRequest, + GcsObject, + GetPatchJobRequest, + GooSettings, + Instance, + ListPatchJobInstanceDetailsRequest, + ListPatchJobInstanceDetailsResponse, + ListPatchJobsRequest, + ListPatchJobsResponse, + PatchConfig, + PatchInstanceFilter, + PatchJob, + PatchJobInstanceDetails, + PatchRollout, + WindowsUpdateSettings, + YumSettings, + ZypperSettings, +) +from .vulnerability import ( + CVSSv3, + GetVulnerabilityReportRequest, + ListVulnerabilityReportsRequest, + ListVulnerabilityReportsResponse, + VulnerabilityReport, +) + +__all__ = ( + "GetInventoryRequest", + "Inventory", + "ListInventoriesRequest", + "ListInventoriesResponse", + "InventoryView", + "OSPolicy", + "GetOSPolicyAssignmentReportRequest", + "ListOSPolicyAssignmentReportsRequest", + "ListOSPolicyAssignmentReportsResponse", + "OSPolicyAssignmentReport", + "CreateOSPolicyAssignmentRequest", + "DeleteOSPolicyAssignmentRequest", + "GetOSPolicyAssignmentRequest", + "ListOSPolicyAssignmentRevisionsRequest", + 
"ListOSPolicyAssignmentRevisionsResponse", + "ListOSPolicyAssignmentsRequest", + "ListOSPolicyAssignmentsResponse", + "OSPolicyAssignment", + "OSPolicyAssignmentOperationMetadata", + "UpdateOSPolicyAssignmentRequest", + "FixedOrPercent", + "CreatePatchDeploymentRequest", + "DeletePatchDeploymentRequest", + "GetPatchDeploymentRequest", + "ListPatchDeploymentsRequest", + "ListPatchDeploymentsResponse", + "MonthlySchedule", + "OneTimeSchedule", + "PatchDeployment", + "PausePatchDeploymentRequest", + "RecurringSchedule", + "ResumePatchDeploymentRequest", + "UpdatePatchDeploymentRequest", + "WeekDayOfMonth", + "WeeklySchedule", + "AptSettings", + "CancelPatchJobRequest", + "ExecStep", + "ExecStepConfig", + "ExecutePatchJobRequest", + "GcsObject", + "GetPatchJobRequest", + "GooSettings", + "Instance", + "ListPatchJobInstanceDetailsRequest", + "ListPatchJobInstanceDetailsResponse", + "ListPatchJobsRequest", + "ListPatchJobsResponse", + "PatchConfig", + "PatchInstanceFilter", + "PatchJob", + "PatchJobInstanceDetails", + "PatchRollout", + "WindowsUpdateSettings", + "YumSettings", + "ZypperSettings", + "CVSSv3", + "GetVulnerabilityReportRequest", + "ListVulnerabilityReportsRequest", + "ListVulnerabilityReportsResponse", + "VulnerabilityReport", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/inventory.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/inventory.py new file mode 100644 index 000000000000..1d37d9974a12 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/inventory.py @@ -0,0 +1,743 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1", + manifest={ + "InventoryView", + "Inventory", + "GetInventoryRequest", + "ListInventoriesRequest", + "ListInventoriesResponse", + }, +) + + +class InventoryView(proto.Enum): + r"""The view for inventory objects. + + Values: + INVENTORY_VIEW_UNSPECIFIED (0): + The default value. + The API defaults to the BASIC view. + BASIC (1): + Returns the basic inventory information that includes + ``os_info``. + FULL (2): + Returns all fields. + """ + INVENTORY_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + +class Inventory(proto.Message): + r"""This API resource represents the available inventory data for a + Compute Engine virtual machine (VM) instance at a given point in + time. + + You can use this API resource to determine the inventory data of + your VM. + + For more information, see `Information provided by OS inventory + management `__. + + Attributes: + name (str): + Output only. The ``Inventory`` API resource name. 
+
+ Format:
+ ``projects/{project_number}/locations/{location}/instances/{instance_id}/inventory``
+ os_info (google.cloud.osconfig_v1.types.Inventory.OsInfo):
+ Base level operating system information for
+ the VM.
+ items (MutableMapping[str, google.cloud.osconfig_v1.types.Inventory.Item]):
+ Inventory items related to the VM keyed by an
+ opaque unique identifier for each inventory
+ item. The identifier is unique to each distinct
+ and addressable inventory item and will change
+ when there is a new package version.
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. Timestamp of the last reported
+ inventory for the VM.
+ """
+
+ class OsInfo(proto.Message):
+ r"""Operating system information for the VM.
+
+ Attributes:
+ hostname (str):
+ The VM hostname.
+ long_name (str):
+ The operating system long name.
+ For example 'Debian GNU/Linux 9' or 'Microsoft
+ Windows Server 2019 Datacenter'.
+ short_name (str):
+ The operating system short name.
+ For example, 'windows' or 'debian'.
+ version (str):
+ The version of the operating system.
+ architecture (str):
+ The system architecture of the operating
+ system.
+ kernel_version (str):
+ The kernel version of the operating system.
+ kernel_release (str):
+ The kernel release of the operating system.
+ osconfig_agent_version (str):
+ The current version of the OS Config agent
+ running on the VM.
+ """
+
+ hostname: str = proto.Field(
+ proto.STRING,
+ number=9,
+ )
+ long_name: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ short_name: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ version: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+ architecture: str = proto.Field(
+ proto.STRING,
+ number=5,
+ )
+ kernel_version: str = proto.Field(
+ proto.STRING,
+ number=6,
+ )
+ kernel_release: str = proto.Field(
+ proto.STRING,
+ number=7,
+ )
+ osconfig_agent_version: str = proto.Field(
+ proto.STRING,
+ number=8,
+ )
+
+ class Item(proto.Message):
+ r"""A single piece of inventory on a VM.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ id (str):
+ Identifier for this item, unique across items
+ for this VM.
+ origin_type (google.cloud.osconfig_v1.types.Inventory.Item.OriginType):
+ The origin of this inventory item.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ When this inventory item was first detected.
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
+ When this inventory item was last modified.
+ type_ (google.cloud.osconfig_v1.types.Inventory.Item.Type):
+ The specific type of inventory, correlating
+ to its specific details.
+ installed_package (google.cloud.osconfig_v1.types.Inventory.SoftwarePackage):
+ Software package present on the VM instance.
+
+ This field is a member of `oneof`_ ``details``.
+ available_package (google.cloud.osconfig_v1.types.Inventory.SoftwarePackage):
+ Software package available to be installed on
+ the VM instance.
+
+ This field is a member of `oneof`_ ``details``.
+ """
+
+ class OriginType(proto.Enum):
+ r"""The origin of a specific inventory item.
+
+ Values:
+ ORIGIN_TYPE_UNSPECIFIED (0):
+ Invalid. An origin type must be specified.
+ INVENTORY_REPORT (1):
+ This inventory item was discovered as the
+ result of the agent reporting inventory via the
+ reporting API.
+ """
+ ORIGIN_TYPE_UNSPECIFIED = 0
+ INVENTORY_REPORT = 1
+
+ class Type(proto.Enum):
+ r"""The different types of inventory that are tracked on a VM.
+
+ Values:
+ TYPE_UNSPECIFIED (0):
+ Invalid. A type must be specified.
+ INSTALLED_PACKAGE (1):
+ This represents a package that is installed
+ on the VM.
+ AVAILABLE_PACKAGE (2):
+ This represents an update that is available
+ for a package.
+ """
+ TYPE_UNSPECIFIED = 0
+ INSTALLED_PACKAGE = 1
+ AVAILABLE_PACKAGE = 2
+
+ id: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ origin_type: "Inventory.Item.OriginType" = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum="Inventory.Item.OriginType",
+ )
+ create_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=8,
+ message=timestamp_pb2.Timestamp,
+ )
+ update_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=9,
+ message=timestamp_pb2.Timestamp,
+ )
+ type_: "Inventory.Item.Type" = proto.Field(
+ proto.ENUM,
+ number=5,
+ enum="Inventory.Item.Type",
+ )
+ installed_package: "Inventory.SoftwarePackage" = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="details",
+ message="Inventory.SoftwarePackage",
+ )
+ available_package: "Inventory.SoftwarePackage" = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="details",
+ message="Inventory.SoftwarePackage",
+ )
+
+ class SoftwarePackage(proto.Message):
+ r"""Software package information of the operating system.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ yum_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage):
+ Yum package info. For details about the yum package manager,
+ see
+ https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/deployment_guide/ch-yum.
+
+ This field is a member of `oneof`_ ``details``.
+ apt_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage):
+ Details of an APT package.
+ For details about the apt package manager, see
+ https://wiki.debian.org/Apt.
+
+ This field is a member of `oneof`_ ``details``.
+ zypper_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage):
+ Details of a Zypper package. For details about the Zypper
+ package manager, see
+ https://en.opensuse.org/SDB:Zypper_manual.
+
+ This field is a member of `oneof`_ ``details``.
+ googet_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage):
+ Details of a Googet package.
+ For details about the googet package manager,
+ see https://github.com/google/googet.
+
+ This field is a member of `oneof`_ ``details``.
+ zypper_patch (google.cloud.osconfig_v1.types.Inventory.ZypperPatch):
+ Details of a Zypper patch. For details about the Zypper
+ package manager, see
+ https://en.opensuse.org/SDB:Zypper_manual.
+
+ This field is a member of `oneof`_ ``details``.
+ wua_package (google.cloud.osconfig_v1.types.Inventory.WindowsUpdatePackage):
+ Details of a Windows Update package. See
+ https://docs.microsoft.com/en-us/windows/win32/api/_wua/ for
+ information about Windows Update.
+
+ This field is a member of `oneof`_ ``details``.
+
+ qfe_package (google.cloud.osconfig_v1.types.Inventory.WindowsQuickFixEngineeringPackage):
+ Details of a Windows Quick Fix engineering
+ package. See
+ https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-quickfixengineering
+ for information about Windows Quick Fix Engineering.
+
+ This field is a member of `oneof`_ ``details``.
+ cos_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage):
+ Details of a COS package.
+
+ This field is a member of `oneof`_ ``details``.
+ windows_application (google.cloud.osconfig_v1.types.Inventory.WindowsApplication):
+ Details of Windows Application.
+
+ This field is a member of `oneof`_ ``details``.
+ """
+
+ yum_package: "Inventory.VersionedPackage" = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ oneof="details",
+ message="Inventory.VersionedPackage",
+ )
+ apt_package: "Inventory.VersionedPackage" = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ oneof="details",
+ message="Inventory.VersionedPackage",
+ )
+ zypper_package: "Inventory.VersionedPackage" = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof="details",
+ message="Inventory.VersionedPackage",
+ )
+ googet_package: "Inventory.VersionedPackage" = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ oneof="details",
+ message="Inventory.VersionedPackage",
+ )
+ zypper_patch: "Inventory.ZypperPatch" = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof="details",
+ message="Inventory.ZypperPatch",
+ )
+ wua_package: "Inventory.WindowsUpdatePackage" = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="details",
+ message="Inventory.WindowsUpdatePackage",
+ )
+ qfe_package: "Inventory.WindowsQuickFixEngineeringPackage" = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="details",
+ message="Inventory.WindowsQuickFixEngineeringPackage",
+ )
+ cos_package: "Inventory.VersionedPackage" = proto.Field(
+ proto.MESSAGE,
+ number=8,
+ oneof="details",
+ message="Inventory.VersionedPackage",
+ )
+ windows_application: "Inventory.WindowsApplication" = proto.Field(
+ proto.MESSAGE,
+ number=9,
+ oneof="details",
+ message="Inventory.WindowsApplication",
+ )
+
+ class VersionedPackage(proto.Message):
+ r"""Information related to a standard versioned package.
+ This includes package info for APT, Yum, Zypper, and Googet
+ package managers.
+
+ Attributes:
+ package_name (str):
+ The name of the package.
+ architecture (str):
+ The system architecture this package is
+ intended for.
+ version (str):
+ The version of the package.
+ """
+
+ package_name: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+ architecture: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ version: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+ class ZypperPatch(proto.Message):
+ r"""Details related to a Zypper Patch.
+
+ Attributes:
+ patch_name (str):
+ The name of the patch.
+ category (str):
+ The category of the patch.
+ severity (str):
+ The severity specified for this patch.
+ summary (str):
+ Any summary information provided about this
+ patch.
+ """
+
+ patch_name: str = proto.Field(
+ proto.STRING,
+ number=5,
+ )
+ category: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ severity: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ summary: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+
+ class WindowsUpdatePackage(proto.Message):
+ r"""Details related to a Windows Update package.
Field data and names + are taken from Windows Update API IUpdate Interface: + https://docs.microsoft.com/en-us/windows/win32/api/_wua/ Descriptive + fields like title, and description are localized based on the locale + of the VM being updated. + + Attributes: + title (str): + The localized title of the update package. + description (str): + The localized description of the update + package. + categories (MutableSequence[google.cloud.osconfig_v1.types.Inventory.WindowsUpdatePackage.WindowsUpdateCategory]): + The categories that are associated with this + update package. + kb_article_ids (MutableSequence[str]): + A collection of Microsoft Knowledge Base + article IDs that are associated with the update + package. + support_url (str): + A hyperlink to the language-specific support + information for the update. + more_info_urls (MutableSequence[str]): + A collection of URLs that provide more + information about the update package. + update_id (str): + Gets the identifier of an update package. + Stays the same across revisions. + revision_number (int): + The revision number of this update package. + last_deployment_change_time (google.protobuf.timestamp_pb2.Timestamp): + The last published date of the update, in + (UTC) date and time. + """ + + class WindowsUpdateCategory(proto.Message): + r"""Categories specified by the Windows Update. + + Attributes: + id (str): + The identifier of the windows update + category. + name (str): + The name of the windows update category. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + + title: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + categories: MutableSequence[ + "Inventory.WindowsUpdatePackage.WindowsUpdateCategory" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Inventory.WindowsUpdatePackage.WindowsUpdateCategory", + ) + kb_article_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + support_url: str = proto.Field( + proto.STRING, + number=11, + ) + more_info_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + update_id: str = proto.Field( + proto.STRING, + number=6, + ) + revision_number: int = proto.Field( + proto.INT32, + number=7, + ) + last_deployment_change_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + + class WindowsQuickFixEngineeringPackage(proto.Message): + r"""Information related to a Quick Fix Engineering package. + Fields are taken from Windows QuickFixEngineering Interface and + match the source names: + + https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-quickfixengineering + + Attributes: + caption (str): + A short textual description of the QFE + update. + description (str): + A textual description of the QFE update. + hot_fix_id (str): + Unique identifier associated with a + particular QFE update. + install_time (google.protobuf.timestamp_pb2.Timestamp): + Date that the QFE update was installed. Mapped from + installed_on field. 
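Each ``SoftwarePackage`` carries exactly one of the ``details`` variants documented above. One way to see which variant is set is to drop to the underlying protobuf message; a sketch (the helper name is mine, not part of the library):

```python
from typing import Optional

from google.cloud import osconfig_v1

def package_kind(item: osconfig_v1.Inventory.Item) -> Optional[str]:
    """Return which `details` variant the item's package carries."""
    pkg = (
        item.installed_package
        if item.type_ == osconfig_v1.Inventory.Item.Type.INSTALLED_PACKAGE
        else item.available_package
    )
    # proto-plus wraps the raw protobuf; WhichOneof lives on the pb message.
    return osconfig_v1.Inventory.SoftwarePackage.pb(pkg).WhichOneof("details")
```

For an apt-managed VM this would typically return ``"apt_package"``; on Windows, ``"wua_package"`` or ``"qfe_package"``.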
+ """ + + caption: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + hot_fix_id: str = proto.Field( + proto.STRING, + number=3, + ) + install_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + class WindowsApplication(proto.Message): + r"""Contains information about a Windows application that is + retrieved from the Windows Registry. For more information about + these fields, see: + + https://docs.microsoft.com/en-us/windows/win32/msi/uninstall-registry-key + + Attributes: + display_name (str): + The name of the application or product. + display_version (str): + The version of the product or application in + string format. + publisher (str): + The name of the manufacturer for the product + or application. + install_date (google.type.date_pb2.Date): + The last time this product received service. + The value of this property is replaced each time + a patch is applied or removed from the product + or the command-line option is used to repair the + product. + help_link (str): + The internet address for technical support. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + display_version: str = proto.Field( + proto.STRING, + number=2, + ) + publisher: str = proto.Field( + proto.STRING, + number=3, + ) + install_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=4, + message=date_pb2.Date, + ) + help_link: str = proto.Field( + proto.STRING, + number=5, + ) + + name: str = proto.Field( + proto.STRING, + number=3, + ) + os_info: OsInfo = proto.Field( + proto.MESSAGE, + number=1, + message=OsInfo, + ) + items: MutableMapping[str, Item] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message=Item, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class GetInventoryRequest(proto.Message): + r"""A request message for getting inventory data for the + specified VM. + + Attributes: + name (str): + Required. API resource name for inventory resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/inventory`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, either + Compute Engine ``instance-id`` or ``instance-name`` can be + provided. + view (google.cloud.osconfig_v1.types.InventoryView): + Inventory view indicating what information + should be included in the inventory resource. If + unspecified, the default view is BASIC. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "InventoryView" = proto.Field( + proto.ENUM, + number=2, + enum="InventoryView", + ) + + +class ListInventoriesRequest(proto.Message): + r"""A request message for listing inventory data for all VMs in + the specified location. + + Attributes: + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + view (google.cloud.osconfig_v1.types.InventoryView): + Inventory view indicating what information + should be included in the inventory resource. If + unspecified, the default view is BASIC. + page_size (int): + The maximum number of results to return. + page_token (str): + A pagination token returned from a previous call to + ``ListInventories`` that indicates where this listing should + continue from. 
+        filter (str):
+            If provided, this field specifies the criteria that must be
+            met by an ``Inventory`` API resource to be included in the
+            response.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    view: "InventoryView" = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum="InventoryView",
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListInventoriesResponse(proto.Message):
+    r"""A response message for listing inventory data for all VMs in
+    a specified location.
+
+    Attributes:
+        inventories (MutableSequence[google.cloud.osconfig_v1.types.Inventory]):
+            List of inventory objects.
+        next_page_token (str):
+            The pagination token to retrieve the next
+            page of inventory objects.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    inventories: MutableSequence["Inventory"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Inventory",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy.py
new file mode 100644
index 000000000000..220255ac724c
--- /dev/null
+++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy.py
@@ -0,0 +1,1055 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.osconfig.v1",
+    manifest={
+        "OSPolicy",
+    },
+)
+
+
+class OSPolicy(proto.Message):
+    r"""An OS policy defines the desired state configuration for a
+    VM.
+
+    Attributes:
+        id (str):
+            Required. The id of the OS policy with the following
+            restrictions:
+
+            -  Must contain only lowercase letters, numbers, and
+               hyphens.
+            -  Must start with a letter.
+            -  Must be between 1-63 characters.
+            -  Must end with a number or a letter.
+            -  Must be unique within the assignment.
+        description (str):
+            Policy description.
+            Length of the description is limited to 1024
+            characters.
+        mode (google.cloud.osconfig_v1.types.OSPolicy.Mode):
+            Required. Policy mode
+        resource_groups (MutableSequence[google.cloud.osconfig_v1.types.OSPolicy.ResourceGroup]):
+            Required. List of resource groups for the policy. For a
+            particular VM, resource groups are evaluated in the order
+            specified, and the first resource group that is applicable
+            is selected; the rest are ignored.
+
+            If none of the resource groups are applicable for a VM, the
+            VM is considered to be non-compliant with respect to this policy.
This + behavior can be toggled by the flag + ``allow_no_resource_group_match`` + allow_no_resource_group_match (bool): + This flag determines the OS policy compliance status when + none of the resource groups within the policy are applicable + for a VM. Set this value to ``true`` if the policy needs to + be reported as compliant even if the policy has nothing to + validate or enforce. + """ + + class Mode(proto.Enum): + r"""Policy mode + + Values: + MODE_UNSPECIFIED (0): + Invalid mode + VALIDATION (1): + This mode checks if the configuration + resources in the policy are in their desired + state. No actions are performed if they are not + in the desired state. This mode is used for + reporting purposes. + ENFORCEMENT (2): + This mode checks if the configuration + resources in the policy are in their desired + state, and if not, enforces the desired state. + """ + MODE_UNSPECIFIED = 0 + VALIDATION = 1 + ENFORCEMENT = 2 + + class InventoryFilter(proto.Message): + r"""Filtering criteria to select VMs based on inventory details. + + Attributes: + os_short_name (str): + Required. The OS short name + os_version (str): + The OS version + + Prefix matches are supported if asterisk(*) is provided as + the last character. For example, to match all versions with + a major version of ``7``, specify the following value for + this field ``7.*`` + + An empty string matches all OS versions. + """ + + os_short_name: str = proto.Field( + proto.STRING, + number=1, + ) + os_version: str = proto.Field( + proto.STRING, + number=2, + ) + + class Resource(proto.Message): + r"""An OS policy resource is used to define the desired state + configuration and provides a specific functionality like + installing/removing packages, executing a script etc. + + The system ensures that resources are always in their desired + state by taking necessary actions if they have drifted from + their desired state. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + Required. The id of the resource with the following + restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the OS policy. + pkg (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource): + Package resource + + This field is a member of `oneof`_ ``resource_type``. + repository (google.cloud.osconfig_v1.types.OSPolicy.Resource.RepositoryResource): + Package repository resource + + This field is a member of `oneof`_ ``resource_type``. + exec_ (google.cloud.osconfig_v1.types.OSPolicy.Resource.ExecResource): + Exec resource + + This field is a member of `oneof`_ ``resource_type``. + file (google.cloud.osconfig_v1.types.OSPolicy.Resource.FileResource): + File resource + + This field is a member of `oneof`_ ``resource_type``. + """ + + class File(proto.Message): + r"""A remote or local file. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + remote (google.cloud.osconfig_v1.types.OSPolicy.Resource.File.Remote): + A generic remote file. + + This field is a member of `oneof`_ ``type``. + gcs (google.cloud.osconfig_v1.types.OSPolicy.Resource.File.Gcs): + A Cloud Storage object. + + This field is a member of `oneof`_ ``type``. + local_path (str): + A local path within the VM to use. + + This field is a member of `oneof`_ ``type``. + allow_insecure (bool): + Defaults to false. When false, files are + subject to validations based on the file type: + + Remote: A checksum must be specified. + Cloud Storage: An object generation number must + be specified. + """ + + class Remote(proto.Message): + r"""Specifies a file available via some URI. + + Attributes: + uri (str): + Required. URI from which to fetch the object. It should + contain both the protocol and path following the format + ``{protocol}://{location}``. + sha256_checksum (str): + SHA256 checksum of the remote file. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + sha256_checksum: str = proto.Field( + proto.STRING, + number=2, + ) + + class Gcs(proto.Message): + r"""Specifies a file available as a Cloud Storage Object. + + Attributes: + bucket (str): + Required. Bucket of the Cloud Storage object. + object_ (str): + Required. Name of the Cloud Storage object. + generation (int): + Generation number of the Cloud Storage + object. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + object_: str = proto.Field( + proto.STRING, + number=2, + ) + generation: int = proto.Field( + proto.INT64, + number=3, + ) + + remote: "OSPolicy.Resource.File.Remote" = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message="OSPolicy.Resource.File.Remote", + ) + gcs: "OSPolicy.Resource.File.Gcs" = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message="OSPolicy.Resource.File.Gcs", + ) + local_path: str = proto.Field( + proto.STRING, + number=3, + oneof="type", + ) + allow_insecure: bool = proto.Field( + proto.BOOL, + number=4, + ) + + class PackageResource(proto.Message): + r"""A resource that manages a system package. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + desired_state (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource.DesiredState): + Required. The desired state the agent should + maintain for this package. + apt (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource.APT): + A package managed by Apt. + + This field is a member of `oneof`_ ``system_package``. + deb (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource.Deb): + A deb package file. + + This field is a member of `oneof`_ ``system_package``. + yum (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource.YUM): + A package managed by YUM. + + This field is a member of `oneof`_ ``system_package``. + zypper (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource.Zypper): + A package managed by Zypper. + + This field is a member of `oneof`_ ``system_package``. + rpm (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource.RPM): + An rpm package file. 
+ + This field is a member of `oneof`_ ``system_package``. + googet (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource.GooGet): + A package managed by GooGet. + + This field is a member of `oneof`_ ``system_package``. + msi (google.cloud.osconfig_v1.types.OSPolicy.Resource.PackageResource.MSI): + An MSI package. + + This field is a member of `oneof`_ ``system_package``. + """ + + class DesiredState(proto.Enum): + r"""The desired state that the OS Config agent maintains on the + VM. + + Values: + DESIRED_STATE_UNSPECIFIED (0): + Unspecified is invalid. + INSTALLED (1): + Ensure that the package is installed. + REMOVED (2): + The agent ensures that the package is not + installed and uninstalls it if detected. + """ + DESIRED_STATE_UNSPECIFIED = 0 + INSTALLED = 1 + REMOVED = 2 + + class Deb(proto.Message): + r"""A deb package file. dpkg packages only support INSTALLED + state. + + Attributes: + source (google.cloud.osconfig_v1.types.OSPolicy.Resource.File): + Required. A deb package. + pull_deps (bool): + Whether dependencies should also be installed. + + - install when false: ``dpkg -i package`` + - install when true: + ``apt-get update && apt-get -y install package.deb`` + """ + + source: "OSPolicy.Resource.File" = proto.Field( + proto.MESSAGE, + number=1, + message="OSPolicy.Resource.File", + ) + pull_deps: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class APT(proto.Message): + r"""A package managed by APT. + + - install: ``apt-get update && apt-get -y install [name]`` + - remove: ``apt-get -y remove [name]`` + + Attributes: + name (str): + Required. Package name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class RPM(proto.Message): + r"""An RPM package file. RPM packages only support INSTALLED + state. + + Attributes: + source (google.cloud.osconfig_v1.types.OSPolicy.Resource.File): + Required. An rpm package. + pull_deps (bool): + Whether dependencies should also be installed. + + - install when false: + ``rpm --upgrade --replacepkgs package.rpm`` + - install when true: ``yum -y install package.rpm`` or + ``zypper -y install package.rpm`` + """ + + source: "OSPolicy.Resource.File" = proto.Field( + proto.MESSAGE, + number=1, + message="OSPolicy.Resource.File", + ) + pull_deps: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class YUM(proto.Message): + r"""A package managed by YUM. + + - install: ``yum -y install package`` + - remove: ``yum -y remove package`` + + Attributes: + name (str): + Required. Package name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class Zypper(proto.Message): + r"""A package managed by Zypper. + + - install: ``zypper -y install package`` + - remove: ``zypper -y rm package`` + + Attributes: + name (str): + Required. Package name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class GooGet(proto.Message): + r"""A package managed by GooGet. + + - install: ``googet -noconfirm install package`` + - remove: ``googet -noconfirm remove package`` + + Attributes: + name (str): + Required. Package name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class MSI(proto.Message): + r"""An MSI package. MSI packages only support INSTALLED state. + + Attributes: + source (google.cloud.osconfig_v1.types.OSPolicy.Resource.File): + Required. The MSI package. + properties (MutableSequence[str]): + Additional properties to use during installation. This + should be in the format of Property=Setting. 
Appended to the + defaults of ``ACTION=INSTALL REBOOT=ReallySuppress``. + """ + + source: "OSPolicy.Resource.File" = proto.Field( + proto.MESSAGE, + number=1, + message="OSPolicy.Resource.File", + ) + properties: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + desired_state: "OSPolicy.Resource.PackageResource.DesiredState" = ( + proto.Field( + proto.ENUM, + number=1, + enum="OSPolicy.Resource.PackageResource.DesiredState", + ) + ) + apt: "OSPolicy.Resource.PackageResource.APT" = proto.Field( + proto.MESSAGE, + number=2, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.APT", + ) + deb: "OSPolicy.Resource.PackageResource.Deb" = proto.Field( + proto.MESSAGE, + number=3, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.Deb", + ) + yum: "OSPolicy.Resource.PackageResource.YUM" = proto.Field( + proto.MESSAGE, + number=4, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.YUM", + ) + zypper: "OSPolicy.Resource.PackageResource.Zypper" = proto.Field( + proto.MESSAGE, + number=5, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.Zypper", + ) + rpm: "OSPolicy.Resource.PackageResource.RPM" = proto.Field( + proto.MESSAGE, + number=6, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.RPM", + ) + googet: "OSPolicy.Resource.PackageResource.GooGet" = proto.Field( + proto.MESSAGE, + number=7, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.GooGet", + ) + msi: "OSPolicy.Resource.PackageResource.MSI" = proto.Field( + proto.MESSAGE, + number=8, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.MSI", + ) + + class RepositoryResource(proto.Message): + r"""A resource that manages a package repository. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + apt (google.cloud.osconfig_v1.types.OSPolicy.Resource.RepositoryResource.AptRepository): + An Apt Repository. + + This field is a member of `oneof`_ ``repository``. + yum (google.cloud.osconfig_v1.types.OSPolicy.Resource.RepositoryResource.YumRepository): + A Yum Repository. + + This field is a member of `oneof`_ ``repository``. + zypper (google.cloud.osconfig_v1.types.OSPolicy.Resource.RepositoryResource.ZypperRepository): + A Zypper Repository. + + This field is a member of `oneof`_ ``repository``. + goo (google.cloud.osconfig_v1.types.OSPolicy.Resource.RepositoryResource.GooRepository): + A Goo Repository. + + This field is a member of `oneof`_ ``repository``. + """ + + class AptRepository(proto.Message): + r"""Represents a single apt package repository. These will be added to a + repo file that will be managed at + ``/etc/apt/sources.list.d/google_osconfig.list``. + + Attributes: + archive_type (google.cloud.osconfig_v1.types.OSPolicy.Resource.RepositoryResource.AptRepository.ArchiveType): + Required. Type of archive files in this + repository. + uri (str): + Required. URI for this repository. + distribution (str): + Required. Distribution of this repository. + components (MutableSequence[str]): + Required. List of components for this + repository. Must contain at least one item. + gpg_key (str): + URI of the key file for this repository. 
The agent maintains + a keyring at + ``/etc/apt/trusted.gpg.d/osconfig_agent_managed.gpg``. + """ + + class ArchiveType(proto.Enum): + r"""Type of archive. + + Values: + ARCHIVE_TYPE_UNSPECIFIED (0): + Unspecified is invalid. + DEB (1): + Deb indicates that the archive contains + binary files. + DEB_SRC (2): + Deb-src indicates that the archive contains + source files. + """ + ARCHIVE_TYPE_UNSPECIFIED = 0 + DEB = 1 + DEB_SRC = 2 + + archive_type: "OSPolicy.Resource.RepositoryResource.AptRepository.ArchiveType" = proto.Field( + proto.ENUM, + number=1, + enum="OSPolicy.Resource.RepositoryResource.AptRepository.ArchiveType", + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + distribution: str = proto.Field( + proto.STRING, + number=3, + ) + components: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + gpg_key: str = proto.Field( + proto.STRING, + number=5, + ) + + class YumRepository(proto.Message): + r"""Represents a single yum package repository. These are added to a + repo file that is managed at + ``/etc/yum.repos.d/google_osconfig.repo``. + + Attributes: + id (str): + Required. A one word, unique name for this repository. This + is the ``repo id`` in the yum config file and also the + ``display_name`` if ``display_name`` is omitted. This id is + also used as the unique identifier when checking for + resource conflicts. + display_name (str): + The display name of the repository. + base_url (str): + Required. The location of the repository + directory. + gpg_keys (MutableSequence[str]): + URIs of GPG keys. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + base_url: str = proto.Field( + proto.STRING, + number=3, + ) + gpg_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + class ZypperRepository(proto.Message): + r"""Represents a single zypper package repository. These are added to a + repo file that is managed at + ``/etc/zypp/repos.d/google_osconfig.repo``. + + Attributes: + id (str): + Required. A one word, unique name for this repository. This + is the ``repo id`` in the zypper config file and also the + ``display_name`` if ``display_name`` is omitted. This id is + also used as the unique identifier when checking for + GuestPolicy conflicts. + display_name (str): + The display name of the repository. + base_url (str): + Required. The location of the repository + directory. + gpg_keys (MutableSequence[str]): + URIs of GPG keys. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + base_url: str = proto.Field( + proto.STRING, + number=3, + ) + gpg_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + class GooRepository(proto.Message): + r"""Represents a Goo package repository. These are added to a repo file + that is managed at + ``C:/ProgramData/GooGet/repos/google_osconfig.repo``. + + Attributes: + name (str): + Required. The name of the repository. + url (str): + Required. The url of the repository. 
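+
+                A construction sketch (the repository name and URL are
+                illustrative)::
+
+                    goo = OSPolicy.Resource.RepositoryResource.GooRepository(
+                        name="my-repo",
+                        url="https://example.com/repo",
+                    )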
+                """
+
+                name: str = proto.Field(
+                    proto.STRING,
+                    number=1,
+                )
+                url: str = proto.Field(
+                    proto.STRING,
+                    number=2,
+                )
+
+            apt: "OSPolicy.Resource.RepositoryResource.AptRepository" = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                oneof="repository",
+                message="OSPolicy.Resource.RepositoryResource.AptRepository",
+            )
+            yum: "OSPolicy.Resource.RepositoryResource.YumRepository" = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                oneof="repository",
+                message="OSPolicy.Resource.RepositoryResource.YumRepository",
+            )
+            zypper: "OSPolicy.Resource.RepositoryResource.ZypperRepository" = (
+                proto.Field(
+                    proto.MESSAGE,
+                    number=3,
+                    oneof="repository",
+                    message="OSPolicy.Resource.RepositoryResource.ZypperRepository",
+                )
+            )
+            goo: "OSPolicy.Resource.RepositoryResource.GooRepository" = proto.Field(
+                proto.MESSAGE,
+                number=4,
+                oneof="repository",
+                message="OSPolicy.Resource.RepositoryResource.GooRepository",
+            )
+
+        class ExecResource(proto.Message):
+            r"""A resource that allows executing scripts on the VM.
+
+            The ``ExecResource`` has 2 stages: ``validate`` and ``enforce``,
+            and both stages accept a script as an argument to execute.
+
+            When the ``ExecResource`` is applied by the agent, it first executes
+            the script in the ``validate`` stage. The ``validate`` stage can
+            signal that the ``ExecResource`` is already in the desired state by
+            returning an exit code of ``100``. If the ``ExecResource`` is not in
+            the desired state, it should return an exit code of ``101``. Any
+            other exit code returned by this stage is considered an error.
+
+            If the ``ExecResource`` is not in the desired state based on the
+            exit code from the ``validate`` stage, the agent proceeds to execute
+            the script from the ``enforce`` stage. If the ``ExecResource`` is
+            already in the desired state, the ``enforce`` stage will not be run.
+            Similar to the ``validate`` stage, the ``enforce`` stage should
+            return an exit code of ``100`` to indicate that the resource is now
+            in its desired state. Any other exit code is considered an error.
+
+            NOTE: An exit code of ``100`` was chosen over ``0`` (and ``101`` vs
+            ``1``) to have an explicit indicator of ``in desired state``,
+            ``not in desired state`` and errors. This is because, for example,
+            PowerShell will always return an exit code of ``0`` unless an
+            ``exit`` statement is provided in the script. So, for reasons of
+            consistency and being explicit, exit codes ``100`` and ``101`` were
+            chosen.
+
+            Attributes:
+                validate (google.cloud.osconfig_v1.types.OSPolicy.Resource.ExecResource.Exec):
+                    Required. What to run to validate this
+                    resource is in the desired state. An exit code
+                    of 100 indicates "in desired state", and an exit
+                    code of 101 indicates "not in desired state".
+                    Any other exit code indicates a failure running
+                    validate.
+                enforce (google.cloud.osconfig_v1.types.OSPolicy.Resource.ExecResource.Exec):
+                    What to run to bring this resource into the
+                    desired state. An exit code of 100 indicates
+                    "success", any other exit code indicates a
+                    failure running enforce.
+            """
+
+            class Exec(proto.Message):
+                r"""A file or script to execute.
+
+                This message has `oneof`_ fields (mutually exclusive fields).
+                For each oneof, at most one member field can be set at the same time.
+                Setting any member of the oneof automatically clears all other
+                members.
+
+                .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+                Attributes:
+                    file (google.cloud.osconfig_v1.types.OSPolicy.Resource.File):
+                        A remote or local file.
+
+                        This field is a member of `oneof`_ ``source``.
+                    script (str):
+                        An inline script.
+                        The size of the script is limited to 1024
+                        characters.
+
+                        This field is a member of `oneof`_ ``source``.
+                    args (MutableSequence[str]):
+                        Optional arguments to pass to the source
+                        during execution.
+                    interpreter (google.cloud.osconfig_v1.types.OSPolicy.Resource.ExecResource.Exec.Interpreter):
+                        Required. The script interpreter to use.
+                    output_file_path (str):
+                        Only recorded for enforce Exec.
+                        Path to an output file (that is created by this
+                        Exec) whose content will be recorded in
+                        OSPolicyResourceCompliance after a successful
+                        run. Absence or failure to read this file will
+                        result in this ExecResource being non-compliant.
+                        Output file size is limited to 100K bytes.
+                """
+
+                class Interpreter(proto.Enum):
+                    r"""The interpreter to use.
+
+                    Values:
+                        INTERPRETER_UNSPECIFIED (0):
+                            Invalid value; the request will return a
+                            validation error.
+                        NONE (1):
+                            If an interpreter is not specified, the source is executed
+                            directly. This execution, without an interpreter, only
+                            succeeds for executables and scripts that have shebang
+                            lines.
+                        SHELL (2):
+                            Indicates that the script runs with ``/bin/sh`` on Linux and
+                            ``cmd.exe`` on Windows.
+                        POWERSHELL (3):
+                            Indicates that the script runs with
+                            PowerShell.
+                    """
+                    INTERPRETER_UNSPECIFIED = 0
+                    NONE = 1
+                    SHELL = 2
+                    POWERSHELL = 3
+
+                file: "OSPolicy.Resource.File" = proto.Field(
+                    proto.MESSAGE,
+                    number=1,
+                    oneof="source",
+                    message="OSPolicy.Resource.File",
+                )
+                script: str = proto.Field(
+                    proto.STRING,
+                    number=2,
+                    oneof="source",
+                )
+                args: MutableSequence[str] = proto.RepeatedField(
+                    proto.STRING,
+                    number=3,
+                )
+                interpreter: "OSPolicy.Resource.ExecResource.Exec.Interpreter" = (
+                    proto.Field(
+                        proto.ENUM,
+                        number=4,
+                        enum="OSPolicy.Resource.ExecResource.Exec.Interpreter",
+                    )
+                )
+                output_file_path: str = proto.Field(
+                    proto.STRING,
+                    number=5,
+                )
+
+            validate: "OSPolicy.Resource.ExecResource.Exec" = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                message="OSPolicy.Resource.ExecResource.Exec",
+            )
+            enforce: "OSPolicy.Resource.ExecResource.Exec" = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                message="OSPolicy.Resource.ExecResource.Exec",
+            )
+
+        class FileResource(proto.Message):
+            r"""A resource that manages the state of a file.
+
+            This message has `oneof`_ fields (mutually exclusive fields).
+            For each oneof, at most one member field can be set at the same time.
+            Setting any member of the oneof automatically clears all other
+            members.
+
+            .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+            Attributes:
+                file (google.cloud.osconfig_v1.types.OSPolicy.Resource.File):
+                    A remote or local source.
+
+                    This field is a member of `oneof`_ ``source``.
+                content (str):
+                    A file with this content.
+                    The size of the content is limited to 1024
+                    characters.
+
+                    This field is a member of `oneof`_ ``source``.
+                path (str):
+                    Required. The absolute path of the file
+                    within the VM.
+                state (google.cloud.osconfig_v1.types.OSPolicy.Resource.FileResource.DesiredState):
+                    Required. Desired state of the file.
+                permissions (str):
+                    Consists of three octal digits which
+                    represent, in order, the permissions of the
+                    owner, group, and other users for the file
+                    (similar to the numeric mode used in the Linux
+                    chmod utility).
+                    Each digit represents a three-bit number:
+                    the 4 bit corresponds to the read permission,
+                    the 2 bit to the write permission, and the
+                    1 bit to the execute permission. Default
+                    behavior is 755.
+
+                    Below are some examples of permissions and their
+                    associated values:
+
+                    read, write, and execute: 7
+                    read and execute: 5
+                    read and write: 6
+                    read only: 4
+            """
+
+            class DesiredState(proto.Enum):
+                r"""Desired state of the file.
+
+                Values:
+                    DESIRED_STATE_UNSPECIFIED (0):
+                        Unspecified is invalid.
+                    PRESENT (1):
+                        Ensure file at path is present.
+                    ABSENT (2):
+                        Ensure file at path is absent.
+                    CONTENTS_MATCH (3):
+                        Ensure the contents of the file at path
+                        matches. If the file does not exist it will be
+                        created.
+                """
+                DESIRED_STATE_UNSPECIFIED = 0
+                PRESENT = 1
+                ABSENT = 2
+                CONTENTS_MATCH = 3
+
+            file: "OSPolicy.Resource.File" = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                oneof="source",
+                message="OSPolicy.Resource.File",
+            )
+            content: str = proto.Field(
+                proto.STRING,
+                number=2,
+                oneof="source",
+            )
+            path: str = proto.Field(
+                proto.STRING,
+                number=3,
+            )
+            state: "OSPolicy.Resource.FileResource.DesiredState" = proto.Field(
+                proto.ENUM,
+                number=4,
+                enum="OSPolicy.Resource.FileResource.DesiredState",
+            )
+            permissions: str = proto.Field(
+                proto.STRING,
+                number=5,
+            )
+
+        id: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        pkg: "OSPolicy.Resource.PackageResource" = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            oneof="resource_type",
+            message="OSPolicy.Resource.PackageResource",
+        )
+        repository: "OSPolicy.Resource.RepositoryResource" = proto.Field(
+            proto.MESSAGE,
+            number=3,
+            oneof="resource_type",
+            message="OSPolicy.Resource.RepositoryResource",
+        )
+        exec_: "OSPolicy.Resource.ExecResource" = proto.Field(
+            proto.MESSAGE,
+            number=4,
+            oneof="resource_type",
+            message="OSPolicy.Resource.ExecResource",
+        )
+        file: "OSPolicy.Resource.FileResource" = proto.Field(
+            proto.MESSAGE,
+            number=5,
+            oneof="resource_type",
+            message="OSPolicy.Resource.FileResource",
+        )
+
+    class ResourceGroup(proto.Message):
+        r"""Resource groups provide a mechanism to group OS policy resources.
+
+        Resource groups enable OS policy authors to create a single OS
+        policy to be applied to VMs running different operating systems.
+
+        When the OS policy is applied to a target VM, the appropriate
+        resource group within the OS policy is selected based on the
+        ``OSFilter`` specified within the resource group.
+
+        Attributes:
+            inventory_filters (MutableSequence[google.cloud.osconfig_v1.types.OSPolicy.InventoryFilter]):
+                List of inventory filters for the resource group.
+
+                The resources in this resource group are applied to the
+                target VM if it satisfies at least one of the following
+                inventory filters.
+
+                For example, to apply this resource group to VMs running
+                either ``RHEL`` or ``CentOS`` operating systems, specify 2
+                items for the list with the following values:
+                inventory_filters[0].os_short_name='rhel' and
+                inventory_filters[1].os_short_name='centos'
+
+                If the list is empty, this resource group will be applied to
+                the target VM unconditionally.
+            resources (MutableSequence[google.cloud.osconfig_v1.types.OSPolicy.Resource]):
+                Required. List of resources configured for
+                this resource group. The resources are executed
+                in the exact order specified here.
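+
+        A construction sketch for a group that installs an illustrative
+        YUM package on RHEL VMs only (the resource id and package name
+        are hypothetical)::
+
+            group = OSPolicy.ResourceGroup(
+                inventory_filters=[OSPolicy.InventoryFilter(os_short_name="rhel")],
+                resources=[
+                    OSPolicy.Resource(
+                        id="install-my-pkg",
+                        pkg=OSPolicy.Resource.PackageResource(
+                            desired_state=OSPolicy.Resource.PackageResource.DesiredState.INSTALLED,
+                            yum=OSPolicy.Resource.PackageResource.YUM(name="my-pkg"),
+                        ),
+                    )
+                ],
+            )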
+ """ + + inventory_filters: MutableSequence[ + "OSPolicy.InventoryFilter" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OSPolicy.InventoryFilter", + ) + resources: MutableSequence["OSPolicy.Resource"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OSPolicy.Resource", + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + mode: Mode = proto.Field( + proto.ENUM, + number=3, + enum=Mode, + ) + resource_groups: MutableSequence[ResourceGroup] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=ResourceGroup, + ) + allow_no_resource_group_match: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy_assignment_reports.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy_assignment_reports.py new file mode 100644 index 000000000000..8d6b62e96bd6 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy_assignment_reports.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1", + manifest={ + "GetOSPolicyAssignmentReportRequest", + "ListOSPolicyAssignmentReportsRequest", + "ListOSPolicyAssignmentReportsResponse", + "OSPolicyAssignmentReport", + }, +) + + +class GetOSPolicyAssignmentReportRequest(proto.Message): + r"""Get a report of the OS policy assignment for a VM instance. + + Attributes: + name (str): + Required. API resource name for OS policy assignment report. + + Format: + ``/projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance_id}``, + either Compute Engine ``instance-id`` or ``instance-name`` + can be provided. For ``{assignment_id}``, the + OSPolicyAssignment id must be provided. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListOSPolicyAssignmentReportsRequest(proto.Message): + r"""List the OS policy assignment reports for VM instances. + + Attributes: + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/reports`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, either + ``instance-name``, ``instance-id``, or ``-`` can be + provided. If '-' is provided, the response will include + OSPolicyAssignmentReports for all instances in the + project/location. 
For ``{assignment}``, either + ``assignment-id`` or ``-`` can be provided. If '-' is + provided, the response will include + OSPolicyAssignmentReports for all OSPolicyAssignments in the + project/location. Either {instance} or {assignment} must be + ``-``. + + For example: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/-/reports`` + returns all reports for the instance + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/{assignment-id}/reports`` + returns all the reports for the given assignment across all + instances. + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/-/reports`` + returns all the reports for all assignments across all + instances. + page_size (int): + The maximum number of results to return. + filter (str): + If provided, this field specifies the criteria that must be + met by the ``OSPolicyAssignmentReport`` API resource that is + included in the response. + page_token (str): + A pagination token returned from a previous call to the + ``ListOSPolicyAssignmentReports`` method that indicates + where this listing should continue from. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListOSPolicyAssignmentReportsResponse(proto.Message): + r"""A response message for listing OS Policy assignment reports + including the page of results and page token. + + Attributes: + os_policy_assignment_reports (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignmentReport]): + List of OS policy assignment reports. + next_page_token (str): + The pagination token to retrieve the next + page of OS policy assignment report objects. + """ + + @property + def raw_page(self): + return self + + os_policy_assignment_reports: MutableSequence[ + "OSPolicyAssignmentReport" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OSPolicyAssignmentReport", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OSPolicyAssignmentReport(proto.Message): + r"""A report of the OS policy assignment status for a given + instance. + + Attributes: + name (str): + The ``OSPolicyAssignmentReport`` API resource name. + + Format: + ``projects/{project_number}/locations/{location}/instances/{instance_id}/osPolicyAssignments/{os_policy_assignment_id}/report`` + instance (str): + The Compute Engine VM instance name. + os_policy_assignment (str): + Reference to the ``OSPolicyAssignment`` API resource that + the ``OSPolicy`` belongs to. + + Format: + ``projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revision_id}`` + os_policy_compliances (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance]): + Compliance data for each ``OSPolicy`` that is applied to the + VM. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp for when the report was last + generated. + last_run_id (str): + Unique identifier of the last attempted run + to apply the OS policies associated with this + assignment on the VM. + + This ID is logged by the OS Config agent while + applying the OS policies associated with this + assignment on the VM. NOTE: If the service is + unable to successfully connect to the agent for + this run, then this id will not be available in + the agent logs. 
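+
+    An inspection sketch (assumes ``report`` was returned by the zonal
+    service's ``get_os_policy_assignment_report`` method)::
+
+        # Print the per-policy compliance state from the report.
+        for pc in report.os_policy_compliances:
+            print(pc.os_policy_id, pc.compliance_state)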
+ """ + + class OSPolicyCompliance(proto.Message): + r"""Compliance data for an OS policy + + Attributes: + os_policy_id (str): + The OS policy id + compliance_state (google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState): + The compliance state of the OS policy. + compliance_state_reason (str): + The reason for the OS policy to be in an unknown compliance + state. This field is always populated when + ``compliance_state`` is ``UNKNOWN``. + + If populated, the field can contain one of the following + values: + + - ``vm-not-running``: The VM was not running. + - ``os-policies-not-supported-by-agent``: The version of + the OS Config agent running on the VM does not support + running OS policies. + - ``no-agent-detected``: The OS Config agent is not + detected for the VM. + - ``resource-execution-errors``: The OS Config agent + encountered errors while executing one or more resources + in the policy. See ``os_policy_resource_compliances`` for + details. + - ``task-timeout``: The task sent to the agent to apply the + policy timed out. + - ``unexpected-agent-state``: The OS Config agent did not + report the final status of the task that attempted to + apply the policy. Instead, the agent unexpectedly started + working on a different task. This mostly happens when the + agent or VM unexpectedly restarts while applying OS + policies. + - ``internal-service-errors``: Internal service errors were + encountered while attempting to apply the policy. + os_policy_resource_compliances (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance]): + Compliance data for each resource within the + policy that is applied to the VM. + """ + + class ComplianceState(proto.Enum): + r"""Possible compliance states for an os policy. + + Values: + UNKNOWN (0): + The policy is in an unknown compliance state. + + Refer to the field ``compliance_state_reason`` to learn the + exact reason for the policy to be in this compliance state. + COMPLIANT (1): + Policy is compliant. + + The policy is compliant if all the underlying + resources are also compliant. + NON_COMPLIANT (2): + Policy is non-compliant. + + The policy is non-compliant if one or more + underlying resources are non-compliant. + """ + UNKNOWN = 0 + COMPLIANT = 1 + NON_COMPLIANT = 2 + + class OSPolicyResourceCompliance(proto.Message): + r"""Compliance data for an OS policy resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + os_policy_resource_id (str): + The ID of the OS policy resource. + config_steps (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep]): + Ordered list of configuration completed by + the agent for the OS policy resource. + compliance_state (google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState): + The compliance state of the resource. + compliance_state_reason (str): + A reason for the resource to be in the given compliance + state. This field is always populated when + ``compliance_state`` is ``UNKNOWN``. + + The following values are supported when + ``compliance_state == UNKNOWN`` + + - ``execution-errors``: Errors were encountered by the + agent while executing the resource and the compliance + state couldn't be determined. 
+ - ``execution-skipped-by-agent``: Resource execution was + skipped by the agent because errors were encountered + while executing prior resources in the OS policy. + - ``os-policy-execution-attempt-failed``: The execution of + the OS policy containing this resource failed and the + compliance state couldn't be determined. + exec_resource_output (google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput): + ExecResource specific output. + + This field is a member of `oneof`_ ``output``. + """ + + class ComplianceState(proto.Enum): + r"""Possible compliance states for a resource. + + Values: + UNKNOWN (0): + The resource is in an unknown compliance state. + + To get more details about why the policy is in this state, + review the output of the ``compliance_state_reason`` field. + COMPLIANT (1): + Resource is compliant. + NON_COMPLIANT (2): + Resource is non-compliant. + """ + UNKNOWN = 0 + COMPLIANT = 1 + NON_COMPLIANT = 2 + + class OSPolicyResourceConfigStep(proto.Message): + r"""Step performed by the OS Config agent for configuring an + ``OSPolicy`` resource to its desired state. + + Attributes: + type_ (google.cloud.osconfig_v1.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type): + Configuration step type. + error_message (str): + An error message recorded during the + execution of this step. Only populated if errors + were encountered during this step execution. + """ + + class Type(proto.Enum): + r"""Supported configuration step types + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + VALIDATION (1): + Checks for resource conflicts such as schema + errors. + DESIRED_STATE_CHECK (2): + Checks the current status of the desired + state for a resource. + DESIRED_STATE_ENFORCEMENT (3): + Enforces the desired state for a resource + that is not in desired state. + DESIRED_STATE_CHECK_POST_ENFORCEMENT (4): + Re-checks the status of the desired state. + This check is done for a resource after the + enforcement of all OS policies. + + This step is used to determine the final desired + state status for the resource. It accounts for + any resources that might have drifted from their + desired state due to side effects from executing + other resources. + """ + TYPE_UNSPECIFIED = 0 + VALIDATION = 1 + DESIRED_STATE_CHECK = 2 + DESIRED_STATE_ENFORCEMENT = 3 + DESIRED_STATE_CHECK_POST_ENFORCEMENT = 4 + + type_: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type" = proto.Field( + proto.ENUM, + number=1, + enum="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type", + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + + class ExecResourceOutput(proto.Message): + r"""ExecResource specific output. + + Attributes: + enforcement_output (bytes): + Output from enforcement phase output file (if + run). Output size is limited to 100K bytes. 
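+
+                A decoding sketch (the output encoding depends on what the
+                enforce script wrote; UTF-8 is assumed here)::
+
+                    raw = resource_compliance.exec_resource_output.enforcement_output
+                    print(raw.decode("utf-8", errors="replace"))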
+ """ + + enforcement_output: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + os_policy_resource_id: str = proto.Field( + proto.STRING, + number=1, + ) + config_steps: MutableSequence[ + "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep", + ) + compliance_state: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState" = proto.Field( + proto.ENUM, + number=3, + enum="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState", + ) + compliance_state_reason: str = proto.Field( + proto.STRING, + number=4, + ) + exec_resource_output: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput" = proto.Field( + proto.MESSAGE, + number=5, + oneof="output", + message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput", + ) + + os_policy_id: str = proto.Field( + proto.STRING, + number=1, + ) + compliance_state: "OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState" = proto.Field( + proto.ENUM, + number=2, + enum="OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState", + ) + compliance_state_reason: str = proto.Field( + proto.STRING, + number=3, + ) + os_policy_resource_compliances: MutableSequence[ + "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + instance: str = proto.Field( + proto.STRING, + number=2, + ) + os_policy_assignment: str = proto.Field( + proto.STRING, + number=3, + ) + os_policy_compliances: MutableSequence[OSPolicyCompliance] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=OSPolicyCompliance, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + last_run_id: str = proto.Field( + proto.STRING, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy_assignments.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy_assignments.py new file mode 100644 index 000000000000..692332fd6347 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/os_policy_assignments.py @@ -0,0 +1,625 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.osconfig_v1.types import os_policy, osconfig_common
+
+__protobuf__ = proto.module(
+    package="google.cloud.osconfig.v1",
+    manifest={
+        "OSPolicyAssignment",
+        "OSPolicyAssignmentOperationMetadata",
+        "CreateOSPolicyAssignmentRequest",
+        "UpdateOSPolicyAssignmentRequest",
+        "GetOSPolicyAssignmentRequest",
+        "ListOSPolicyAssignmentsRequest",
+        "ListOSPolicyAssignmentsResponse",
+        "ListOSPolicyAssignmentRevisionsRequest",
+        "ListOSPolicyAssignmentRevisionsResponse",
+        "DeleteOSPolicyAssignmentRequest",
+    },
+)
+
+
+class OSPolicyAssignment(proto.Message):
+    r"""OS policy assignment is an API resource that is used to apply a set
+    of OS policies to a dynamically targeted group of Compute Engine VM
+    instances.
+
+    An OS policy is used to define the desired state configuration for a
+    Compute Engine VM instance through a set of configuration resources
+    that provide capabilities such as installing or removing software
+    packages, or executing a script.
+
+    For more information, see `OS policy and OS policy
+    assignment `__.
+
+    Attributes:
+        name (str):
+            Resource name.
+
+            Format:
+            ``projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id}``
+
+            This field is ignored when you create an OS policy
+            assignment.
+        description (str):
+            OS policy assignment description.
+            Length of the description is limited to 1024
+            characters.
+        os_policies (MutableSequence[google.cloud.osconfig_v1.types.OSPolicy]):
+            Required. List of OS policies to be applied
+            to the VMs.
+        instance_filter (google.cloud.osconfig_v1.types.OSPolicyAssignment.InstanceFilter):
+            Required. Filter to select VMs.
+        rollout (google.cloud.osconfig_v1.types.OSPolicyAssignment.Rollout):
+            Required. Rollout to deploy the OS policy assignment. A
+            rollout is triggered in the following situations:
+
+            1) OSPolicyAssignment is created.
+            2) OSPolicyAssignment is updated and the update contains
+               changes to one of the following fields:
+
+               -  instance_filter
+               -  os_policies
+
+            3) OSPolicyAssignment is deleted.
+        revision_id (str):
+            Output only. The assignment revision ID. A new
+            revision is committed whenever a rollout is
+            triggered for an OS policy assignment.
+        revision_create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp that the revision
+            was created.
+        etag (str):
+            The etag for this OS policy assignment.
+            If this is provided on update, it must match the
+            server's etag.
+        rollout_state (google.cloud.osconfig_v1.types.OSPolicyAssignment.RolloutState):
+            Output only. OS policy assignment rollout
+            state.
+        baseline (bool):
+            Output only. Indicates that this revision has been
+            successfully rolled out in this zone and new VMs will be
+            assigned OS policies from this revision.
+
+            For a given OS policy assignment, there is only one revision
+            with a value of ``true`` for this field.
+        deleted (bool):
+            Output only. Indicates that this revision
+            deletes the OS policy assignment.
+        reconciling (bool):
+            Output only. Indicates that reconciliation is in progress
+            for the revision. This value is ``true`` when the
+            ``rollout_state`` is one of:
+
+            -  IN_PROGRESS
+            -  CANCELLING
+        uid (str):
+            Output only. Server-generated unique id for
+            the OS policy assignment resource.
+    """
+
+    class RolloutState(proto.Enum):
+        r"""OS policy assignment rollout state
+
+        Values:
+            ROLLOUT_STATE_UNSPECIFIED (0):
+                Invalid value
+            IN_PROGRESS (1):
+                The rollout is in progress.
+            CANCELLING (2):
+                The rollout is being cancelled.
+            CANCELLED (3):
+                The rollout is cancelled.
+            SUCCEEDED (4):
+                The rollout has completed successfully.
+        """
+        ROLLOUT_STATE_UNSPECIFIED = 0
+        IN_PROGRESS = 1
+        CANCELLING = 2
+        CANCELLED = 3
+        SUCCEEDED = 4
+
+    class LabelSet(proto.Message):
+        r"""Message representing label set.
+
+        -  A label is a key value pair set for a VM.
+        -  A LabelSet is a set of labels.
+        -  Labels within a LabelSet are ANDed. In other words, a LabelSet is
+           applicable for a VM only if it matches all the labels in the
+           LabelSet.
+        -  Example: A LabelSet with 2 labels: ``env=prod`` and
+           ``type=webserver`` will only be applicable for those VMs with
+           both labels present.
+
+        Attributes:
+            labels (MutableMapping[str, str]):
+                Labels are identified by key/value pairs in
+                this map. A VM should contain all the key/value
+                pairs specified in this map to be selected.
+        """
+
+        labels: MutableMapping[str, str] = proto.MapField(
+            proto.STRING,
+            proto.STRING,
+            number=1,
+        )
+
+    class InstanceFilter(proto.Message):
+        r"""Filters to select target VMs for an assignment.
+
+        If more than one filter criterion is specified below, a VM will
+        be selected if and only if it satisfies all of them.
+
+        Attributes:
+            all_ (bool):
+                Target all VMs in the project. If true, no
+                other criteria are permitted.
+            inclusion_labels (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignment.LabelSet]):
+                List of label sets used for VM inclusion.
+
+                If the list has more than one ``LabelSet``, the VM is
+                included if any of the label sets are applicable for the VM.
+            exclusion_labels (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignment.LabelSet]):
+                List of label sets used for VM exclusion.
+
+                If the list has more than one label set, the VM
+                is excluded if any of the label sets are
+                applicable for the VM.
+            inventories (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignment.InstanceFilter.Inventory]):
+                List of inventories to select VMs.
+
+                A VM is selected if its inventory data matches
+                at least one of the following inventories.
+        """
+
+        class Inventory(proto.Message):
+            r"""VM inventory details.
+
+            Attributes:
+                os_short_name (str):
+                    Required. The OS short name
+                os_version (str):
+                    The OS version
+
+                    Prefix matches are supported if asterisk(*) is provided as
+                    the last character. For example, to match all versions with
+                    a major version of ``7``, specify the following value for
+                    this field ``7.*``
+
+                    An empty string matches all OS versions.
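+
+            A construction sketch (the version value is illustrative)::
+
+                inv = OSPolicyAssignment.InstanceFilter.Inventory(
+                    os_short_name="centos",
+                    os_version="7.*",
+                )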
+ """ + + os_short_name: str = proto.Field( + proto.STRING, + number=1, + ) + os_version: str = proto.Field( + proto.STRING, + number=2, + ) + + all_: bool = proto.Field( + proto.BOOL, + number=1, + ) + inclusion_labels: MutableSequence[ + "OSPolicyAssignment.LabelSet" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OSPolicyAssignment.LabelSet", + ) + exclusion_labels: MutableSequence[ + "OSPolicyAssignment.LabelSet" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="OSPolicyAssignment.LabelSet", + ) + inventories: MutableSequence[ + "OSPolicyAssignment.InstanceFilter.Inventory" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="OSPolicyAssignment.InstanceFilter.Inventory", + ) + + class Rollout(proto.Message): + r"""Message to configure the rollout at the zonal level for the + OS policy assignment. + + Attributes: + disruption_budget (google.cloud.osconfig_v1.types.FixedOrPercent): + Required. The maximum number (or percentage) + of VMs per zone to disrupt at any given moment. + min_wait_duration (google.protobuf.duration_pb2.Duration): + Required. This determines the minimum duration of time to + wait after the configuration changes are applied through the + current rollout. A VM continues to count towards the + ``disruption_budget`` at least until this duration of time + has passed after configuration changes are applied. + """ + + disruption_budget: osconfig_common.FixedOrPercent = proto.Field( + proto.MESSAGE, + number=1, + message=osconfig_common.FixedOrPercent, + ) + min_wait_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + os_policies: MutableSequence[os_policy.OSPolicy] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=os_policy.OSPolicy, + ) + instance_filter: InstanceFilter = proto.Field( + proto.MESSAGE, + number=4, + message=InstanceFilter, + ) + rollout: Rollout = proto.Field( + proto.MESSAGE, + number=5, + message=Rollout, + ) + revision_id: str = proto.Field( + proto.STRING, + number=6, + ) + revision_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + rollout_state: RolloutState = proto.Field( + proto.ENUM, + number=9, + enum=RolloutState, + ) + baseline: bool = proto.Field( + proto.BOOL, + number=10, + ) + deleted: bool = proto.Field( + proto.BOOL, + number=11, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=12, + ) + uid: str = proto.Field( + proto.STRING, + number=13, + ) + + +class OSPolicyAssignmentOperationMetadata(proto.Message): + r"""OS policy assignment operation metadata provided by OS policy + assignment API methods that return long running operations. + + Attributes: + os_policy_assignment (str): + Reference to the ``OSPolicyAssignment`` API resource. + + Format: + ``projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revision_id}`` + api_method (google.cloud.osconfig_v1.types.OSPolicyAssignmentOperationMetadata.APIMethod): + The OS policy assignment API method. 
+ rollout_state (google.cloud.osconfig_v1.types.OSPolicyAssignmentOperationMetadata.RolloutState): + State of the rollout + rollout_start_time (google.protobuf.timestamp_pb2.Timestamp): + Rollout start time + rollout_update_time (google.protobuf.timestamp_pb2.Timestamp): + Rollout update time + """ + + class APIMethod(proto.Enum): + r"""The OS policy assignment API method. + + Values: + API_METHOD_UNSPECIFIED (0): + Invalid value + CREATE (1): + Create OS policy assignment API method + UPDATE (2): + Update OS policy assignment API method + DELETE (3): + Delete OS policy assignment API method + """ + API_METHOD_UNSPECIFIED = 0 + CREATE = 1 + UPDATE = 2 + DELETE = 3 + + class RolloutState(proto.Enum): + r"""State of the rollout + + Values: + ROLLOUT_STATE_UNSPECIFIED (0): + Invalid value + IN_PROGRESS (1): + The rollout is in progress. + CANCELLING (2): + The rollout is being cancelled. + CANCELLED (3): + The rollout is cancelled. + SUCCEEDED (4): + The rollout has completed successfully. + """ + ROLLOUT_STATE_UNSPECIFIED = 0 + IN_PROGRESS = 1 + CANCELLING = 2 + CANCELLED = 3 + SUCCEEDED = 4 + + os_policy_assignment: str = proto.Field( + proto.STRING, + number=1, + ) + api_method: APIMethod = proto.Field( + proto.ENUM, + number=2, + enum=APIMethod, + ) + rollout_state: RolloutState = proto.Field( + proto.ENUM, + number=3, + enum=RolloutState, + ) + rollout_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + rollout_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class CreateOSPolicyAssignmentRequest(proto.Message): + r"""A request message to create an OS policy assignment + + Attributes: + parent (str): + Required. The parent resource name in the + form: projects/{project}/locations/{location} + os_policy_assignment (google.cloud.osconfig_v1.types.OSPolicyAssignment): + Required. The OS policy assignment to be + created. + os_policy_assignment_id (str): + Required. The logical name of the OS policy assignment in + the project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the project. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + os_policy_assignment: "OSPolicyAssignment" = proto.Field( + proto.MESSAGE, + number=2, + message="OSPolicyAssignment", + ) + os_policy_assignment_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class UpdateOSPolicyAssignmentRequest(proto.Message): + r"""A request message to update an OS policy assignment + + Attributes: + os_policy_assignment (google.cloud.osconfig_v1.types.OSPolicyAssignment): + Required. The updated OS policy assignment. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that controls which + fields of the assignment should be updated. + """ + + os_policy_assignment: "OSPolicyAssignment" = proto.Field( + proto.MESSAGE, + number=1, + message="OSPolicyAssignment", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetOSPolicyAssignmentRequest(proto.Message): + r"""A request message to get an OS policy assignment + + Attributes: + name (str): + Required. The resource name of OS policy assignment. 
+
+            Format:
+            ``projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}@{revisionId}``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListOSPolicyAssignmentsRequest(proto.Message):
+    r"""A request message to list OS policy assignments for a parent
+    resource
+
+    Attributes:
+        parent (str):
+            Required. The parent resource name.
+        page_size (int):
+            The maximum number of assignments to return.
+        page_token (str):
+            A pagination token returned from a previous call to
+            ``ListOSPolicyAssignments`` that indicates where this
+            listing should continue from.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListOSPolicyAssignmentsResponse(proto.Message):
+    r"""A response message for listing all assignments under given
+    parent.
+
+    Attributes:
+        os_policy_assignments (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignment]):
+            The list of assignments
+        next_page_token (str):
+            The pagination token to retrieve the next
+            page of OS policy assignments.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    os_policy_assignments: MutableSequence["OSPolicyAssignment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="OSPolicyAssignment",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ListOSPolicyAssignmentRevisionsRequest(proto.Message):
+    r"""A request message to list revisions for an OS policy
+    assignment
+
+    Attributes:
+        name (str):
+            Required. The name of the OS policy
+            assignment to list revisions for.
+        page_size (int):
+            The maximum number of revisions to return.
+        page_token (str):
+            A pagination token returned from a previous call to
+            ``ListOSPolicyAssignmentRevisions`` that indicates where
+            this listing should continue from.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListOSPolicyAssignmentRevisionsResponse(proto.Message):
+    r"""A response message for listing all revisions for an OS policy
+    assignment.
+
+    Attributes:
+        os_policy_assignments (MutableSequence[google.cloud.osconfig_v1.types.OSPolicyAssignment]):
+            The OS policy assignment revisions
+        next_page_token (str):
+            The pagination token to retrieve the next
+            page of OS policy assignment revisions.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    os_policy_assignments: MutableSequence["OSPolicyAssignment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="OSPolicyAssignment",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class DeleteOSPolicyAssignmentRequest(proto.Message):
+    r"""A request message for deleting an OS policy assignment.
+
+    Attributes:
+        name (str):
+            Required.
The name of the OS policy + assignment to be deleted + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_common.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_common.py new file mode 100644 index 000000000000..e92a05cc9717 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_common.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1", + manifest={ + "FixedOrPercent", + }, +) + + +class FixedOrPercent(proto.Message): + r"""Message encapsulating a value that can be either absolute + ("fixed") or relative ("percent") to a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed (int): + Specifies a fixed value. + + This field is a member of `oneof`_ ``mode``. + percent (int): + Specifies the relative value defined as a + percentage, which will be multiplied by a + reference value. + + This field is a member of `oneof`_ ``mode``. + """ + + fixed: int = proto.Field( + proto.INT32, + number=1, + oneof="mode", + ) + percent: int = proto.Field( + proto.INT32, + number=2, + oneof="mode", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_service.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_service.py new file mode 100644 index 000000000000..0ab5f9c10e3f --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_service.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
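The ``FixedOrPercent`` message defined in osconfig_common.py above follows standard protobuf oneof semantics: setting one member of the ``mode`` oneof clears the other. A minimal sketch of that behavior, with illustrative values:

from google.cloud.osconfig_v1 import types

budget = types.FixedOrPercent(fixed=10)
budget.percent = 25  # assigning the other oneof member...
print(budget.fixed)  # ...resets `fixed` to its default (0)
print(types.FixedOrPercent.pb(budget).WhichOneof("mode"))  # -> "percent"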
+# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_zonal_service.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_zonal_service.py new file mode 100644 index 000000000000..0ab5f9c10e3f --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/osconfig_zonal_service.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/patch_deployments.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/patch_deployments.py new file mode 100644 index 000000000000..84dd9fb4f6bd --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/patch_deployments.py @@ -0,0 +1,597 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.osconfig_v1.types import patch_jobs + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1", + manifest={ + "PatchDeployment", + "OneTimeSchedule", + "RecurringSchedule", + "WeeklySchedule", + "MonthlySchedule", + "WeekDayOfMonth", + "CreatePatchDeploymentRequest", + "GetPatchDeploymentRequest", + "ListPatchDeploymentsRequest", + "ListPatchDeploymentsResponse", + "DeletePatchDeploymentRequest", + "UpdatePatchDeploymentRequest", + "PausePatchDeploymentRequest", + "ResumePatchDeploymentRequest", + }, +) + + +class PatchDeployment(proto.Message): + r"""Patch deployments are configurations that individual patch jobs use + to complete a patch. These configurations include instance filter, + package repository settings, and a schedule. 
For more information
+    about creating and managing patch deployments, see `Scheduling patch
+    jobs `__.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Unique name for the patch deployment resource in a project.
+            The patch deployment name is in the form:
+            ``projects/{project_id}/patchDeployments/{patch_deployment_id}``.
+            This field is ignored when you create a new patch
+            deployment.
+        description (str):
+            Optional. Description of the patch
+            deployment. Length of the description is limited
+            to 1024 characters.
+        instance_filter (google.cloud.osconfig_v1.types.PatchInstanceFilter):
+            Required. VM instances to patch.
+        patch_config (google.cloud.osconfig_v1.types.PatchConfig):
+            Optional. Patch configuration that is
+            applied.
+        duration (google.protobuf.duration_pb2.Duration):
+            Optional. Duration of the patch. After the
+            duration ends, the patch times out.
+        one_time_schedule (google.cloud.osconfig_v1.types.OneTimeSchedule):
+            Required. Schedule a one-time execution.
+
+            This field is a member of `oneof`_ ``schedule``.
+        recurring_schedule (google.cloud.osconfig_v1.types.RecurringSchedule):
+            Required. Schedule recurring executions.
+
+            This field is a member of `oneof`_ ``schedule``.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time the patch deployment was created.
+            Timestamp is in
+            `RFC3339 `__ text
+            format.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time the patch deployment was last updated.
+            Timestamp is in
+            `RFC3339 `__ text
+            format.
+        last_execute_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The last time a patch job was started by this
+            deployment. Timestamp is in
+            `RFC3339 `__ text
+            format.
+        rollout (google.cloud.osconfig_v1.types.PatchRollout):
+            Optional. Rollout strategy of the patch job.
+        state (google.cloud.osconfig_v1.types.PatchDeployment.State):
+            Output only. Current state of the patch
+            deployment.
+    """
+
+    class State(proto.Enum):
+        r"""Represents the state of a patch deployment.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                The default value. This value is used if the
+                state is omitted.
+            ACTIVE (1):
+                Active value means that patch deployment
+                generates Patch Jobs.
+            PAUSED (2):
+                Paused value means that patch deployment does
+                not generate Patch jobs. Requires user action to
+                move in and out from this state.
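Because moving between ACTIVE and PAUSED requires an explicit user action, a usage sketch may help; it assumes the OsConfigServiceClient surface shipped with this package and uses the pause/resume request messages defined later in this file. The project and deployment names are hypothetical.

from google.cloud import osconfig_v1

client = osconfig_v1.OsConfigServiceClient()
name = "projects/my-project/patchDeployments/my-deployment"  # hypothetical

# A PAUSED deployment stops generating patch jobs until resumed.
deployment = client.pause_patch_deployment(name=name)
assert deployment.state == osconfig_v1.PatchDeployment.State.PAUSED

deployment = client.resume_patch_deployment(name=name)
assert deployment.state == osconfig_v1.PatchDeployment.State.ACTIVE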
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PAUSED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + instance_filter: patch_jobs.PatchInstanceFilter = proto.Field( + proto.MESSAGE, + number=3, + message=patch_jobs.PatchInstanceFilter, + ) + patch_config: patch_jobs.PatchConfig = proto.Field( + proto.MESSAGE, + number=4, + message=patch_jobs.PatchConfig, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=5, + message=duration_pb2.Duration, + ) + one_time_schedule: "OneTimeSchedule" = proto.Field( + proto.MESSAGE, + number=6, + oneof="schedule", + message="OneTimeSchedule", + ) + recurring_schedule: "RecurringSchedule" = proto.Field( + proto.MESSAGE, + number=7, + oneof="schedule", + message="RecurringSchedule", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + last_execute_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + rollout: patch_jobs.PatchRollout = proto.Field( + proto.MESSAGE, + number=11, + message=patch_jobs.PatchRollout, + ) + state: State = proto.Field( + proto.ENUM, + number=12, + enum=State, + ) + + +class OneTimeSchedule(proto.Message): + r"""Sets the time for a one time patch deployment. Timestamp is in + `RFC3339 `__ text format. + + Attributes: + execute_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The desired patch job execution + time. + """ + + execute_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + +class RecurringSchedule(proto.Message): + r"""Sets the time for recurring patch deployments. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + time_zone (google.type.datetime_pb2.TimeZone): + Required. Defines the time zone that ``time_of_day`` is + relative to. The rules for daylight saving time are + determined by the chosen time zone. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time that the recurring schedule becomes + effective. Defaults to ``create_time`` of the patch + deployment. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The end time at which a recurring + patch deployment schedule is no longer active. + time_of_day (google.type.timeofday_pb2.TimeOfDay): + Required. Time of the day to run a recurring + deployment. + frequency (google.cloud.osconfig_v1.types.RecurringSchedule.Frequency): + Required. The frequency unit of this + recurring schedule. + weekly (google.cloud.osconfig_v1.types.WeeklySchedule): + Required. Schedule with weekly executions. + + This field is a member of `oneof`_ ``schedule_config``. + monthly (google.cloud.osconfig_v1.types.MonthlySchedule): + Required. Schedule with monthly executions. + + This field is a member of `oneof`_ ``schedule_config``. + last_execute_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the last patch job ran + successfully. + next_execute_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time the next patch job is + scheduled to run. + """ + + class Frequency(proto.Enum): + r"""Specifies the frequency of the recurring patch deployments. + + Values: + FREQUENCY_UNSPECIFIED (0): + Invalid. A frequency must be specified. + WEEKLY (1): + Indicates that the frequency of recurrence + should be expressed in terms of weeks. + MONTHLY (2): + Indicates that the frequency of recurrence + should be expressed in terms of months. + DAILY (3): + Indicates that the frequency of recurrence + should be expressed in terms of days. + """ + FREQUENCY_UNSPECIFIED = 0 + WEEKLY = 1 + MONTHLY = 2 + DAILY = 3 + + time_zone: datetime_pb2.TimeZone = proto.Field( + proto.MESSAGE, + number=1, + message=datetime_pb2.TimeZone, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + time_of_day: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=4, + message=timeofday_pb2.TimeOfDay, + ) + frequency: Frequency = proto.Field( + proto.ENUM, + number=5, + enum=Frequency, + ) + weekly: "WeeklySchedule" = proto.Field( + proto.MESSAGE, + number=6, + oneof="schedule_config", + message="WeeklySchedule", + ) + monthly: "MonthlySchedule" = proto.Field( + proto.MESSAGE, + number=7, + oneof="schedule_config", + message="MonthlySchedule", + ) + last_execute_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + next_execute_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + + +class WeeklySchedule(proto.Message): + r"""Represents a weekly schedule. + + Attributes: + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Required. Day of the week. + """ + + day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + + +class MonthlySchedule(proto.Message): + r"""Represents a monthly schedule. An example of a valid monthly + schedule is "on the third Tuesday of the month" or "on the 15th + of the month". + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + week_day_of_month (google.cloud.osconfig_v1.types.WeekDayOfMonth): + Required. Week day in a month. + + This field is a member of `oneof`_ ``day_of_month``. + month_day (int): + Required. One day of the month. 1-31 + indicates the 1st to the 31st day. -1 indicates + the last day of the month. Months without the + target day will be skipped. For example, a + schedule to run "every month on the 31st" will + not run in February, April, June, etc. + + This field is a member of `oneof`_ ``day_of_month``. + """ + + week_day_of_month: "WeekDayOfMonth" = proto.Field( + proto.MESSAGE, + number=1, + oneof="day_of_month", + message="WeekDayOfMonth", + ) + month_day: int = proto.Field( + proto.INT32, + number=2, + oneof="day_of_month", + ) + + +class WeekDayOfMonth(proto.Message): + r"""Represents one week day in a month. An example is "the 4th + Sunday". + + Attributes: + week_ordinal (int): + Required. Week number in a month. 1-4 + indicates the 1st to 4th week of the month. 
-1
+            indicates the last week of the month.
+        day_of_week (google.type.dayofweek_pb2.DayOfWeek):
+            Required. A day of the week.
+        day_offset (int):
+            Optional. Represents the number of days before or after the
+            given week day of month that the patch deployment is
+            scheduled for. For example if ``week_ordinal`` and
+            ``day_of_week`` values point to the second Tuesday of the
+            month and this ``day_offset`` value is set to ``3``, the patch
+            deployment takes place three days after the second Tuesday
+            of the month. If this value is negative, for example -5, the
+            patches are deployed five days before the second
+            Tuesday of the month. Allowed values are in range [-30, 30].
+    """
+
+    week_ordinal: int = proto.Field(
+        proto.INT32,
+        number=1,
+    )
+    day_of_week: dayofweek_pb2.DayOfWeek = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=dayofweek_pb2.DayOfWeek,
+    )
+    day_offset: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+
+
+class CreatePatchDeploymentRequest(proto.Message):
+    r"""A request message for creating a patch deployment.
+
+    Attributes:
+        parent (str):
+            Required. The project to apply this patch deployment to in
+            the form ``projects/*``.
+        patch_deployment_id (str):
+            Required. A name for the patch deployment in the project.
+            When creating a name the following rules apply:
+
+            -  Must contain only lowercase letters, numbers, and
+               hyphens.
+            -  Must start with a letter.
+            -  Must be between 1-63 characters.
+            -  Must end with a number or a letter.
+            -  Must be unique within the project.
+        patch_deployment (google.cloud.osconfig_v1.types.PatchDeployment):
+            Required. The patch deployment to create.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    patch_deployment_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    patch_deployment: "PatchDeployment" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="PatchDeployment",
+    )
+
+
+class GetPatchDeploymentRequest(proto.Message):
+    r"""A request message for retrieving a patch deployment.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the patch deployment in the
+            form ``projects/*/patchDeployments/*``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListPatchDeploymentsRequest(proto.Message):
+    r"""A request message for listing patch deployments.
+
+    Attributes:
+        parent (str):
+            Required. The resource name of the parent in the form
+            ``projects/*``.
+        page_size (int):
+            Optional. The maximum number of patch
+            deployments to return. Default is 100.
+        page_token (str):
+            Optional. A pagination token returned from a
+            previous call to ListPatchDeployments that
+            indicates where this listing should continue
+            from.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListPatchDeploymentsResponse(proto.Message):
+    r"""A response message for listing patch deployments.
+
+    Attributes:
+        patch_deployments (MutableSequence[google.cloud.osconfig_v1.types.PatchDeployment]):
+            The list of patch deployments.
+        next_page_token (str):
+            A pagination token that can be used to get
+            the next page of patch deployments.
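To make the ``day_offset`` arithmetic concrete, here is a construction sketch of the docstring's own example (the second Tuesday of the month, shifted three days later). The time-of-day and time-zone values are illustrative, and the snippet only builds the schedule message.

from google.cloud.osconfig_v1 import types
from google.type import datetime_pb2, dayofweek_pb2, timeofday_pb2

schedule = types.RecurringSchedule(
    time_zone=datetime_pb2.TimeZone(id="America/New_York"),
    time_of_day=timeofday_pb2.TimeOfDay(hours=2),
    frequency=types.RecurringSchedule.Frequency.MONTHLY,
    monthly=types.MonthlySchedule(
        week_day_of_month=types.WeekDayOfMonth(
            week_ordinal=2,
            day_of_week=dayofweek_pb2.DayOfWeek.TUESDAY,
            day_offset=3,  # three days after the second Tuesday
        )
    ),
)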
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    patch_deployments: MutableSequence["PatchDeployment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="PatchDeployment",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class DeletePatchDeploymentRequest(proto.Message):
+    r"""A request message for deleting a patch deployment.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the patch deployment in the
+            form ``projects/*/patchDeployments/*``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UpdatePatchDeploymentRequest(proto.Message):
+    r"""A request message for updating a patch deployment.
+
+    Attributes:
+        patch_deployment (google.cloud.osconfig_v1.types.PatchDeployment):
+            Required. The patch deployment to update.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Optional. Field mask that controls which
+            fields of the patch deployment should be
+            updated.
+    """
+
+    patch_deployment: "PatchDeployment" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="PatchDeployment",
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class PausePatchDeploymentRequest(proto.Message):
+    r"""A request message for pausing a patch deployment.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the patch deployment in the
+            form ``projects/*/patchDeployments/*``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ResumePatchDeploymentRequest(proto.Message):
+    r"""A request message for resuming a patch deployment.
+
+    Attributes:
+        name (str):
+            Required. The resource name of the patch deployment in the
+            form ``projects/*/patchDeployments/*``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/patch_jobs.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/patch_jobs.py
new file mode 100644
index 000000000000..5bfe7ab1127e
--- /dev/null
+++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/patch_jobs.py
@@ -0,0 +1,1285 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.osconfig_v1.types import osconfig_common + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1", + manifest={ + "ExecutePatchJobRequest", + "GetPatchJobRequest", + "ListPatchJobInstanceDetailsRequest", + "ListPatchJobInstanceDetailsResponse", + "PatchJobInstanceDetails", + "ListPatchJobsRequest", + "ListPatchJobsResponse", + "PatchJob", + "PatchConfig", + "Instance", + "CancelPatchJobRequest", + "AptSettings", + "YumSettings", + "GooSettings", + "ZypperSettings", + "WindowsUpdateSettings", + "ExecStep", + "ExecStepConfig", + "GcsObject", + "PatchInstanceFilter", + "PatchRollout", + }, +) + + +class ExecutePatchJobRequest(proto.Message): + r"""A request message to initiate patching across Compute Engine + instances. + + Attributes: + parent (str): + Required. The project in which to run this patch in the form + ``projects/*`` + description (str): + Description of the patch job. Length of the + description is limited to 1024 characters. + instance_filter (google.cloud.osconfig_v1.types.PatchInstanceFilter): + Required. Instances to patch, either + explicitly or filtered by some criteria such as + zone or labels. + patch_config (google.cloud.osconfig_v1.types.PatchConfig): + Patch configuration being applied. If + omitted, instances are patched using the default + configurations. + duration (google.protobuf.duration_pb2.Duration): + Duration of the patch job. After the duration + ends, the patch job times out. + dry_run (bool): + If this patch is a dry-run only, instances + are contacted but will do nothing. + display_name (str): + Display name for this patch job. This does + not have to be unique. + rollout (google.cloud.osconfig_v1.types.PatchRollout): + Rollout strategy of the patch job. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + instance_filter: "PatchInstanceFilter" = proto.Field( + proto.MESSAGE, + number=7, + message="PatchInstanceFilter", + ) + patch_config: "PatchConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="PatchConfig", + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=5, + message=duration_pb2.Duration, + ) + dry_run: bool = proto.Field( + proto.BOOL, + number=6, + ) + display_name: str = proto.Field( + proto.STRING, + number=8, + ) + rollout: "PatchRollout" = proto.Field( + proto.MESSAGE, + number=9, + message="PatchRollout", + ) + + +class GetPatchJobRequest(proto.Message): + r"""Request to get an active or completed patch job. + + Attributes: + name (str): + Required. Name of the patch in the form + ``projects/*/patchJobs/*`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListPatchJobInstanceDetailsRequest(proto.Message): + r"""Request to list details for all instances that are part of a + patch job. + + Attributes: + parent (str): + Required. The parent for the instances are in the form of + ``projects/*/patchJobs/*``. + page_size (int): + The maximum number of instance details + records to return. Default is 100. + page_token (str): + A pagination token returned from a previous + call that indicates where this listing should + continue from. + filter (str): + A filter expression that filters results listed in the + response. 
This field supports filtering results by instance
+            zone, name, state, or ``failure_reason``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListPatchJobInstanceDetailsResponse(proto.Message):
+    r"""A response message for listing the instances details for a
+    patch job.
+
+    Attributes:
+        patch_job_instance_details (MutableSequence[google.cloud.osconfig_v1.types.PatchJobInstanceDetails]):
+            A list of instance status.
+        next_page_token (str):
+            A pagination token that can be used to get
+            the next page of results.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    patch_job_instance_details: MutableSequence[
+        "PatchJobInstanceDetails"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="PatchJobInstanceDetails",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class PatchJobInstanceDetails(proto.Message):
+    r"""Patch details for a VM instance. For more information about
+    reviewing VM instance details, see `Listing all VM instance details
+    for a specific patch
+    job `__.
+
+    Attributes:
+        name (str):
+            The instance name in the form
+            ``projects/*/zones/*/instances/*``
+        instance_system_id (str):
+            The unique identifier for the instance. This
+            identifier is defined by the server.
+        state (google.cloud.osconfig_v1.types.Instance.PatchState):
+            Current state of instance patch.
+        failure_reason (str):
+            If the patch fails, this field provides the
+            reason.
+        attempt_count (int):
+            The number of times the agent attempts
+            to apply the patch.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    instance_system_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    state: "Instance.PatchState" = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum="Instance.PatchState",
+    )
+    failure_reason: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    attempt_count: int = proto.Field(
+        proto.INT64,
+        number=5,
+    )
+
+
+class ListPatchJobsRequest(proto.Message):
+    r"""A request message for listing patch jobs.
+
+    Attributes:
+        parent (str):
+            Required. In the form of ``projects/*``
+        page_size (int):
+            The maximum number of instance status to
+            return.
+        page_token (str):
+            A pagination token returned from a previous
+            call that indicates where this listing should
+            continue from.
+        filter (str):
+            If provided, this field specifies the criteria that must be
+            met by patch jobs to be included in the response. Currently,
+            filtering is only available on the patch_deployment field.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListPatchJobsResponse(proto.Message):
+    r"""A response message for listing patch jobs.
+
+    Attributes:
+        patch_jobs (MutableSequence[google.cloud.osconfig_v1.types.PatchJob]):
+            The list of patch jobs.
+        next_page_token (str):
+            A pagination token that can be used to get
+            the next page of results.
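The ``page_token``/``next_page_token`` pairing above follows the standard list-pagination pattern for these APIs. A usage sketch, assuming the accompanying service client; the parent value and filter expression are hypothetical.

from google.cloud import osconfig_v1

client = osconfig_v1.OsConfigServiceClient()
request = osconfig_v1.ListPatchJobsRequest(
    parent="projects/my-project",               # hypothetical
    filter='patch_deployment="my-deployment"',  # hypothetical
)

# The generated client returns a pager that follows next_page_token
# automatically, so no manual token loop is needed.
for patch_job in client.list_patch_jobs(request=request):
    print(patch_job.name, patch_job.state)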
+ """ + + @property + def raw_page(self): + return self + + patch_jobs: MutableSequence["PatchJob"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PatchJob", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class PatchJob(proto.Message): + r"""A high level representation of a patch job that is either in + progress or has completed. + + Instance details are not included in the job. To paginate through + instance details, use ListPatchJobInstanceDetails. + + For more information about patch jobs, see `Creating patch + jobs `__. + + Attributes: + name (str): + Unique identifier for this patch job in the form + ``projects/*/patchJobs/*`` + display_name (str): + Display name for this patch job. This is not + a unique identifier. + description (str): + Description of the patch job. Length of the + description is limited to 1024 characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time this patch job was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Last time this patch job was updated. + state (google.cloud.osconfig_v1.types.PatchJob.State): + The current state of the PatchJob. + instance_filter (google.cloud.osconfig_v1.types.PatchInstanceFilter): + Instances to patch. + patch_config (google.cloud.osconfig_v1.types.PatchConfig): + Patch configuration being applied. + duration (google.protobuf.duration_pb2.Duration): + Duration of the patch job. After the duration + ends, the patch job times out. + instance_details_summary (google.cloud.osconfig_v1.types.PatchJob.InstanceDetailsSummary): + Summary of instance details. + dry_run (bool): + If this patch job is a dry run, the agent + reports that it has finished without running any + updates on the VM instance. + error_message (str): + If this patch job failed, this message + provides information about the failure. + percent_complete (float): + Reflects the overall progress of the patch + job in the range of 0.0 being no progress to + 100.0 being complete. + patch_deployment (str): + Output only. Name of the patch deployment + that created this patch job. + rollout (google.cloud.osconfig_v1.types.PatchRollout): + Rollout strategy being applied. + """ + + class State(proto.Enum): + r"""Enumeration of the various states a patch job passes through + as it executes. + + Values: + STATE_UNSPECIFIED (0): + State must be specified. + STARTED (1): + The patch job was successfully initiated. + INSTANCE_LOOKUP (2): + The patch job is looking up instances to run + the patch on. + PATCHING (3): + Instances are being patched. + SUCCEEDED (4): + Patch job completed successfully. + COMPLETED_WITH_ERRORS (5): + Patch job completed but there were errors. + CANCELED (6): + The patch job was canceled. + TIMED_OUT (7): + The patch job timed out. + """ + STATE_UNSPECIFIED = 0 + STARTED = 1 + INSTANCE_LOOKUP = 2 + PATCHING = 3 + SUCCEEDED = 4 + COMPLETED_WITH_ERRORS = 5 + CANCELED = 6 + TIMED_OUT = 7 + + class InstanceDetailsSummary(proto.Message): + r"""A summary of the current patch state across all instances that this + patch job affects. Contains counts of instances in different states. + These states map to ``InstancePatchState``. List patch job instance + details to see the specific states of each instance. + + Attributes: + pending_instance_count (int): + Number of instances pending patch job. + inactive_instance_count (int): + Number of instances that are inactive. + notified_instance_count (int): + Number of instances notified about patch job. 
+ started_instance_count (int): + Number of instances that have started. + downloading_patches_instance_count (int): + Number of instances that are downloading + patches. + applying_patches_instance_count (int): + Number of instances that are applying + patches. + rebooting_instance_count (int): + Number of instances rebooting. + succeeded_instance_count (int): + Number of instances that have completed + successfully. + succeeded_reboot_required_instance_count (int): + Number of instances that require reboot. + failed_instance_count (int): + Number of instances that failed. + acked_instance_count (int): + Number of instances that have acked and will + start shortly. + timed_out_instance_count (int): + Number of instances that exceeded the time + out while applying the patch. + pre_patch_step_instance_count (int): + Number of instances that are running the + pre-patch step. + post_patch_step_instance_count (int): + Number of instances that are running the + post-patch step. + no_agent_detected_instance_count (int): + Number of instances that do not appear to be + running the agent. Check to ensure that the + agent is installed, running, and able to + communicate with the service. + """ + + pending_instance_count: int = proto.Field( + proto.INT64, + number=1, + ) + inactive_instance_count: int = proto.Field( + proto.INT64, + number=2, + ) + notified_instance_count: int = proto.Field( + proto.INT64, + number=3, + ) + started_instance_count: int = proto.Field( + proto.INT64, + number=4, + ) + downloading_patches_instance_count: int = proto.Field( + proto.INT64, + number=5, + ) + applying_patches_instance_count: int = proto.Field( + proto.INT64, + number=6, + ) + rebooting_instance_count: int = proto.Field( + proto.INT64, + number=7, + ) + succeeded_instance_count: int = proto.Field( + proto.INT64, + number=8, + ) + succeeded_reboot_required_instance_count: int = proto.Field( + proto.INT64, + number=9, + ) + failed_instance_count: int = proto.Field( + proto.INT64, + number=10, + ) + acked_instance_count: int = proto.Field( + proto.INT64, + number=11, + ) + timed_out_instance_count: int = proto.Field( + proto.INT64, + number=12, + ) + pre_patch_step_instance_count: int = proto.Field( + proto.INT64, + number=13, + ) + post_patch_step_instance_count: int = proto.Field( + proto.INT64, + number=14, + ) + no_agent_detected_instance_count: int = proto.Field( + proto.INT64, + number=15, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=14, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + instance_filter: "PatchInstanceFilter" = proto.Field( + proto.MESSAGE, + number=13, + message="PatchInstanceFilter", + ) + patch_config: "PatchConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="PatchConfig", + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + instance_details_summary: InstanceDetailsSummary = proto.Field( + proto.MESSAGE, + number=9, + message=InstanceDetailsSummary, + ) + dry_run: bool = proto.Field( + proto.BOOL, + number=10, + ) + error_message: str = proto.Field( + proto.STRING, + number=11, + ) + 
percent_complete: float = proto.Field(
+        proto.DOUBLE,
+        number=12,
+    )
+    patch_deployment: str = proto.Field(
+        proto.STRING,
+        number=15,
+    )
+    rollout: "PatchRollout" = proto.Field(
+        proto.MESSAGE,
+        number=16,
+        message="PatchRollout",
+    )
+
+
+class PatchConfig(proto.Message):
+    r"""Patch configuration specifications. Contains details on how
+    to apply the patch(es) to a VM instance.
+
+    Attributes:
+        reboot_config (google.cloud.osconfig_v1.types.PatchConfig.RebootConfig):
+            Post-patch reboot settings.
+        apt (google.cloud.osconfig_v1.types.AptSettings):
+            Apt update settings. Use this setting to override the
+            default ``apt`` patch rules.
+        yum (google.cloud.osconfig_v1.types.YumSettings):
+            Yum update settings. Use this setting to override the
+            default ``yum`` patch rules.
+        goo (google.cloud.osconfig_v1.types.GooSettings):
+            Goo update settings. Use this setting to override the
+            default ``goo`` patch rules.
+        zypper (google.cloud.osconfig_v1.types.ZypperSettings):
+            Zypper update settings. Use this setting to override the
+            default ``zypper`` patch rules.
+        windows_update (google.cloud.osconfig_v1.types.WindowsUpdateSettings):
+            Windows update settings. Use this setting to override
+            the default Windows patch rules.
+        pre_step (google.cloud.osconfig_v1.types.ExecStep):
+            The ``ExecStep`` to run before the patch update.
+        post_step (google.cloud.osconfig_v1.types.ExecStep):
+            The ``ExecStep`` to run after the patch update.
+        mig_instances_allowed (bool):
+            Allows the patch job to run on Managed
+            instance groups (MIGs).
+    """
+
+    class RebootConfig(proto.Enum):
+        r"""Post-patch reboot settings.
+
+        Values:
+            REBOOT_CONFIG_UNSPECIFIED (0):
+                The default behavior is DEFAULT.
+            DEFAULT (1):
+                The agent decides if a reboot is necessary by checking
+                signals such as registry keys on Windows or
+                ``/var/run/reboot-required`` on APT based systems. On RPM
+                based systems, a set of core system package install times
+                are compared with system boot time.
+            ALWAYS (2):
+                Always reboot the machine after the update
+                completes.
+            NEVER (3):
+                Never reboot the machine after the update
+                completes.
+        """
+        REBOOT_CONFIG_UNSPECIFIED = 0
+        DEFAULT = 1
+        ALWAYS = 2
+        NEVER = 3
+
+    reboot_config: RebootConfig = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=RebootConfig,
+    )
+    apt: "AptSettings" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="AptSettings",
+    )
+    yum: "YumSettings" = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message="YumSettings",
+    )
+    goo: "GooSettings" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message="GooSettings",
+    )
+    zypper: "ZypperSettings" = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message="ZypperSettings",
+    )
+    windows_update: "WindowsUpdateSettings" = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message="WindowsUpdateSettings",
+    )
+    pre_step: "ExecStep" = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message="ExecStep",
+    )
+    post_step: "ExecStep" = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message="ExecStep",
+    )
+    mig_instances_allowed: bool = proto.Field(
+        proto.BOOL,
+        number=10,
+    )
+
+
+class Instance(proto.Message):
+    r"""Namespace for instance state enums."""
+
+    class PatchState(proto.Enum):
+        r"""Patch state of an instance.
+
+        Values:
+            PATCH_STATE_UNSPECIFIED (0):
+                Unspecified.
+            PENDING (1):
+                The instance is not yet notified.
+            INACTIVE (2):
+                Instance is inactive and cannot be patched.
+            NOTIFIED (3):
+                The instance is notified that it should be
+                patched.
+            STARTED (4):
+                The instance has started the patching
+                process.
+ DOWNLOADING_PATCHES (5): + The instance is downloading patches. + APPLYING_PATCHES (6): + The instance is applying patches. + REBOOTING (7): + The instance is rebooting. + SUCCEEDED (8): + The instance has completed applying patches. + SUCCEEDED_REBOOT_REQUIRED (9): + The instance has completed applying patches + but a reboot is required. + FAILED (10): + The instance has failed to apply the patch. + ACKED (11): + The instance acked the notification and will + start shortly. + TIMED_OUT (12): + The instance exceeded the time out while + applying the patch. + RUNNING_PRE_PATCH_STEP (13): + The instance is running the pre-patch step. + RUNNING_POST_PATCH_STEP (14): + The instance is running the post-patch step. + NO_AGENT_DETECTED (15): + The service could not detect the presence of + the agent. Check to ensure that the agent is + installed, running, and able to communicate with + the service. + """ + PATCH_STATE_UNSPECIFIED = 0 + PENDING = 1 + INACTIVE = 2 + NOTIFIED = 3 + STARTED = 4 + DOWNLOADING_PATCHES = 5 + APPLYING_PATCHES = 6 + REBOOTING = 7 + SUCCEEDED = 8 + SUCCEEDED_REBOOT_REQUIRED = 9 + FAILED = 10 + ACKED = 11 + TIMED_OUT = 12 + RUNNING_PRE_PATCH_STEP = 13 + RUNNING_POST_PATCH_STEP = 14 + NO_AGENT_DETECTED = 15 + + +class CancelPatchJobRequest(proto.Message): + r"""Message for canceling a patch job. + + Attributes: + name (str): + Required. Name of the patch in the form + ``projects/*/patchJobs/*`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AptSettings(proto.Message): + r"""Apt patching is completed by executing + ``apt-get update && apt-get upgrade``. Additional options can be set + to control how this is executed. + + Attributes: + type_ (google.cloud.osconfig_v1.types.AptSettings.Type): + By changing the type to DIST, the patching is performed + using ``apt-get dist-upgrade`` instead. + excludes (MutableSequence[str]): + List of packages to exclude from update. + These packages will be excluded + exclusive_packages (MutableSequence[str]): + An exclusive list of packages to be updated. + These are the only packages that will be + updated. If these packages are not installed, + they will be ignored. This field cannot be + specified with any other patch configuration + fields. + """ + + class Type(proto.Enum): + r"""Apt patch type. + + Values: + TYPE_UNSPECIFIED (0): + By default, upgrade will be performed. + DIST (1): + Runs ``apt-get dist-upgrade``. + UPGRADE (2): + Runs ``apt-get upgrade``. + """ + TYPE_UNSPECIFIED = 0 + DIST = 1 + UPGRADE = 2 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + excludes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclusive_packages: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class YumSettings(proto.Message): + r"""Yum patching is performed by executing ``yum update``. Additional + options can be set to control how this is executed. + + Note that not all settings are supported on all platforms. + + Attributes: + security (bool): + Adds the ``--security`` flag to ``yum update``. Not + supported on all platforms. + minimal (bool): + Will cause patch to run ``yum update-minimal`` instead. + excludes (MutableSequence[str]): + List of packages to exclude from update. These packages are + excluded by using the yum ``--exclude`` flag. + exclusive_packages (MutableSequence[str]): + An exclusive list of packages to be updated. + These are the only packages that will be + updated. 
If these packages are not installed,
+            they will be ignored. This field must not be
+            specified with any other patch configuration
+            fields.
+    """
+
+    security: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+    minimal: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    excludes: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+    exclusive_packages: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=4,
+    )
+
+
+class GooSettings(proto.Message):
+    r"""Googet patching is performed by running ``googet update``."""
+
+
+class ZypperSettings(proto.Message):
+    r"""Zypper patching is performed by running ``zypper patch``. See also
+    https://en.opensuse.org/SDB:Zypper_manual.
+
+    Attributes:
+        with_optional (bool):
+            Adds the ``--with-optional`` flag to ``zypper patch``.
+        with_update (bool):
+            Adds the ``--with-update`` flag to ``zypper patch``.
+        categories (MutableSequence[str]):
+            Install only patches with these categories.
+            Common categories include security, recommended,
+            and feature.
+        severities (MutableSequence[str]):
+            Install only patches with these severities.
+            Common severities include critical, important,
+            moderate, and low.
+        excludes (MutableSequence[str]):
+            List of patches to exclude from update.
+        exclusive_patches (MutableSequence[str]):
+            An exclusive list of patches to be updated. These are the
+            only patches that will be installed using the 'zypper patch
+            patch:' command. This field must not be used
+            with any other patch configuration fields.
+    """
+
+    with_optional: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+    with_update: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    categories: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+    severities: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=4,
+    )
+    excludes: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=5,
+    )
+    exclusive_patches: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=6,
+    )
+
+
+class WindowsUpdateSettings(proto.Message):
+    r"""Windows patching is performed using the Windows Update Agent.
+
+    Attributes:
+        classifications (MutableSequence[google.cloud.osconfig_v1.types.WindowsUpdateSettings.Classification]):
+            Only apply updates of these windows update
+            classifications. If empty, all updates are
+            applied.
+        excludes (MutableSequence[str]):
+            List of KBs to exclude from update.
+        exclusive_patches (MutableSequence[str]):
+            An exclusive list of kbs to be updated. These
+            are the only patches that will be updated. This
+            field must not be used with other patch
+            configurations.
+    """
+
+    class Classification(proto.Enum):
+        r"""Microsoft Windows update classifications as defined in [1]
+        https://support.microsoft.com/en-us/help/824684/description-of-the-standard-terminology-that-is-used-to-describe-micro
+
+        Values:
+            CLASSIFICATION_UNSPECIFIED (0):
+                Invalid. If classifications are included,
+                they must be specified.
+            CRITICAL (1):
+                "A widely released fix for a specific problem that addresses
+                a critical, non-security-related bug." [1]
+            SECURITY (2):
+                "A widely released fix for a product-specific,
+                security-related vulnerability. Security vulnerabilities are
+                rated by their severity. The severity rating is indicated in
+                the Microsoft security bulletin as critical, important,
+                moderate, or low." [1]
+            DEFINITION (3):
+                "A widely released and frequent software update that
+                contains additions to a product's definition database.
+                Definition databases are often used to detect objects that
+                have specific attributes, such as malicious code, phishing
+                websites, or junk mail." [1]
+            DRIVER (4):
+                "Software that controls the input and output of a device."
+                [1]
+            FEATURE_PACK (5):
+                "New product functionality that is first distributed outside
+                the context of a product release and that is typically
+                included in the next full product release." [1]
+            SERVICE_PACK (6):
+                "A tested, cumulative set of all hotfixes, security updates,
+                critical updates, and updates. Additionally, service packs
+                may contain additional fixes for problems that are found
+                internally since the release of the product. Service packs
+                may also contain a limited number of customer-requested
+                design changes or features." [1]
+            TOOL (7):
+                "A utility or feature that helps complete a task or set of
+                tasks." [1]
+            UPDATE_ROLLUP (8):
+                "A tested, cumulative set of hotfixes, security updates,
+                critical updates, and updates that are packaged together for
+                easy deployment. A rollup generally targets a specific area,
+                such as security, or a component of a product, such as
+                Internet Information Services (IIS)." [1]
+            UPDATE (9):
+                "A widely released fix for a specific problem. An update
+                addresses a noncritical, non-security-related bug." [1]
+        """
+        CLASSIFICATION_UNSPECIFIED = 0
+        CRITICAL = 1
+        SECURITY = 2
+        DEFINITION = 3
+        DRIVER = 4
+        FEATURE_PACK = 5
+        SERVICE_PACK = 6
+        TOOL = 7
+        UPDATE_ROLLUP = 8
+        UPDATE = 9
+
+    classifications: MutableSequence[Classification] = proto.RepeatedField(
+        proto.ENUM,
+        number=1,
+        enum=Classification,
+    )
+    excludes: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+    exclusive_patches: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ExecStep(proto.Message):
+    r"""A step that runs an executable for a PatchJob.
+
+    Attributes:
+        linux_exec_step_config (google.cloud.osconfig_v1.types.ExecStepConfig):
+            The ExecStepConfig for all Linux VMs targeted
+            by the PatchJob.
+        windows_exec_step_config (google.cloud.osconfig_v1.types.ExecStepConfig):
+            The ExecStepConfig for all Windows VMs
+            targeted by the PatchJob.
+    """
+
+    linux_exec_step_config: "ExecStepConfig" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="ExecStepConfig",
+    )
+    windows_exec_step_config: "ExecStepConfig" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="ExecStepConfig",
+    )
+
+
+class ExecStepConfig(proto.Message):
+    r"""Common configurations for an ExecStep.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        local_path (str):
+            An absolute path to the executable on the VM.
+
+            This field is a member of `oneof`_ ``executable``.
+        gcs_object (google.cloud.osconfig_v1.types.GcsObject):
+            A Cloud Storage object containing the
+            executable.
+
+            This field is a member of `oneof`_ ``executable``.
+        allowed_success_codes (MutableSequence[int]):
+            Defaults to [0]. A list of possible return values that the
+            execution can return to indicate a success.
+        interpreter (google.cloud.osconfig_v1.types.ExecStepConfig.Interpreter):
+            The script interpreter to use to run the script.
If no interpreter is specified, the script will be executed
+            directly, which will likely only succeed for scripts with
+            [shebang lines]
+            (https://en.wikipedia.org/wiki/Shebang_(Unix)).
+    """
+
+    class Interpreter(proto.Enum):
+        r"""The interpreter used to execute a file.
+
+        Values:
+            INTERPRETER_UNSPECIFIED (0):
+                Invalid for a Windows ExecStepConfig. For a
+                Linux ExecStepConfig, the interpreter will be
+                parsed from the shebang line of the script if
+                unspecified.
+            SHELL (1):
+                Indicates that the script is run with ``/bin/sh`` on Linux
+                and ``cmd`` on Windows.
+            POWERSHELL (2):
+                Indicates that the file is run with PowerShell flags
+                ``-NonInteractive``, ``-NoProfile``, and
+                ``-ExecutionPolicy Bypass``.
+        """
+        INTERPRETER_UNSPECIFIED = 0
+        SHELL = 1
+        POWERSHELL = 2
+
+    local_path: str = proto.Field(
+        proto.STRING,
+        number=1,
+        oneof="executable",
+    )
+    gcs_object: "GcsObject" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof="executable",
+        message="GcsObject",
+    )
+    allowed_success_codes: MutableSequence[int] = proto.RepeatedField(
+        proto.INT32,
+        number=3,
+    )
+    interpreter: Interpreter = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum=Interpreter,
+    )
+
+
+class GcsObject(proto.Message):
+    r"""Cloud Storage object representation.
+
+    Attributes:
+        bucket (str):
+            Required. Bucket of the Cloud Storage object.
+        object_ (str):
+            Required. Name of the Cloud Storage object.
+        generation_number (int):
+            Required. Generation number of the Cloud
+            Storage object. This is used to ensure that the
+            ExecStep specified by this PatchJob does not
+            change.
+    """
+
+    bucket: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    object_: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    generation_number: int = proto.Field(
+        proto.INT64,
+        number=3,
+    )
+
+
+class PatchInstanceFilter(proto.Message):
+    r"""A filter to target VM instances for patching. The targeted
+    VMs must meet all criteria specified. So if both labels and
+    zones are specified, the patch job targets only VMs with those
+    labels and in those zones.
+
+    Attributes:
+        all_ (bool):
+            Target all VM instances in the project. If
+            true, no other criteria are permitted.
+        group_labels (MutableSequence[google.cloud.osconfig_v1.types.PatchInstanceFilter.GroupLabel]):
+            Targets VM instances matching ANY of these
+            GroupLabels. This allows targeting of disparate
+            groups of VM instances.
+        zones (MutableSequence[str]):
+            Targets VM instances in ANY of these zones.
+            Leave empty to target VM instances in any zone.
+        instances (MutableSequence[str]):
+            Targets any of the VM instances specified. Instances are
+            specified by their URI in the form
+            ``zones/[ZONE]/instances/[INSTANCE_NAME]``,
+            ``projects/[PROJECT_ID]/zones/[ZONE]/instances/[INSTANCE_NAME]``,
+            or
+            ``https://www.googleapis.com/compute/v1/projects/[PROJECT_ID]/zones/[ZONE]/instances/[INSTANCE_NAME]``
+        instance_name_prefixes (MutableSequence[str]):
+            Targets VMs whose name starts with one of
+            these prefixes. Similar to labels, this is
+            another way to group VMs when targeting configs,
+            for example prefix="prod-".
+    """
+
+    class GroupLabel(proto.Message):
+        r"""Targets a group of VM instances by using their `assigned
+        labels <https://cloud.google.com/compute/docs/labeling-resources>`__.
+        Labels are key-value pairs. A ``GroupLabel`` is a combination of
+        labels that is used to target VMs for a patch job.
+
+        For example, a patch job can target VMs that have the following
+        ``GroupLabel``: ``{"env":"test", "app":"web"}``.
This means that the + patch job is applied to VMs that have both the labels ``env=test`` + and ``app=web``. + + Attributes: + labels (MutableMapping[str, str]): + Compute Engine instance labels that must be + present for a VM instance to be targeted by this + filter. + """ + + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + + all_: bool = proto.Field( + proto.BOOL, + number=1, + ) + group_labels: MutableSequence[GroupLabel] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=GroupLabel, + ) + zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + instance_name_prefixes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class PatchRollout(proto.Message): + r"""Patch rollout configuration specifications. Contains details + on the concurrency control when applying patch(es) to all + targeted VMs. + + Attributes: + mode (google.cloud.osconfig_v1.types.PatchRollout.Mode): + Mode of the patch rollout. + disruption_budget (google.cloud.osconfig_v1.types.FixedOrPercent): + The maximum number (or percentage) of VMs per zone to + disrupt at any given moment. The number of VMs calculated + from multiplying the percentage by the total number of VMs + in a zone is rounded up. + + During patching, a VM is considered disrupted from the time + the agent is notified to begin until patching has completed. + This disruption time includes the time to complete reboot + and any post-patch steps. + + A VM contributes to the disruption budget if its patching + operation fails either when applying the patches, running + pre or post patch steps, or if it fails to respond with a + success notification before timing out. VMs that are not + running or do not have an active agent do not count toward + this disruption budget. + + For zone-by-zone rollouts, if the disruption budget in a + zone is exceeded, the patch job stops, because continuing to + the next zone requires completion of the patch process in + the previous zone. + + For example, if the disruption budget has a fixed value of + ``10``, and 8 VMs fail to patch in the current zone, the + patch job continues to patch 2 VMs at a time until the zone + is completed. When that zone is completed successfully, + patching begins with 10 VMs at a time in the next zone. If + 10 VMs in the next zone fail to patch, the patch job stops. + """ + + class Mode(proto.Enum): + r"""Type of the rollout. + + Values: + MODE_UNSPECIFIED (0): + Mode must be specified. + ZONE_BY_ZONE (1): + Patches are applied one zone at a time. The + patch job begins in the region with the lowest + number of targeted VMs. Within the region, + patching begins in the zone with the lowest + number of targeted VMs. If multiple regions (or + zones within a region) have the same number of + targeted VMs, a tie-breaker is achieved by + sorting the regions or zones in alphabetical + order. + CONCURRENT_ZONES (2): + Patches are applied to VMs in all zones at + the same time. 
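+
+        Example (an illustrative sketch, not generated output; the
+        ``ZONE_BY_ZONE`` mode and the 10% budget below are assumed
+        values, not defaults):
+
+        .. code-block:: python
+
+            from google.cloud import osconfig_v1
+
+            # Roll out zone by zone, disrupting at most 10% of the
+            # targeted VMs in a zone at any moment.
+            rollout = osconfig_v1.PatchRollout(
+                mode=osconfig_v1.PatchRollout.Mode.ZONE_BY_ZONE,
+                disruption_budget=osconfig_v1.FixedOrPercent(percent=10),
+            )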
+ """ + MODE_UNSPECIFIED = 0 + ZONE_BY_ZONE = 1 + CONCURRENT_ZONES = 2 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + disruption_budget: osconfig_common.FixedOrPercent = proto.Field( + proto.MESSAGE, + number=2, + message=osconfig_common.FixedOrPercent, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/vulnerability.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/vulnerability.py new file mode 100644 index 000000000000..5f79780b1972 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1/types/vulnerability.py @@ -0,0 +1,608 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1", + manifest={ + "VulnerabilityReport", + "GetVulnerabilityReportRequest", + "ListVulnerabilityReportsRequest", + "ListVulnerabilityReportsResponse", + "CVSSv3", + }, +) + + +class VulnerabilityReport(proto.Message): + r"""This API resource represents the vulnerability report for a + specified Compute Engine virtual machine (VM) instance at a given + point in time. + + For more information, see `Vulnerability + reports `__. + + Attributes: + name (str): + Output only. The ``vulnerabilityReport`` API resource name. + + Format: + ``projects/{project_number}/locations/{location}/instances/{instance_id}/vulnerabilityReport`` + vulnerabilities (MutableSequence[google.cloud.osconfig_v1.types.VulnerabilityReport.Vulnerability]): + Output only. List of vulnerabilities + affecting the VM. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp for when the last + vulnerability report was generated for the VM. + """ + + class Vulnerability(proto.Message): + r"""A vulnerability affecting the VM instance. + + Attributes: + details (google.cloud.osconfig_v1.types.VulnerabilityReport.Vulnerability.Details): + Contains metadata as per the upstream feed of + the operating system and NVD. + installed_inventory_item_ids (MutableSequence[str]): + Corresponds to the ``INSTALLED_PACKAGE`` inventory item on + the VM. This field displays the inventory items affected by + this vulnerability. If the vulnerability report was not + updated after the VM inventory update, these values might + not display in VM inventory. For some distros, this field + may be empty. + available_inventory_item_ids (MutableSequence[str]): + Corresponds to the ``AVAILABLE_PACKAGE`` inventory item on + the VM. If the vulnerability report was not updated after + the VM inventory update, these values might not display in + VM inventory. If there is no available fix, the field is + empty. 
The ``inventory_item`` value specifies the latest
+                ``SoftwarePackage`` available to the VM that fixes the
+                vulnerability.
+            create_time (google.protobuf.timestamp_pb2.Timestamp):
+                The timestamp for when the vulnerability was
+                first detected.
+            update_time (google.protobuf.timestamp_pb2.Timestamp):
+                The timestamp for when the vulnerability was
+                last modified.
+            items (MutableSequence[google.cloud.osconfig_v1.types.VulnerabilityReport.Vulnerability.Item]):
+                List of items affected by the vulnerability.
+        """
+
+        class Details(proto.Message):
+            r"""Contains metadata information for the vulnerability. This
+            information is collected from the upstream feed of the operating
+            system.
+
+            Attributes:
+                cve (str):
+                    The CVE of the vulnerability. CVE cannot be
+                    empty and the combination of <cve, classification>
+                    should be unique across vulnerabilities for a VM.
+                cvss_v2_score (float):
+                    The CVSS V2 score of this vulnerability. CVSS
+                    V2 score is on a scale of 0 - 10 where 0
+                    indicates low severity and 10 indicates high
+                    severity.
+                cvss_v3 (google.cloud.osconfig_v1.types.CVSSv3):
+                    The full description of the CVSSv3 for this
+                    vulnerability from NVD.
+                severity (str):
+                    Assigned severity/impact ranking from the
+                    distro.
+                description (str):
+                    The note or description describing the
+                    vulnerability from the distro.
+                references (MutableSequence[google.cloud.osconfig_v1.types.VulnerabilityReport.Vulnerability.Details.Reference]):
+                    Corresponds to the references attached to the
+                    ``VulnerabilityDetails``.
+            """
+
+            class Reference(proto.Message):
+                r"""A reference for this vulnerability.
+
+                Attributes:
+                    url (str):
+                        The url of the reference.
+                    source (str):
+                        The source of the reference, e.g. NVD.
+                """
+
+                url: str = proto.Field(
+                    proto.STRING,
+                    number=1,
+                )
+                source: str = proto.Field(
+                    proto.STRING,
+                    number=2,
+                )
+
+            cve: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+            cvss_v2_score: float = proto.Field(
+                proto.FLOAT,
+                number=2,
+            )
+            cvss_v3: "CVSSv3" = proto.Field(
+                proto.MESSAGE,
+                number=3,
+                message="CVSSv3",
+            )
+            severity: str = proto.Field(
+                proto.STRING,
+                number=4,
+            )
+            description: str = proto.Field(
+                proto.STRING,
+                number=5,
+            )
+            references: MutableSequence[
+                "VulnerabilityReport.Vulnerability.Details.Reference"
+            ] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=6,
+                message="VulnerabilityReport.Vulnerability.Details.Reference",
+            )
+
+        class Item(proto.Message):
+            r"""OS inventory item that is affected by a vulnerability or
+            fixed as a result of a vulnerability.
+
+            Attributes:
+                installed_inventory_item_id (str):
+                    Corresponds to the ``INSTALLED_PACKAGE`` inventory item on
+                    the VM. This field displays the inventory items affected by
+                    this vulnerability. If the vulnerability report was not
+                    updated after the VM inventory update, these values might
+                    not display in VM inventory. For some operating systems,
+                    this field might be empty.
+                available_inventory_item_id (str):
+                    Corresponds to the ``AVAILABLE_PACKAGE`` inventory item on
+                    the VM. If the vulnerability report was not updated after
+                    the VM inventory update, these values might not display in
+                    VM inventory. If there is no available fix, the field is
+                    empty. The ``inventory_item`` value specifies the latest
+                    ``SoftwarePackage`` available to the VM that fixes the
+                    vulnerability.
+                fixed_cpe_uri (str):
+                    The recommended `CPE
+                    URI <https://cpe.mitre.org/specification/>`__ update that
+                    contains a fix for this vulnerability.
+                upstream_fix (str):
+                    The upstream OS patch, packages or KB that
+                    fixes the vulnerability.
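+
+            Example (an illustrative sketch; ``report`` is assumed to
+            be a ``VulnerabilityReport`` obtained from
+            ``get_vulnerability_report``):
+
+            .. code-block:: python
+
+                # Print each CVE together with the upstream fix, if any.
+                for vuln in report.vulnerabilities:
+                    for item in vuln.items:
+                        print(vuln.details.cve, item.upstream_fix)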
+ """ + + installed_inventory_item_id: str = proto.Field( + proto.STRING, + number=1, + ) + available_inventory_item_id: str = proto.Field( + proto.STRING, + number=2, + ) + fixed_cpe_uri: str = proto.Field( + proto.STRING, + number=3, + ) + upstream_fix: str = proto.Field( + proto.STRING, + number=4, + ) + + details: "VulnerabilityReport.Vulnerability.Details" = proto.Field( + proto.MESSAGE, + number=1, + message="VulnerabilityReport.Vulnerability.Details", + ) + installed_inventory_item_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + available_inventory_item_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + items: MutableSequence[ + "VulnerabilityReport.Vulnerability.Item" + ] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="VulnerabilityReport.Vulnerability.Item", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + vulnerabilities: MutableSequence[Vulnerability] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Vulnerability, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class GetVulnerabilityReportRequest(proto.Message): + r"""A request message for getting the vulnerability report for + the specified VM. + + Attributes: + name (str): + Required. API resource name for vulnerability resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, either + Compute Engine ``instance-id`` or ``instance-name`` can be + provided. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListVulnerabilityReportsRequest(proto.Message): + r"""A request message for listing vulnerability reports for all + VM instances in the specified location. + + Attributes: + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + page_size (int): + The maximum number of results to return. + page_token (str): + A pagination token returned from a previous call to + ``ListVulnerabilityReports`` that indicates where this + listing should continue from. + filter (str): + If provided, this field specifies the criteria that must be + met by a ``vulnerabilityReport`` API resource to be included + in the response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListVulnerabilityReportsResponse(proto.Message): + r"""A response message for listing vulnerability reports for all + VM instances in the specified location. + + Attributes: + vulnerability_reports (MutableSequence[google.cloud.osconfig_v1.types.VulnerabilityReport]): + List of vulnerabilityReport objects. + next_page_token (str): + The pagination token to retrieve the next + page of vulnerabilityReports object. 
+ """ + + @property + def raw_page(self): + return self + + vulnerability_reports: MutableSequence["VulnerabilityReport"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VulnerabilityReport", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CVSSv3(proto.Message): + r"""Common Vulnerability Scoring System version 3. + For details, see + https://www.first.org/cvss/specification-document + + Attributes: + base_score (float): + The base score is a function of the base + metric scores. + https://www.first.org/cvss/specification-document#Base-Metrics + exploitability_score (float): + The Exploitability sub-score equation is + derived from the Base Exploitability metrics. + https://www.first.org/cvss/specification-document#2-1-Exploitability-Metrics + impact_score (float): + The Impact sub-score equation is derived from + the Base Impact metrics. + attack_vector (google.cloud.osconfig_v1.types.CVSSv3.AttackVector): + This metric reflects the context by which + vulnerability exploitation is possible. + attack_complexity (google.cloud.osconfig_v1.types.CVSSv3.AttackComplexity): + This metric describes the conditions beyond + the attacker's control that must exist in order + to exploit the vulnerability. + privileges_required (google.cloud.osconfig_v1.types.CVSSv3.PrivilegesRequired): + This metric describes the level of privileges + an attacker must possess before successfully + exploiting the vulnerability. + user_interaction (google.cloud.osconfig_v1.types.CVSSv3.UserInteraction): + This metric captures the requirement for a + human user, other than the attacker, to + participate in the successful compromise of the + vulnerable component. + scope (google.cloud.osconfig_v1.types.CVSSv3.Scope): + The Scope metric captures whether a + vulnerability in one vulnerable component + impacts resources in components beyond its + security scope. + confidentiality_impact (google.cloud.osconfig_v1.types.CVSSv3.Impact): + This metric measures the impact to the + confidentiality of the information resources + managed by a software component due to a + successfully exploited vulnerability. + integrity_impact (google.cloud.osconfig_v1.types.CVSSv3.Impact): + This metric measures the impact to integrity + of a successfully exploited vulnerability. + availability_impact (google.cloud.osconfig_v1.types.CVSSv3.Impact): + This metric measures the impact to the + availability of the impacted component resulting + from a successfully exploited vulnerability. + """ + + class AttackVector(proto.Enum): + r"""This metric reflects the context by which vulnerability + exploitation is possible. + + Values: + ATTACK_VECTOR_UNSPECIFIED (0): + Invalid value. + ATTACK_VECTOR_NETWORK (1): + The vulnerable component is bound to the + network stack and the set of possible attackers + extends beyond the other options listed below, + up to and including the entire Internet. + ATTACK_VECTOR_ADJACENT (2): + The vulnerable component is bound to the + network stack, but the attack is limited at the + protocol level to a logically adjacent topology. + ATTACK_VECTOR_LOCAL (3): + The vulnerable component is not bound to the + network stack and the attacker's path is via + read/write/execute capabilities. + ATTACK_VECTOR_PHYSICAL (4): + The attack requires the attacker to + physically touch or manipulate the vulnerable + component. 
+ """ + ATTACK_VECTOR_UNSPECIFIED = 0 + ATTACK_VECTOR_NETWORK = 1 + ATTACK_VECTOR_ADJACENT = 2 + ATTACK_VECTOR_LOCAL = 3 + ATTACK_VECTOR_PHYSICAL = 4 + + class AttackComplexity(proto.Enum): + r"""This metric describes the conditions beyond the attacker's + control that must exist in order to exploit the vulnerability. + + Values: + ATTACK_COMPLEXITY_UNSPECIFIED (0): + Invalid value. + ATTACK_COMPLEXITY_LOW (1): + Specialized access conditions or extenuating + circumstances do not exist. An attacker can + expect repeatable success when attacking the + vulnerable component. + ATTACK_COMPLEXITY_HIGH (2): + A successful attack depends on conditions + beyond the attacker's control. That is, a + successful attack cannot be accomplished at + will, but requires the attacker to invest in + some measurable amount of effort in preparation + or execution against the vulnerable component + before a successful attack can be expected. + """ + ATTACK_COMPLEXITY_UNSPECIFIED = 0 + ATTACK_COMPLEXITY_LOW = 1 + ATTACK_COMPLEXITY_HIGH = 2 + + class PrivilegesRequired(proto.Enum): + r"""This metric describes the level of privileges an attacker + must possess before successfully exploiting the vulnerability. + + Values: + PRIVILEGES_REQUIRED_UNSPECIFIED (0): + Invalid value. + PRIVILEGES_REQUIRED_NONE (1): + The attacker is unauthorized prior to attack, + and therefore does not require any access to + settings or files of the vulnerable system to + carry out an attack. + PRIVILEGES_REQUIRED_LOW (2): + The attacker requires privileges that provide + basic user capabilities that could normally + affect only settings and files owned by a user. + Alternatively, an attacker with Low privileges + has the ability to access only non-sensitive + resources. + PRIVILEGES_REQUIRED_HIGH (3): + The attacker requires privileges that provide + significant (e.g., administrative) control over + the vulnerable component allowing access to + component-wide settings and files. + """ + PRIVILEGES_REQUIRED_UNSPECIFIED = 0 + PRIVILEGES_REQUIRED_NONE = 1 + PRIVILEGES_REQUIRED_LOW = 2 + PRIVILEGES_REQUIRED_HIGH = 3 + + class UserInteraction(proto.Enum): + r"""This metric captures the requirement for a human user, other + than the attacker, to participate in the successful compromise + of the vulnerable component. + + Values: + USER_INTERACTION_UNSPECIFIED (0): + Invalid value. + USER_INTERACTION_NONE (1): + The vulnerable system can be exploited + without interaction from any user. + USER_INTERACTION_REQUIRED (2): + Successful exploitation of this vulnerability + requires a user to take some action before the + vulnerability can be exploited. + """ + USER_INTERACTION_UNSPECIFIED = 0 + USER_INTERACTION_NONE = 1 + USER_INTERACTION_REQUIRED = 2 + + class Scope(proto.Enum): + r"""The Scope metric captures whether a vulnerability in one + vulnerable component impacts resources in components beyond its + security scope. + + Values: + SCOPE_UNSPECIFIED (0): + Invalid value. + SCOPE_UNCHANGED (1): + An exploited vulnerability can only affect + resources managed by the same security + authority. + SCOPE_CHANGED (2): + An exploited vulnerability can affect + resources beyond the security scope managed by + the security authority of the vulnerable + component. 
+ """ + SCOPE_UNSPECIFIED = 0 + SCOPE_UNCHANGED = 1 + SCOPE_CHANGED = 2 + + class Impact(proto.Enum): + r"""The Impact metrics capture the effects of a successfully + exploited vulnerability on the component that suffers the worst + outcome that is most directly and predictably associated with + the attack. + + Values: + IMPACT_UNSPECIFIED (0): + Invalid value. + IMPACT_HIGH (1): + High impact. + IMPACT_LOW (2): + Low impact. + IMPACT_NONE (3): + No impact. + """ + IMPACT_UNSPECIFIED = 0 + IMPACT_HIGH = 1 + IMPACT_LOW = 2 + IMPACT_NONE = 3 + + base_score: float = proto.Field( + proto.FLOAT, + number=1, + ) + exploitability_score: float = proto.Field( + proto.FLOAT, + number=2, + ) + impact_score: float = proto.Field( + proto.FLOAT, + number=3, + ) + attack_vector: AttackVector = proto.Field( + proto.ENUM, + number=5, + enum=AttackVector, + ) + attack_complexity: AttackComplexity = proto.Field( + proto.ENUM, + number=6, + enum=AttackComplexity, + ) + privileges_required: PrivilegesRequired = proto.Field( + proto.ENUM, + number=7, + enum=PrivilegesRequired, + ) + user_interaction: UserInteraction = proto.Field( + proto.ENUM, + number=8, + enum=UserInteraction, + ) + scope: Scope = proto.Field( + proto.ENUM, + number=9, + enum=Scope, + ) + confidentiality_impact: Impact = proto.Field( + proto.ENUM, + number=10, + enum=Impact, + ) + integrity_impact: Impact = proto.Field( + proto.ENUM, + number=11, + enum=Impact, + ) + availability_impact: Impact = proto.Field( + proto.ENUM, + number=12, + enum=Impact, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/__init__.py new file mode 100644 index 000000000000..8ba5c7fec811 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/__init__.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.osconfig_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.os_config_zonal_service import ( + OsConfigZonalServiceAsyncClient, + OsConfigZonalServiceClient, +) +from .types.config_common import ( + OSPolicyComplianceState, + OSPolicyResourceCompliance, + OSPolicyResourceConfigStep, +) +from .types.instance_os_policies_compliance import ( + GetInstanceOSPoliciesComplianceRequest, + InstanceOSPoliciesCompliance, + ListInstanceOSPoliciesCompliancesRequest, + ListInstanceOSPoliciesCompliancesResponse, +) +from .types.inventory import ( + GetInventoryRequest, + Inventory, + InventoryView, + ListInventoriesRequest, + ListInventoriesResponse, +) +from .types.os_policy import OSPolicy +from .types.os_policy_assignment_reports import ( + GetOSPolicyAssignmentReportRequest, + ListOSPolicyAssignmentReportsRequest, + ListOSPolicyAssignmentReportsResponse, + OSPolicyAssignmentReport, +) +from .types.os_policy_assignments import ( + CreateOSPolicyAssignmentRequest, + DeleteOSPolicyAssignmentRequest, + GetOSPolicyAssignmentRequest, + ListOSPolicyAssignmentRevisionsRequest, + ListOSPolicyAssignmentRevisionsResponse, + ListOSPolicyAssignmentsRequest, + ListOSPolicyAssignmentsResponse, + OSPolicyAssignment, + OSPolicyAssignmentOperationMetadata, + UpdateOSPolicyAssignmentRequest, +) +from .types.osconfig_common import FixedOrPercent +from .types.vulnerability import ( + CVSSv3, + GetVulnerabilityReportRequest, + ListVulnerabilityReportsRequest, + ListVulnerabilityReportsResponse, + VulnerabilityReport, +) + +__all__ = ( + "OsConfigZonalServiceAsyncClient", + "CVSSv3", + "CreateOSPolicyAssignmentRequest", + "DeleteOSPolicyAssignmentRequest", + "FixedOrPercent", + "GetInstanceOSPoliciesComplianceRequest", + "GetInventoryRequest", + "GetOSPolicyAssignmentReportRequest", + "GetOSPolicyAssignmentRequest", + "GetVulnerabilityReportRequest", + "InstanceOSPoliciesCompliance", + "Inventory", + "InventoryView", + "ListInstanceOSPoliciesCompliancesRequest", + "ListInstanceOSPoliciesCompliancesResponse", + "ListInventoriesRequest", + "ListInventoriesResponse", + "ListOSPolicyAssignmentReportsRequest", + "ListOSPolicyAssignmentReportsResponse", + "ListOSPolicyAssignmentRevisionsRequest", + "ListOSPolicyAssignmentRevisionsResponse", + "ListOSPolicyAssignmentsRequest", + "ListOSPolicyAssignmentsResponse", + "ListVulnerabilityReportsRequest", + "ListVulnerabilityReportsResponse", + "OSPolicy", + "OSPolicyAssignment", + "OSPolicyAssignmentOperationMetadata", + "OSPolicyAssignmentReport", + "OSPolicyComplianceState", + "OSPolicyResourceCompliance", + "OSPolicyResourceConfigStep", + "OsConfigZonalServiceClient", + "UpdateOSPolicyAssignmentRequest", + "VulnerabilityReport", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_metadata.json b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..65c30dbfcf67 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_metadata.json @@ -0,0 +1,238 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.osconfig_v1alpha", + "protoPackage": "google.cloud.osconfig.v1alpha", + "schema": "1.0", + "services": { + "OsConfigZonalService": { + "clients": { + "grpc": { + "libraryClient": "OsConfigZonalServiceClient", + "rpcs": { + "CreateOSPolicyAssignment": { + "methods": [ + 
"create_os_policy_assignment" + ] + }, + "DeleteOSPolicyAssignment": { + "methods": [ + "delete_os_policy_assignment" + ] + }, + "GetInstanceOSPoliciesCompliance": { + "methods": [ + "get_instance_os_policies_compliance" + ] + }, + "GetInventory": { + "methods": [ + "get_inventory" + ] + }, + "GetOSPolicyAssignment": { + "methods": [ + "get_os_policy_assignment" + ] + }, + "GetOSPolicyAssignmentReport": { + "methods": [ + "get_os_policy_assignment_report" + ] + }, + "GetVulnerabilityReport": { + "methods": [ + "get_vulnerability_report" + ] + }, + "ListInstanceOSPoliciesCompliances": { + "methods": [ + "list_instance_os_policies_compliances" + ] + }, + "ListInventories": { + "methods": [ + "list_inventories" + ] + }, + "ListOSPolicyAssignmentReports": { + "methods": [ + "list_os_policy_assignment_reports" + ] + }, + "ListOSPolicyAssignmentRevisions": { + "methods": [ + "list_os_policy_assignment_revisions" + ] + }, + "ListOSPolicyAssignments": { + "methods": [ + "list_os_policy_assignments" + ] + }, + "ListVulnerabilityReports": { + "methods": [ + "list_vulnerability_reports" + ] + }, + "UpdateOSPolicyAssignment": { + "methods": [ + "update_os_policy_assignment" + ] + } + } + }, + "grpc-async": { + "libraryClient": "OsConfigZonalServiceAsyncClient", + "rpcs": { + "CreateOSPolicyAssignment": { + "methods": [ + "create_os_policy_assignment" + ] + }, + "DeleteOSPolicyAssignment": { + "methods": [ + "delete_os_policy_assignment" + ] + }, + "GetInstanceOSPoliciesCompliance": { + "methods": [ + "get_instance_os_policies_compliance" + ] + }, + "GetInventory": { + "methods": [ + "get_inventory" + ] + }, + "GetOSPolicyAssignment": { + "methods": [ + "get_os_policy_assignment" + ] + }, + "GetOSPolicyAssignmentReport": { + "methods": [ + "get_os_policy_assignment_report" + ] + }, + "GetVulnerabilityReport": { + "methods": [ + "get_vulnerability_report" + ] + }, + "ListInstanceOSPoliciesCompliances": { + "methods": [ + "list_instance_os_policies_compliances" + ] + }, + "ListInventories": { + "methods": [ + "list_inventories" + ] + }, + "ListOSPolicyAssignmentReports": { + "methods": [ + "list_os_policy_assignment_reports" + ] + }, + "ListOSPolicyAssignmentRevisions": { + "methods": [ + "list_os_policy_assignment_revisions" + ] + }, + "ListOSPolicyAssignments": { + "methods": [ + "list_os_policy_assignments" + ] + }, + "ListVulnerabilityReports": { + "methods": [ + "list_vulnerability_reports" + ] + }, + "UpdateOSPolicyAssignment": { + "methods": [ + "update_os_policy_assignment" + ] + } + } + }, + "rest": { + "libraryClient": "OsConfigZonalServiceClient", + "rpcs": { + "CreateOSPolicyAssignment": { + "methods": [ + "create_os_policy_assignment" + ] + }, + "DeleteOSPolicyAssignment": { + "methods": [ + "delete_os_policy_assignment" + ] + }, + "GetInstanceOSPoliciesCompliance": { + "methods": [ + "get_instance_os_policies_compliance" + ] + }, + "GetInventory": { + "methods": [ + "get_inventory" + ] + }, + "GetOSPolicyAssignment": { + "methods": [ + "get_os_policy_assignment" + ] + }, + "GetOSPolicyAssignmentReport": { + "methods": [ + "get_os_policy_assignment_report" + ] + }, + "GetVulnerabilityReport": { + "methods": [ + "get_vulnerability_report" + ] + }, + "ListInstanceOSPoliciesCompliances": { + "methods": [ + "list_instance_os_policies_compliances" + ] + }, + "ListInventories": { + "methods": [ + "list_inventories" + ] + }, + "ListOSPolicyAssignmentReports": { + "methods": [ + "list_os_policy_assignment_reports" + ] + }, + "ListOSPolicyAssignmentRevisions": { + "methods": [ + 
"list_os_policy_assignment_revisions" + ] + }, + "ListOSPolicyAssignments": { + "methods": [ + "list_os_policy_assignments" + ] + }, + "ListVulnerabilityReports": { + "methods": [ + "list_vulnerability_reports" + ] + }, + "UpdateOSPolicyAssignment": { + "methods": [ + "update_os_policy_assignment" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_version.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_version.py new file mode 100644 index 000000000000..45be81c3fdee --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/py.typed b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/py.typed new file mode 100644 index 000000000000..ebf4fbd316fb --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-os-config package uses inline types. diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/__init__.py new file mode 100644 index 000000000000..39d10f171f77 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import OsConfigZonalServiceAsyncClient +from .client import OsConfigZonalServiceClient + +__all__ = ( + "OsConfigZonalServiceClient", + "OsConfigZonalServiceAsyncClient", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/async_client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/async_client.py new file mode 100644 index 000000000000..1fb901aff9c5 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/async_client.py @@ -0,0 +1,2139 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.osconfig_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.osconfig_v1alpha.services.os_config_zonal_service import pagers +from google.cloud.osconfig_v1alpha.types import ( + config_common, + instance_os_policies_compliance, + inventory, + os_policy, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +from .client import OsConfigZonalServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, OsConfigZonalServiceTransport +from .transports.grpc_asyncio import OsConfigZonalServiceGrpcAsyncIOTransport + + +class OsConfigZonalServiceAsyncClient: + """Zonal OS Config API + + The OS Config service is the server-side component that allows + users to manage package installations and patch jobs for Compute + Engine VM instances. 
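+
+    Example (an illustrative sketch; the resource name is a placeholder
+    and credentials are resolved from the environment):
+
+    .. code-block:: python
+
+        from google.cloud import osconfig_v1alpha
+
+        async def main():
+            client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient()
+            inventory = await client.get_inventory(
+                name="projects/my-project/locations/us-central1-a/instances/my-vm/inventory"
+            )
+            print(inventory.name)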
+ """ + + _client: OsConfigZonalServiceClient + + DEFAULT_ENDPOINT = OsConfigZonalServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = OsConfigZonalServiceClient.DEFAULT_MTLS_ENDPOINT + + instance_path = staticmethod(OsConfigZonalServiceClient.instance_path) + parse_instance_path = staticmethod(OsConfigZonalServiceClient.parse_instance_path) + instance_os_policies_compliance_path = staticmethod( + OsConfigZonalServiceClient.instance_os_policies_compliance_path + ) + parse_instance_os_policies_compliance_path = staticmethod( + OsConfigZonalServiceClient.parse_instance_os_policies_compliance_path + ) + instance_os_policy_assignment_path = staticmethod( + OsConfigZonalServiceClient.instance_os_policy_assignment_path + ) + parse_instance_os_policy_assignment_path = staticmethod( + OsConfigZonalServiceClient.parse_instance_os_policy_assignment_path + ) + inventory_path = staticmethod(OsConfigZonalServiceClient.inventory_path) + parse_inventory_path = staticmethod(OsConfigZonalServiceClient.parse_inventory_path) + os_policy_assignment_path = staticmethod( + OsConfigZonalServiceClient.os_policy_assignment_path + ) + parse_os_policy_assignment_path = staticmethod( + OsConfigZonalServiceClient.parse_os_policy_assignment_path + ) + os_policy_assignment_report_path = staticmethod( + OsConfigZonalServiceClient.os_policy_assignment_report_path + ) + parse_os_policy_assignment_report_path = staticmethod( + OsConfigZonalServiceClient.parse_os_policy_assignment_report_path + ) + vulnerability_report_path = staticmethod( + OsConfigZonalServiceClient.vulnerability_report_path + ) + parse_vulnerability_report_path = staticmethod( + OsConfigZonalServiceClient.parse_vulnerability_report_path + ) + common_billing_account_path = staticmethod( + OsConfigZonalServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + OsConfigZonalServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(OsConfigZonalServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + OsConfigZonalServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + OsConfigZonalServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + OsConfigZonalServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(OsConfigZonalServiceClient.common_project_path) + parse_common_project_path = staticmethod( + OsConfigZonalServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(OsConfigZonalServiceClient.common_location_path) + parse_common_location_path = staticmethod( + OsConfigZonalServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OsConfigZonalServiceAsyncClient: The constructed client. + """ + return OsConfigZonalServiceClient.from_service_account_info.__func__(OsConfigZonalServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OsConfigZonalServiceAsyncClient: The constructed client.
+        """
+        return OsConfigZonalServiceClient.from_service_account_file.__func__(OsConfigZonalServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return OsConfigZonalServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> OsConfigZonalServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            OsConfigZonalServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(OsConfigZonalServiceClient).get_transport_class,
+        type(OsConfigZonalServiceClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, OsConfigZonalServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the os config zonal service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.OsConfigZonalServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = OsConfigZonalServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.CreateOSPolicyAssignmentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + os_policy_assignment: Optional[os_policy_assignments.OSPolicyAssignment] = None, + os_policy_assignment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create an OS policy assignment. + + This method also creates the first revision of the OS policy + assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_create_os_policy_assignment(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + os_policy_assignment = osconfig_v1alpha.OSPolicyAssignment() + os_policy_assignment.os_policies.id = "id_value" + os_policy_assignment.os_policies.mode = "ENFORCEMENT" + os_policy_assignment.os_policies.resource_groups.resources.pkg.apt.name = "name_value" + os_policy_assignment.os_policies.resource_groups.resources.pkg.desired_state = "REMOVED" + os_policy_assignment.os_policies.resource_groups.resources.id = "id_value" + os_policy_assignment.rollout.disruption_budget.fixed = 528 + + request = osconfig_v1alpha.CreateOSPolicyAssignmentRequest( + parent="parent_value", + os_policy_assignment=os_policy_assignment, + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + # Make the request + operation = client.create_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.CreateOSPolicyAssignmentRequest, dict]]): + The request object. A request message to create an OS + policy assignment + parent (:class:`str`): + Required. The parent resource name in + the form: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + os_policy_assignment (:class:`google.cloud.osconfig_v1alpha.types.OSPolicyAssignment`): + Required. The OS policy assignment to + be created. + + This corresponds to the ``os_policy_assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + os_policy_assignment_id (:class:`str`): + Required. The logical name of the OS policy assignment + in the project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the project. + + This corresponds to the ``os_policy_assignment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.osconfig_v1alpha.types.OSPolicyAssignment` OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. 
+ + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, os_policy_assignment, os_policy_assignment_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = os_policy_assignments.CreateOSPolicyAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if os_policy_assignment is not None: + request.os_policy_assignment = os_policy_assignment + if os_policy_assignment_id is not None: + request.os_policy_assignment_id = os_policy_assignment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_os_policy_assignment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + os_policy_assignments.OSPolicyAssignment, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + async def update_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.UpdateOSPolicyAssignmentRequest, dict] + ] = None, + *, + os_policy_assignment: Optional[os_policy_assignments.OSPolicyAssignment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update an existing OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_update_os_policy_assignment(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + os_policy_assignment = osconfig_v1alpha.OSPolicyAssignment() + os_policy_assignment.os_policies.id = "id_value" + os_policy_assignment.os_policies.mode = "ENFORCEMENT" + os_policy_assignment.os_policies.resource_groups.resources.pkg.apt.name = "name_value" + os_policy_assignment.os_policies.resource_groups.resources.pkg.desired_state = "REMOVED" + os_policy_assignment.os_policies.resource_groups.resources.id = "id_value" + os_policy_assignment.rollout.disruption_budget.fixed = 528 + + request = osconfig_v1alpha.UpdateOSPolicyAssignmentRequest( + os_policy_assignment=os_policy_assignment, + ) + + # Make the request + operation = client.update_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.UpdateOSPolicyAssignmentRequest, dict]]): + The request object. A request message to update an OS + policy assignment + os_policy_assignment (:class:`google.cloud.osconfig_v1alpha.types.OSPolicyAssignment`): + Required. The updated OS policy + assignment. + + This corresponds to the ``os_policy_assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask that controls + which fields of the assignment should be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.osconfig_v1alpha.types.OSPolicyAssignment` OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([os_policy_assignment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if os_policy_assignment is not None: + request.os_policy_assignment = os_policy_assignment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_os_policy_assignment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("os_policy_assignment.name", request.os_policy_assignment.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + os_policy_assignments.OSPolicyAssignment, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.GetOSPolicyAssignmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignments.OSPolicyAssignment: + r"""Retrieve an existing OS policy assignment. + + This method always returns the latest revision. In order to + retrieve a previous revision of the assignment, also provide the + revision ID in the ``name`` parameter. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_get_os_policy_assignment(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.GetOSPolicyAssignmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_os_policy_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.GetOSPolicyAssignmentRequest, dict]]): + The request object. A request message to get an OS policy + assignment + name (:class:`str`): + Required. The resource name of OS policy assignment. + + Format: + ``projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}@{revisionId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.osconfig_v1alpha.types.OSPolicyAssignment: + OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = os_policy_assignments.GetOSPolicyAssignmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_os_policy_assignment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_os_policy_assignments( + self, + request: Optional[ + Union[os_policy_assignments.ListOSPolicyAssignmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOSPolicyAssignmentsAsyncPager: + r"""List the OS policy assignments under the parent + resource. + For each OS policy assignment, the latest revision is + returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_list_os_policy_assignments(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListOSPolicyAssignmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_os_policy_assignments(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsRequest, dict]]): + The request object. A request message to list OS policy + assignments for a parent resource + parent (:class:`str`): + Required. 
The parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentsAsyncPager: + A response message for listing all + assignments under given parent. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = os_policy_assignments.ListOSPolicyAssignmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_os_policy_assignments, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOSPolicyAssignmentsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_os_policy_assignment_revisions( + self, + request: Optional[ + Union[os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOSPolicyAssignmentRevisionsAsyncPager: + r"""List the OS policy assignment revisions for a given + OS policy assignment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import osconfig_v1alpha
+
+ async def sample_list_os_policy_assignment_revisions():
+ # Create a client
+ client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = osconfig_v1alpha.ListOSPolicyAssignmentRevisionsRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ page_result = client.list_os_policy_assignment_revisions(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsRequest, dict]]):
+ The request object. A request message to list revisions
+ for an OS policy assignment
+ name (:class:`str`):
+ Required. The name of the OS policy
+ assignment to list revisions for.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentRevisionsAsyncPager:
+ A response message for listing all
+ revisions for an OS policy assignment.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_os_policy_assignment_revisions,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListOSPolicyAssignmentRevisionsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
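+ # (Subsequent pages are fetched lazily: iterating the pager re-sends the
+ # same request with an updated ``page_token`` and the same metadata until
+ # the service returns no ``next_page_token``.)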
+ return response
+
+ async def delete_os_policy_assignment(
+ self,
+ request: Optional[
+ Union[os_policy_assignments.DeleteOSPolicyAssignmentRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Delete the OS policy assignment.
+
+ This method creates a new revision of the OS policy assignment.
+
+ This method returns a long running operation (LRO) that contains
+ the rollout details. The rollout can be cancelled by cancelling
+ the LRO.
+
+ If the LRO completes and is not cancelled, all revisions
+ associated with the OS policy assignment are deleted.
+
+ For more information, see `Method:
+ projects.locations.osPolicyAssignments.operations.cancel `__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import osconfig_v1alpha
+
+ async def sample_delete_os_policy_assignment():
+ # Create a client
+ client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = osconfig_v1alpha.DeleteOSPolicyAssignmentRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ operation = client.delete_os_policy_assignment(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.osconfig_v1alpha.types.DeleteOSPolicyAssignmentRequest, dict]]):
+ The request object. A request message for deleting an OS
+ policy assignment.
+ name (:class:`str`):
+ Required. The name of the OS policy
+ assignment to be deleted.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
+ empty messages in your APIs. A typical example is to
+ use it as the request or the response type of an API
+ method. For instance:
+
+ service Foo {
+ rpc Bar(google.protobuf.Empty) returns
+ (google.protobuf.Empty);
+
+ }
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = os_policy_assignments.DeleteOSPolicyAssignmentRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
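+ # For example, a flattened call such as
+ # delete_os_policy_assignment(name="projects/my-project/locations/us-central1-a/osPolicyAssignments/my-assignment")
+ # (hypothetical resource name) ends up populating ``request.name`` here.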
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_os_policy_assignment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_instance_os_policies_compliance( + self, + request: Optional[ + Union[ + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance_os_policies_compliance.InstanceOSPoliciesCompliance: + r"""Get OS policies compliance data for the specified + Compute Engine VM instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_get_instance_os_policies_compliance(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.GetInstanceOSPoliciesComplianceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_os_policies_compliance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.GetInstanceOSPoliciesComplianceRequest, dict]]): + The request object. A request message for getting OS + policies compliance data for the given + Compute Engine VM instance. + name (:class:`str`): + Required. API resource name for instance OS policies + compliance resource. + + Format: + ``projects/{project}/locations/{location}/instanceOSPoliciesCompliances/{instance}`` + + For ``{project}``, either Compute Engine project-number + or project-id can be provided. For ``{instance}``, + either Compute Engine VM instance-id or instance-name + can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.osconfig_v1alpha.types.InstanceOSPoliciesCompliance: + This API resource represents the OS policies compliance data for a Compute + Engine virtual machine (VM) instance at a given point + in time. + + A Compute Engine VM can have multiple OS policy + assignments, and each assignment can have multiple OS + policies. As a result, multiple OS policies could be + applied to a single VM. + + You can use this API resource to determine both the + compliance state of your VM as well as the compliance + state of an individual OS policy. + + For more information, see [View + compliance](\ https://cloud.google.com/compute/docs/os-configuration-management/view-compliance). + + """ + warnings.warn( + "OsConfigZonalServiceAsyncClient.get_instance_os_policies_compliance is deprecated", + DeprecationWarning, + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = ( + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance_os_policies_compliance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_instance_os_policies_compliances( + self, + request: Optional[ + Union[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstanceOSPoliciesCompliancesAsyncPager: + r"""List OS policies compliance data for all Compute + Engine VM instances in the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_list_instance_os_policies_compliances(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListInstanceOSPoliciesCompliancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_os_policies_compliances(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesRequest, dict]]): + The request object. A request message for listing OS + policies compliance data for all Compute + Engine VMs in the given location. + parent (:class:`str`): + Required. The parent resource name. + + Format: ``projects/{project}/locations/{location}`` + + For ``{project}``, either Compute Engine project-number + or project-id can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListInstanceOSPoliciesCompliancesAsyncPager: + A response message for listing OS + policies compliance data for all Compute + Engine VMs in the given location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + warnings.warn( + "OsConfigZonalServiceAsyncClient.list_instance_os_policies_compliances is deprecated", + DeprecationWarning, + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instance_os_policies_compliances, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstanceOSPoliciesCompliancesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response
+
+ async def get_os_policy_assignment_report(
+ self,
+ request: Optional[
+ Union[os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, dict]
+ ] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> os_policy_assignment_reports.OSPolicyAssignmentReport:
+ r"""Get the OS policy assignment report for the
+ specified Compute Engine VM instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import osconfig_v1alpha
+
+ async def sample_get_os_policy_assignment_report():
+ # Create a client
+ client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = osconfig_v1alpha.GetOSPolicyAssignmentReportRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = await client.get_os_policy_assignment_report(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.osconfig_v1alpha.types.GetOSPolicyAssignmentReportRequest, dict]]):
+ The request object. Get a report of the OS policy
+ assignment for a VM instance.
+ name (:class:`str`):
+ Required. API resource name for OS policy assignment
+ report.
+
+ Format:
+ ``/projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report``
+
+ For ``{project}``, either ``project-number`` or
+ ``project-id`` can be provided. For ``{instance_id}``,
+ either Compute Engine ``instance-id`` or
+ ``instance-name`` can be provided. For
+ ``{assignment_id}``, the OSPolicyAssignment id must be
+ provided.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport:
+ A report of the OS policy assignment
+ status for a given instance.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(
+ request
+ )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
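+ # (Callers can override these defaults per invocation via the ``retry``
+ # and ``timeout`` keyword arguments accepted by this method.)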
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_os_policy_assignment_report,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_os_policy_assignment_reports(
+ self,
+ request: Optional[
+ Union[
+ os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, dict
+ ]
+ ] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOSPolicyAssignmentReportsAsyncPager:
+ r"""List OS policy assignment reports for all Compute
+ Engine VM instances in the specified zone.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import osconfig_v1alpha
+
+ async def sample_list_os_policy_assignment_reports():
+ # Create a client
+ client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient()
+
+ # Initialize request argument(s)
+ request = osconfig_v1alpha.ListOSPolicyAssignmentReportsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_os_policy_assignment_reports(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsRequest, dict]]):
+ The request object. List the OS policy assignment reports
+ for VM instances.
+ parent (:class:`str`):
+ Required. The parent resource name.
+
+ Format:
+ ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/reports``
+
+ For ``{project}``, either ``project-number`` or
+ ``project-id`` can be provided. For ``{instance}``,
+ either ``instance-name``, ``instance-id``, or ``-`` can
+ be provided. If '-' is provided, the response will
+ include OSPolicyAssignmentReports for all instances in
+ the project/location. For ``{assignment}``, either
+ ``assignment-id`` or ``-`` can be provided. If '-' is
+ provided, the response will include
+ OSPolicyAssignmentReports for all OSPolicyAssignments in
+ the project/location. Either {instance} or {assignment}
+ must be ``-``.
+
+ For example:
+ ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/-/reports``
+ returns all reports for the instance
+ ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/{assignment-id}/reports``
+ returns all the reports for the given assignment across
+ all instances.
+ ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/-/reports``
+ returns all the reports for all assignments across all
+ instances.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentReportsAsyncPager: + A response message for listing OS + Policy assignment reports including the + page of results and page token. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest( + request + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_os_policy_assignment_reports, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOSPolicyAssignmentReportsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_inventory( + self, + request: Optional[Union[inventory.GetInventoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> inventory.Inventory: + r"""Get inventory data for the specified VM instance. If the VM has + no associated inventory, the message ``NOT_FOUND`` is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_get_inventory(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.GetInventoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_inventory(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.GetInventoryRequest, dict]]): + The request object. 
A request message for getting + inventory data for the specified VM. + name (:class:`str`): + Required. API resource name for inventory resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/inventory`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either Compute Engine ``instance-id`` or + ``instance-name`` can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.types.Inventory: + This API resource represents the available inventory data for a + Compute Engine virtual machine (VM) instance at a + given point in time. + + You can use this API resource to determine the + inventory data of your VM. + + For more information, see [Information provided by OS + inventory + management](\ https://cloud.google.com/compute/docs/instances/os-inventory-management#data-collected). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = inventory.GetInventoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_inventory, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_inventories( + self, + request: Optional[Union[inventory.ListInventoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInventoriesAsyncPager: + r"""List inventory data for all VM instances in the + specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_list_inventories(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListInventoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inventories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.ListInventoriesRequest, dict]]): + The request object. A request message for listing + inventory data for all VMs in the + specified location. + parent (:class:`str`): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListInventoriesAsyncPager: + A response message for listing + inventory data for all VMs in a + specified location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = inventory.ListInventoriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_inventories, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInventoriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_vulnerability_report( + self, + request: Optional[ + Union[vulnerability.GetVulnerabilityReportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vulnerability.VulnerabilityReport: + r"""Gets the vulnerability report for the specified VM + instance. Only VMs with inventory data have + vulnerability reports associated with them. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_get_vulnerability_report(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.GetVulnerabilityReportRequest( + name="name_value", + ) + + # Make the request + response = await client.get_vulnerability_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.GetVulnerabilityReportRequest, dict]]): + The request object. A request message for getting the + vulnerability report for the specified + VM. + name (:class:`str`): + Required. API resource name for vulnerability resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either Compute Engine ``instance-id`` or + ``instance-name`` can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.types.VulnerabilityReport: + This API resource represents the vulnerability report for a specified + Compute Engine virtual machine (VM) instance at a + given point in time. + + For more information, see [Vulnerability + reports](\ https://cloud.google.com/compute/docs/instances/os-inventory-management#vulnerability-reports). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = vulnerability.GetVulnerabilityReportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
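+ # (``client_info`` also attaches the library/gapic version to the
+ # ``x-goog-api-client`` request header for telemetry.)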
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_vulnerability_report, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_vulnerability_reports( + self, + request: Optional[ + Union[vulnerability.ListVulnerabilityReportsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVulnerabilityReportsAsyncPager: + r"""List vulnerability reports for all VM instances in + the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + async def sample_list_vulnerability_reports(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListVulnerabilityReportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vulnerability_reports(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsRequest, dict]]): + The request object. A request message for listing + vulnerability reports for all VM + instances in the specified location. + parent (:class:`str`): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListVulnerabilityReportsAsyncPager: + A response message for listing + vulnerability reports for all VM + instances in the specified location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = vulnerability.ListVulnerabilityReportsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_vulnerability_reports, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListVulnerabilityReportsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "OsConfigZonalServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OsConfigZonalServiceAsyncClient",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/client.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/client.py new file mode 100644 index 000000000000..8f6ce4661e9b --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/client.py @@ -0,0 +1,2518 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.osconfig_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.osconfig_v1alpha.services.os_config_zonal_service import pagers +from google.cloud.osconfig_v1alpha.types import ( + config_common, + instance_os_policies_compliance, + inventory, + os_policy, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +from .transports.base import DEFAULT_CLIENT_INFO, OsConfigZonalServiceTransport +from .transports.grpc import OsConfigZonalServiceGrpcTransport +from .transports.grpc_asyncio import OsConfigZonalServiceGrpcAsyncIOTransport +from .transports.rest import OsConfigZonalServiceRestTransport + + +class OsConfigZonalServiceClientMeta(type): + """Metaclass for the OsConfigZonalService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[OsConfigZonalServiceTransport]] + _transport_registry["grpc"] = OsConfigZonalServiceGrpcTransport + _transport_registry["grpc_asyncio"] = OsConfigZonalServiceGrpcAsyncIOTransport + _transport_registry["rest"] = OsConfigZonalServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[OsConfigZonalServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class OsConfigZonalServiceClient(metaclass=OsConfigZonalServiceClientMeta): + """Zonal OS Config API + + The OS Config service is the server-side component that allows + users to manage package installations and patch jobs for Compute + Engine VM instances. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "osconfig.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ OsConfigZonalServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ OsConfigZonalServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> OsConfigZonalServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ OsConfigZonalServiceTransport: The transport used by the client
+ instance.
+ """ + return self._transport + + @staticmethod + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_os_policies_compliance_path( + project: str, + location: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance_os_policies_compliance string.""" + return "projects/{project}/locations/{location}/instanceOSPoliciesCompliances/{instance}".format( + project=project, + location=location, + instance=instance, + ) + + @staticmethod + def parse_instance_os_policies_compliance_path(path: str) -> Dict[str, str]: + """Parses a instance_os_policies_compliance path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instanceOSPoliciesCompliances/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_os_policy_assignment_path( + project: str, + location: str, + instance: str, + assignment: str, + ) -> str: + """Returns a fully-qualified instance_os_policy_assignment string.""" + return "projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}".format( + project=project, + location=location, + instance=instance, + assignment=assignment, + ) + + @staticmethod + def parse_instance_os_policy_assignment_path(path: str) -> Dict[str, str]: + """Parses a instance_os_policy_assignment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/osPolicyAssignments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def inventory_path( + project: str, + location: str, + instance: str, + ) -> str: + """Returns a fully-qualified inventory string.""" + return "projects/{project}/locations/{location}/instances/{instance}/inventory".format( + project=project, + location=location, + instance=instance, + ) + + @staticmethod + def parse_inventory_path(path: str) -> Dict[str, str]: + """Parses a inventory path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/inventory$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def os_policy_assignment_path( + project: str, + location: str, + os_policy_assignment: str, + ) -> str: + """Returns a fully-qualified os_policy_assignment string.""" + return "projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}".format( + project=project, + location=location, + os_policy_assignment=os_policy_assignment, + ) + + @staticmethod + def parse_os_policy_assignment_path(path: str) -> Dict[str, str]: + """Parses a os_policy_assignment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/osPolicyAssignments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def os_policy_assignment_report_path( + project: str, + location: str, + instance: str, + assignment: str, + ) -> str: + """Returns a fully-qualified os_policy_assignment_report string.""" + return 
"projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report".format( + project=project, + location=location, + instance=instance, + assignment=assignment, + ) + + @staticmethod + def parse_os_policy_assignment_report_path(path: str) -> Dict[str, str]: + """Parses a os_policy_assignment_report path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/osPolicyAssignments/(?P.+?)/report$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def vulnerability_report_path( + project: str, + location: str, + instance: str, + ) -> str: + """Returns a fully-qualified vulnerability_report string.""" + return "projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport".format( + project=project, + location=location, + instance=instance, + ) + + @staticmethod + def parse_vulnerability_report_path(path: str) -> Dict[str, str]: + """Parses a vulnerability_report path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/vulnerabilityReport$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, OsConfigZonalServiceTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the os config zonal service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, OsConfigZonalServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client.
It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, OsConfigZonalServiceTransport): + # transport is a OsConfigZonalServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.CreateOSPolicyAssignmentRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + os_policy_assignment: Optional[os_policy_assignments.OSPolicyAssignment] = None, + os_policy_assignment_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create an OS policy assignment. + + This method also creates the first revision of the OS policy + assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_create_os_policy_assignment(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + os_policy_assignment = osconfig_v1alpha.OSPolicyAssignment() + os_policy_assignment.os_policies.id = "id_value" + os_policy_assignment.os_policies.mode = "ENFORCEMENT" + os_policy_assignment.os_policies.resource_groups.resources.pkg.apt.name = "name_value" + os_policy_assignment.os_policies.resource_groups.resources.pkg.desired_state = "REMOVED" + os_policy_assignment.os_policies.resource_groups.resources.id = "id_value" + os_policy_assignment.rollout.disruption_budget.fixed = 528 + + request = osconfig_v1alpha.CreateOSPolicyAssignmentRequest( + parent="parent_value", + os_policy_assignment=os_policy_assignment, + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + # Make the request + operation = client.create_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.CreateOSPolicyAssignmentRequest, dict]): + The request object. A request message to create an OS + policy assignment + parent (str): + Required. The parent resource name in + the form: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ os_policy_assignment (google.cloud.osconfig_v1alpha.types.OSPolicyAssignment): + Required. The OS policy assignment to + be created. + + This corresponds to the ``os_policy_assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + os_policy_assignment_id (str): + Required. The logical name of the OS policy assignment + in the project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the project. + + This corresponds to the ``os_policy_assignment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.osconfig_v1alpha.types.OSPolicyAssignment` OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, os_policy_assignment, os_policy_assignment_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.CreateOSPolicyAssignmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, os_policy_assignments.CreateOSPolicyAssignmentRequest + ): + request = os_policy_assignments.CreateOSPolicyAssignmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if os_policy_assignment is not None: + request.os_policy_assignment = os_policy_assignment + if os_policy_assignment_id is not None: + request.os_policy_assignment_id = os_policy_assignment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_os_policy_assignment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
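+        # Illustrative usage (hypothetical values): the returned future resolves
+        # to the created assignment once the rollout finishes, e.g.
+        #   lro = client.create_os_policy_assignment(request=request)
+        #   assignment = lro.result()  # an OSPolicyAssignment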
+ response = operation.from_gapic( + response, + self._transport.operations_client, + os_policy_assignments.OSPolicyAssignment, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + def update_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.UpdateOSPolicyAssignmentRequest, dict] + ] = None, + *, + os_policy_assignment: Optional[os_policy_assignments.OSPolicyAssignment] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update an existing OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_update_os_policy_assignment(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + os_policy_assignment = osconfig_v1alpha.OSPolicyAssignment() + os_policy_assignment.os_policies.id = "id_value" + os_policy_assignment.os_policies.mode = "ENFORCEMENT" + os_policy_assignment.os_policies.resource_groups.resources.pkg.apt.name = "name_value" + os_policy_assignment.os_policies.resource_groups.resources.pkg.desired_state = "REMOVED" + os_policy_assignment.os_policies.resource_groups.resources.id = "id_value" + os_policy_assignment.rollout.disruption_budget.fixed = 528 + + request = osconfig_v1alpha.UpdateOSPolicyAssignmentRequest( + os_policy_assignment=os_policy_assignment, + ) + + # Make the request + operation = client.update_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.UpdateOSPolicyAssignmentRequest, dict]): + The request object. A request message to update an OS + policy assignment + os_policy_assignment (google.cloud.osconfig_v1alpha.types.OSPolicyAssignment): + Required. The updated OS policy + assignment. + + This corresponds to the ``os_policy_assignment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that controls + which fields of the assignment should be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.osconfig_v1alpha.types.OSPolicyAssignment` OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([os_policy_assignment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.UpdateOSPolicyAssignmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, os_policy_assignments.UpdateOSPolicyAssignmentRequest + ): + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if os_policy_assignment is not None: + request.os_policy_assignment = os_policy_assignment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_os_policy_assignment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("os_policy_assignment.name", request.os_policy_assignment.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + os_policy_assignments.OSPolicyAssignment, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + def get_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.GetOSPolicyAssignmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignments.OSPolicyAssignment: + r"""Retrieve an existing OS policy assignment. + + This method always returns the latest revision. In order to + retrieve a previous revision of the assignment, also provide the + revision ID in the ``name`` parameter. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_get_os_policy_assignment(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.GetOSPolicyAssignmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_os_policy_assignment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.GetOSPolicyAssignmentRequest, dict]): + The request object. A request message to get an OS policy + assignment + name (str): + Required. The resource name of OS policy assignment. + + Format: + ``projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}@{revisionId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.types.OSPolicyAssignment: + OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance + through a set of configuration resources that provide + capabilities such as installing or removing software + packages, or executing a script. + + For more information, see [OS policy and OS policy + assignment](\ https://cloud.google.com/compute/docs/os-configuration-management/working-with-os-policies). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.GetOSPolicyAssignmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, os_policy_assignments.GetOSPolicyAssignmentRequest): + request = os_policy_assignments.GetOSPolicyAssignmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_os_policy_assignment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_os_policy_assignments( + self, + request: Optional[ + Union[os_policy_assignments.ListOSPolicyAssignmentsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOSPolicyAssignmentsPager: + r"""List the OS policy assignments under the parent + resource. + For each OS policy assignment, the latest revision is + returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_list_os_policy_assignments(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListOSPolicyAssignmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_os_policy_assignments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsRequest, dict]): + The request object. A request message to list OS policy + assignments for a parent resource + parent (str): + Required. The parent resource name. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentsPager: + A response message for listing all + assignments under given parent. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.ListOSPolicyAssignmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, os_policy_assignments.ListOSPolicyAssignmentsRequest + ): + request = os_policy_assignments.ListOSPolicyAssignmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_os_policy_assignments + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
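+        # Under gRPC this routing information is carried in the
+        # ``x-goog-request-params`` metadata entry, e.g. (illustrative value):
+        #   ("x-goog-request-params", "parent=projects/my-proj/locations/us-central1-a")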
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOSPolicyAssignmentsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_os_policy_assignment_revisions( + self, + request: Optional[ + Union[os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOSPolicyAssignmentRevisionsPager: + r"""List the OS policy assignment revisions for a given + OS policy assignment. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_list_os_policy_assignment_revisions(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListOSPolicyAssignmentRevisionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_os_policy_assignment_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsRequest, dict]): + The request object. A request message to list revisions + for a OS policy assignment + name (str): + Required. The name of the OS policy + assignment to list revisions for. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentRevisionsPager: + A response message for listing all + revisions for a OS policy assignment. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance( + request, os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest + ): + request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_os_policy_assignment_revisions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOSPolicyAssignmentRevisionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_os_policy_assignment( + self, + request: Optional[ + Union[os_policy_assignments.DeleteOSPolicyAssignmentRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete the OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + If the LRO completes and is not cancelled, all revisions + associated with the OS policy assignment are deleted. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_delete_os_policy_assignment(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.DeleteOSPolicyAssignmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_os_policy_assignment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.DeleteOSPolicyAssignmentRequest, dict]): + The request object. A request message for deleting a OS + policy assignment. + name (str): + Required. The name of the OS policy + assignment to be deleted + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignments.DeleteOSPolicyAssignmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, os_policy_assignments.DeleteOSPolicyAssignmentRequest + ): + request = os_policy_assignments.DeleteOSPolicyAssignmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_os_policy_assignment + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=os_policy_assignments.OSPolicyAssignmentOperationMetadata, + ) + + # Done; return the response. + return response + + def get_instance_os_policies_compliance( + self, + request: Optional[ + Union[ + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, + dict, + ] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance_os_policies_compliance.InstanceOSPoliciesCompliance: + r"""Get OS policies compliance data for the specified + Compute Engine VM instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_get_instance_os_policies_compliance(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.GetInstanceOSPoliciesComplianceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_os_policies_compliance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.GetInstanceOSPoliciesComplianceRequest, dict]): + The request object. A request message for getting OS + policies compliance data for the given + Compute Engine VM instance. + name (str): + Required. API resource name for instance OS policies + compliance resource. + + Format: + ``projects/{project}/locations/{location}/instanceOSPoliciesCompliances/{instance}`` + + For ``{project}``, either Compute Engine project-number + or project-id can be provided. For ``{instance}``, + either Compute Engine VM instance-id or instance-name + can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.types.InstanceOSPoliciesCompliance: + This API resource represents the OS policies compliance data for a Compute + Engine virtual machine (VM) instance at a given point + in time. + + A Compute Engine VM can have multiple OS policy + assignments, and each assignment can have multiple OS + policies. As a result, multiple OS policies could be + applied to a single VM. + + You can use this API resource to determine both the + compliance state of your VM as well as the compliance + state of an individual OS policy. + + For more information, see [View + compliance](\ https://cloud.google.com/compute/docs/os-configuration-management/view-compliance). + + """ + warnings.warn( + "OsConfigZonalServiceClient.get_instance_os_policies_compliance is deprecated", + DeprecationWarning, + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, + ): + request = ( + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
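+        # (Note: ``_wrapped_methods`` maps each transport method to a version
+        # wrapped by ``gapic_v1.method.wrap_method``, which applies the default
+        # retry and timeout unless they are overridden per call.)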
+ rpc = self._transport._wrapped_methods[ + self._transport.get_instance_os_policies_compliance + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_instance_os_policies_compliances( + self, + request: Optional[ + Union[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstanceOSPoliciesCompliancesPager: + r"""List OS policies compliance data for all Compute + Engine VM instances in the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_list_instance_os_policies_compliances(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListInstanceOSPoliciesCompliancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_os_policies_compliances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesRequest, dict]): + The request object. A request message for listing OS + policies compliance data for all Compute + Engine VMs in the given location. + parent (str): + Required. The parent resource name. + + Format: ``projects/{project}/locations/{location}`` + + For ``{project}``, either Compute Engine project-number + or project-id can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListInstanceOSPoliciesCompliancesPager: + A response message for listing OS + policies compliance data for all Compute + Engine VMs in the given location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + warnings.warn( + "OsConfigZonalServiceClient.list_instance_os_policies_compliances is deprecated", + DeprecationWarning, + ) + + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
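+        # For example (illustrative), passing both ``request=`` and ``parent=``
+        # raises the ValueError below.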
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(
+            request,
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest,
+        ):
+            request = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest(
+                request
+            )
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.list_instance_os_policies_compliances
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListInstanceOSPoliciesCompliancesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_os_policy_assignment_report(
+        self,
+        request: Optional[
+            Union[os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, dict]
+        ] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> os_policy_assignment_reports.OSPolicyAssignmentReport:
+        r"""Get the OS policy assignment report for the
+        specified Compute Engine VM instance.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import osconfig_v1alpha
+
+            def sample_get_os_policy_assignment_report():
+                # Create a client
+                client = osconfig_v1alpha.OsConfigZonalServiceClient()
+
+                # Initialize request argument(s)
+                request = osconfig_v1alpha.GetOSPolicyAssignmentReportRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_os_policy_assignment_report(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.osconfig_v1alpha.types.GetOSPolicyAssignmentReportRequest, dict]):
+                The request object. Get a report of the OS policy
+                assignment for a VM instance.
+            name (str):
+                Required. API resource name for OS policy assignment
+                report.
+
+                Format:
+                ``/projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report``
+
+                For ``{project}``, either ``project-number`` or
+                ``project-id`` can be provided.
+                For ``{instance_id}``, either Compute Engine ``instance-id`` or
+                ``instance-name`` can be provided. For ``{assignment_id}``, the
+                OSPolicyAssignment id must be provided.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport:
+                A report of the OS policy assignment
+                status for a given instance.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(
+            request, os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest
+        ):
+            request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(
+                request
+            )
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.get_os_policy_assignment_report
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_os_policy_assignment_reports(
+        self,
+        request: Optional[
+            Union[
+                os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, dict
+            ]
+        ] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListOSPolicyAssignmentReportsPager:
+        r"""List OS policy assignment reports for all Compute
+        Engine VM instances in the specified zone.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_list_os_policy_assignment_reports(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListOSPolicyAssignmentReportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_os_policy_assignment_reports(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsRequest, dict]): + The request object. List the OS policy assignment reports + for VM instances. + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/reports`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either ``instance-name``, ``instance-id``, or ``-`` can + be provided. If '-' is provided, the response will + include OSPolicyAssignmentReports for all instances in + the project/location. For ``{assignment}``, either + ``assignment-id`` or ``-`` can be provided. If '-' is + provided, the response will include + OSPolicyAssignmentReports for all OSPolicyAssignments in + the project/location. Either {instance} or {assignment} + must be ``-``. + + For example: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/-/reports`` + returns all reports for the instance + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/{assignment-id}/reports`` + returns all the reports for the given assignment across + all instances. + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/-/reports`` + returns all the reports for all assignments across all + instances. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListOSPolicyAssignmentReportsPager: + A response message for listing OS + Policy assignment reports including the + page of results and page token. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance( + request, os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest + ): + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_os_policy_assignment_reports + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOSPolicyAssignmentReportsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_inventory( + self, + request: Optional[Union[inventory.GetInventoryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> inventory.Inventory: + r"""Get inventory data for the specified VM instance. If the VM has + no associated inventory, the message ``NOT_FOUND`` is returned. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_get_inventory(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.GetInventoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_inventory(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.GetInventoryRequest, dict]): + The request object. A request message for getting + inventory data for the specified VM. + name (str): + Required. API resource name for inventory resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/inventory`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either Compute Engine ``instance-id`` or + ``instance-name`` can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.types.Inventory: + This API resource represents the available inventory data for a + Compute Engine virtual machine (VM) instance at a + given point in time. 
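Editorial note, not part of the generated diff: every flattened method in this client follows the request-coercion contract seen above, so exactly one of ``request`` or the flattened field may be supplied. A minimal sketch of the three call shapes, assuming application default credentials and placeholder resource names:

.. code-block:: python

    from google.cloud import osconfig_v1alpha

    client = osconfig_v1alpha.OsConfigZonalServiceClient()
    parent = "projects/my-project/locations/us-central1-a/instances/-"

    # 1. Flattened keyword argument only.
    pager = client.list_inventories(parent=parent)

    # 2. Request object (or equivalent dict) only.
    pager = client.list_inventories(
        request=osconfig_v1alpha.ListInventoriesRequest(parent=parent)
    )

    # 3. Mixing both fails fast with ValueError, before any RPC is sent.
    try:
        client.list_inventories(
            request=osconfig_v1alpha.ListInventoriesRequest(parent=parent),
            parent=parent,
        )
    except ValueError as exc:
        print(exc)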
+ + You can use this API resource to determine the + inventory data of your VM. + + For more information, see [Information provided by OS + inventory + management](\ https://cloud.google.com/compute/docs/instances/os-inventory-management#data-collected). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a inventory.GetInventoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, inventory.GetInventoryRequest): + request = inventory.GetInventoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inventory] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_inventories( + self, + request: Optional[Union[inventory.ListInventoriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInventoriesPager: + r"""List inventory data for all VM instances in the + specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_list_inventories(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListInventoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_inventories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.ListInventoriesRequest, dict]): + The request object. A request message for listing + inventory data for all VMs in the + specified location. + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListInventoriesPager: + A response message for listing + inventory data for all VMs in a + specified location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a inventory.ListInventoriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, inventory.ListInventoriesRequest): + request = inventory.ListInventoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_inventories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInventoriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_vulnerability_report( + self, + request: Optional[ + Union[vulnerability.GetVulnerabilityReportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vulnerability.VulnerabilityReport: + r"""Gets the vulnerability report for the specified VM + instance. Only VMs with inventory data have + vulnerability reports associated with them. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_get_vulnerability_report(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.GetVulnerabilityReportRequest( + name="name_value", + ) + + # Make the request + response = client.get_vulnerability_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.GetVulnerabilityReportRequest, dict]): + The request object. A request message for getting the + vulnerability report for the specified + VM. + name (str): + Required. API resource name for vulnerability resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, + either Compute Engine ``instance-id`` or + ``instance-name`` can be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.types.VulnerabilityReport: + This API resource represents the vulnerability report for a specified + Compute Engine virtual machine (VM) instance at a + given point in time. + + For more information, see [Vulnerability + reports](\ https://cloud.google.com/compute/docs/instances/os-inventory-management#vulnerability-reports). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vulnerability.GetVulnerabilityReportRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vulnerability.GetVulnerabilityReportRequest): + request = vulnerability.GetVulnerabilityReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_vulnerability_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_vulnerability_reports( + self, + request: Optional[ + Union[vulnerability.ListVulnerabilityReportsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListVulnerabilityReportsPager: + r"""List vulnerability reports for all VM instances in + the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import osconfig_v1alpha + + def sample_list_vulnerability_reports(): + # Create a client + client = osconfig_v1alpha.OsConfigZonalServiceClient() + + # Initialize request argument(s) + request = osconfig_v1alpha.ListVulnerabilityReportsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vulnerability_reports(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsRequest, dict]): + The request object. A request message for listing + vulnerability reports for all VM + instances in the specified location. + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.osconfig_v1alpha.services.os_config_zonal_service.pagers.ListVulnerabilityReportsPager: + A response message for listing + vulnerability reports for all VM + instances in the specified location. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a vulnerability.ListVulnerabilityReportsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, vulnerability.ListVulnerabilityReportsRequest): + request = vulnerability.ListVulnerabilityReportsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.list_vulnerability_reports + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListVulnerabilityReportsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "OsConfigZonalServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("OsConfigZonalServiceClient",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/pagers.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/pagers.py new file mode 100644 index 000000000000..e01b621c4449 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/pagers.py @@ -0,0 +1,871 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.osconfig_v1alpha.types import ( + instance_os_policies_compliance, + inventory, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + + +class ListOSPolicyAssignmentsPager: + """A pager for iterating through ``list_os_policy_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``os_policy_assignments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOSPolicyAssignments`` requests and continue to iterate + through the ``os_policy_assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
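Editorial note, not part of the generated diff: the pager defined here is consumed in three ways, matching its docstring. A minimal sketch with placeholder resource names, assuming application default credentials; each pattern assumes a freshly created pager:

.. code-block:: python

    from google.cloud import osconfig_v1alpha

    client = osconfig_v1alpha.OsConfigZonalServiceClient()
    pager = client.list_os_policy_assignments(
        parent="projects/my-project/locations/us-central1-a"
    )

    # 1. Flat iteration: __iter__ yields items, fetching pages lazily.
    for assignment in pager:
        print(assignment.name)

    # 2. Page-wise iteration via the `pages` property.
    for page in pager.pages:
        print(len(page.os_policy_assignments))

    # 3. Attribute lookup is proxied to the most recent response
    # through __getattr__, e.g. its raw next_page_token.
    print(pager.next_page_token)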
+ """ + + def __init__( + self, + method: Callable[..., os_policy_assignments.ListOSPolicyAssignmentsResponse], + request: os_policy_assignments.ListOSPolicyAssignmentsRequest, + response: os_policy_assignments.ListOSPolicyAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = os_policy_assignments.ListOSPolicyAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[os_policy_assignments.ListOSPolicyAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[os_policy_assignments.OSPolicyAssignment]: + for page in self.pages: + yield from page.os_policy_assignments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentsAsyncPager: + """A pager for iterating through ``list_os_policy_assignments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``os_policy_assignments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOSPolicyAssignments`` requests and continue to iterate + through the ``os_policy_assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[os_policy_assignments.ListOSPolicyAssignmentsResponse] + ], + request: os_policy_assignments.ListOSPolicyAssignmentsRequest, + response: os_policy_assignments.ListOSPolicyAssignmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = os_policy_assignments.ListOSPolicyAssignmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[os_policy_assignments.ListOSPolicyAssignmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[os_policy_assignments.OSPolicyAssignment]: + async def async_generator(): + async for page in self.pages: + for response in page.os_policy_assignments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentRevisionsPager: + """A pager for iterating through ``list_os_policy_assignment_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``os_policy_assignments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOSPolicyAssignmentRevisions`` requests and continue to iterate + through the ``os_policy_assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse + ], + request: os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + response: os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[os_policy_assignments.OSPolicyAssignment]: + for page in self.pages: + yield from page.os_policy_assignments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentRevisionsAsyncPager: + """A pager for iterating through ``list_os_policy_assignment_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``os_policy_assignments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOSPolicyAssignmentRevisions`` requests and continue to iterate + through the ``os_policy_assignments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse], + ], + request: os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + response: os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentRevisionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest( + request + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[os_policy_assignments.OSPolicyAssignment]: + async def async_generator(): + async for page in self.pages: + for response in page.os_policy_assignments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstanceOSPoliciesCompliancesPager: + """A pager for iterating through ``list_instance_os_policies_compliances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instance_os_policies_compliances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstanceOSPoliciesCompliances`` requests and continue to iterate + through the ``instance_os_policies_compliances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse, + ], + request: instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, + response: instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest( + request + ) + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__( + self, + ) -> Iterator[instance_os_policies_compliance.InstanceOSPoliciesCompliance]: + for page in self.pages: + yield from page.instance_os_policies_compliances + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstanceOSPoliciesCompliancesAsyncPager: + """A pager for iterating through ``list_instance_os_policies_compliances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instance_os_policies_compliances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstanceOSPoliciesCompliances`` requests and continue to iterate + through the ``instance_os_policies_compliances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse + ], + ], + request: instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, + response: instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListInstanceOSPoliciesCompliancesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest( + request + ) + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[instance_os_policies_compliance.InstanceOSPoliciesCompliance]: + async def async_generator(): + async for page in self.pages: + for response in page.instance_os_policies_compliances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentReportsPager: + """A pager for iterating through ``list_os_policy_assignment_reports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``os_policy_assignment_reports`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOSPolicyAssignmentReports`` requests and continue to iterate + through the ``os_policy_assignment_reports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse + ], + request: os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + response: os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(request) + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__( + self, + ) -> Iterator[os_policy_assignment_reports.OSPolicyAssignmentReport]: + for page in self.pages: + yield from page.os_policy_assignment_reports + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOSPolicyAssignmentReportsAsyncPager: + """A pager for iterating through ``list_os_policy_assignment_reports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``os_policy_assignment_reports`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOSPolicyAssignmentReports`` requests and continue to iterate + through the ``os_policy_assignment_reports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse + ], + ], + request: os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + response: os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListOSPolicyAssignmentReportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(request) + ) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[os_policy_assignment_reports.OSPolicyAssignmentReport]: + async def async_generator(): + async for page in self.pages: + for response in page.os_policy_assignment_reports: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInventoriesPager: + """A pager for iterating through ``list_inventories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListInventoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``inventories`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInventories`` requests and continue to iterate + through the ``inventories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListInventoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., inventory.ListInventoriesResponse], + request: inventory.ListInventoriesRequest, + response: inventory.ListInventoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListInventoriesRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListInventoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = inventory.ListInventoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[inventory.ListInventoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[inventory.Inventory]: + for page in self.pages: + yield from page.inventories + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInventoriesAsyncPager: + """A pager for iterating through ``list_inventories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListInventoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``inventories`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListInventories`` requests and continue to iterate + through the ``inventories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListInventoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[inventory.ListInventoriesResponse]], + request: inventory.ListInventoriesRequest, + response: inventory.ListInventoriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListInventoriesRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListInventoriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = inventory.ListInventoriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[inventory.ListInventoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[inventory.Inventory]: + async def async_generator(): + async for page in self.pages: + for response in page.inventories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVulnerabilityReportsPager: + """A pager for iterating through ``list_vulnerability_reports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``vulnerability_reports`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListVulnerabilityReports`` requests and continue to iterate + through the ``vulnerability_reports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vulnerability.ListVulnerabilityReportsResponse], + request: vulnerability.ListVulnerabilityReportsRequest, + response: vulnerability.ListVulnerabilityReportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
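Editorial note, not part of the generated diff: on the async pagers, ``pages`` is an async generator property, so page-wise traversal also uses ``async for``. A minimal sketch against the inventories async pager defined above (default credentials, placeholder parent):

.. code-block:: python

    import asyncio

    from google.cloud import osconfig_v1alpha

    async def main() -> None:
        client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient()
        pager = await client.list_inventories(
            parent="projects/my-project/locations/us-central1-a/instances/-"
        )

        # Each page fetch awaits the underlying RPC.
        async for page in pager.pages:
            for inv in page.inventories:
                print(inv.name)

    asyncio.run(main())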
+ """ + self._method = method + self._request = vulnerability.ListVulnerabilityReportsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vulnerability.ListVulnerabilityReportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[vulnerability.VulnerabilityReport]: + for page in self.pages: + yield from page.vulnerability_reports + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVulnerabilityReportsAsyncPager: + """A pager for iterating through ``list_vulnerability_reports`` requests. + + This class thinly wraps an initial + :class:`google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``vulnerability_reports`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListVulnerabilityReports`` requests and continue to iterate + through the ``vulnerability_reports`` field on the + corresponding responses. + + All the usual :class:`google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[vulnerability.ListVulnerabilityReportsResponse] + ], + request: vulnerability.ListVulnerabilityReportsRequest, + response: vulnerability.ListVulnerabilityReportsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsRequest): + The initial request object. + response (google.cloud.osconfig_v1alpha.types.ListVulnerabilityReportsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = vulnerability.ListVulnerabilityReportsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[vulnerability.ListVulnerabilityReportsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[vulnerability.VulnerabilityReport]: + async def async_generator(): + async for page in self.pages: + for response in page.vulnerability_reports: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/__init__.py new file mode 100644 index 000000000000..77d28e750fe7 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OsConfigZonalServiceTransport +from .grpc import OsConfigZonalServiceGrpcTransport +from .grpc_asyncio import OsConfigZonalServiceGrpcAsyncIOTransport +from .rest import OsConfigZonalServiceRestInterceptor, OsConfigZonalServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[OsConfigZonalServiceTransport]] +_transport_registry["grpc"] = OsConfigZonalServiceGrpcTransport +_transport_registry["grpc_asyncio"] = OsConfigZonalServiceGrpcAsyncIOTransport +_transport_registry["rest"] = OsConfigZonalServiceRestTransport + +__all__ = ( + "OsConfigZonalServiceTransport", + "OsConfigZonalServiceGrpcTransport", + "OsConfigZonalServiceGrpcAsyncIOTransport", + "OsConfigZonalServiceRestTransport", + "OsConfigZonalServiceRestInterceptor", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/base.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/base.py new file mode 100644 index 000000000000..cd444113dcd2 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/base.py @@ -0,0 +1,383 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.osconfig_v1alpha import gapic_version as package_version +from google.cloud.osconfig_v1alpha.types import ( + instance_os_policies_compliance, + inventory, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class OsConfigZonalServiceTransport(abc.ABC): + """Abstract transport class for OsConfigZonalService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "osconfig.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_os_policy_assignment: gapic_v1.method.wrap_method( + self.create_os_policy_assignment, + default_timeout=None, + client_info=client_info, + ), + self.update_os_policy_assignment: gapic_v1.method.wrap_method( + self.update_os_policy_assignment, + default_timeout=None, + client_info=client_info, + ), + self.get_os_policy_assignment: gapic_v1.method.wrap_method( + self.get_os_policy_assignment, + default_timeout=None, + client_info=client_info, + ), + self.list_os_policy_assignments: gapic_v1.method.wrap_method( + self.list_os_policy_assignments, + default_timeout=None, + client_info=client_info, + ), + self.list_os_policy_assignment_revisions: gapic_v1.method.wrap_method( + self.list_os_policy_assignment_revisions, + default_timeout=None, + client_info=client_info, + ), + self.delete_os_policy_assignment: gapic_v1.method.wrap_method( + self.delete_os_policy_assignment, + default_timeout=None, + client_info=client_info, + ), + self.get_instance_os_policies_compliance: gapic_v1.method.wrap_method( + self.get_instance_os_policies_compliance, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_os_policies_compliances: gapic_v1.method.wrap_method( + self.list_instance_os_policies_compliances, + default_timeout=None, + client_info=client_info, + ), + self.get_os_policy_assignment_report: gapic_v1.method.wrap_method( + self.get_os_policy_assignment_report, + default_timeout=None, + client_info=client_info, + ), + self.list_os_policy_assignment_reports: gapic_v1.method.wrap_method( + self.list_os_policy_assignment_reports, + default_timeout=None, + client_info=client_info, + ), + self.get_inventory: gapic_v1.method.wrap_method( + self.get_inventory, + default_timeout=None, + client_info=client_info, + ), + self.list_inventories: gapic_v1.method.wrap_method( + self.list_inventories, + default_timeout=None, + client_info=client_info, + ), + self.get_vulnerability_report: gapic_v1.method.wrap_method( + self.get_vulnerability_report, + default_timeout=None, + client_info=client_info, + ), + self.list_vulnerability_reports: gapic_v1.method.wrap_method( + self.list_vulnerability_reports, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources 
associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.CreateOSPolicyAssignmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.UpdateOSPolicyAssignmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.GetOSPolicyAssignmentRequest], + Union[ + os_policy_assignments.OSPolicyAssignment, + Awaitable[os_policy_assignments.OSPolicyAssignment], + ], + ]: + raise NotImplementedError() + + @property + def list_os_policy_assignments( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentsRequest], + Union[ + os_policy_assignments.ListOSPolicyAssignmentsResponse, + Awaitable[os_policy_assignments.ListOSPolicyAssignmentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_os_policy_assignment_revisions( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest], + Union[ + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + Awaitable[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.DeleteOSPolicyAssignmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_instance_os_policies_compliance( + self, + ) -> Callable[ + [instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest], + Union[ + instance_os_policies_compliance.InstanceOSPoliciesCompliance, + Awaitable[instance_os_policies_compliance.InstanceOSPoliciesCompliance], + ], + ]: + raise NotImplementedError() + + @property + def list_instance_os_policies_compliances( + self, + ) -> Callable[ + [instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest], + Union[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse, + Awaitable[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def get_os_policy_assignment_report( + self, + ) -> Callable[ + [os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest], + Union[ + os_policy_assignment_reports.OSPolicyAssignmentReport, + Awaitable[os_policy_assignment_reports.OSPolicyAssignmentReport], + ], + ]: + raise NotImplementedError() + + @property + def list_os_policy_assignment_reports( + self, + ) -> Callable[ + [os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest], + Union[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + Awaitable[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse + ], + ], + ]: + raise NotImplementedError() + + @property + def get_inventory( + self, + ) -> Callable[ + [inventory.GetInventoryRequest], + Union[inventory.Inventory, Awaitable[inventory.Inventory]], + ]: 
+ raise NotImplementedError() + + @property + def list_inventories( + self, + ) -> Callable[ + [inventory.ListInventoriesRequest], + Union[ + inventory.ListInventoriesResponse, + Awaitable[inventory.ListInventoriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_vulnerability_report( + self, + ) -> Callable[ + [vulnerability.GetVulnerabilityReportRequest], + Union[ + vulnerability.VulnerabilityReport, + Awaitable[vulnerability.VulnerabilityReport], + ], + ]: + raise NotImplementedError() + + @property + def list_vulnerability_reports( + self, + ) -> Callable[ + [vulnerability.ListVulnerabilityReportsRequest], + Union[ + vulnerability.ListVulnerabilityReportsResponse, + Awaitable[vulnerability.ListVulnerabilityReportsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("OsConfigZonalServiceTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/grpc.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/grpc.py new file mode 100644 index 000000000000..113eec466914 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/grpc.py @@ -0,0 +1,729 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.osconfig_v1alpha.types import ( + instance_os_policies_compliance, + inventory, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +from .base import DEFAULT_CLIENT_INFO, OsConfigZonalServiceTransport + + +class OsConfigZonalServiceGrpcTransport(OsConfigZonalServiceTransport): + """gRPC backend transport for OsConfigZonalService. + + Zonal OS Config API + + The OS Config service is the server-side component that allows + users to manage package installations and patch jobs for Compute + Engine VM instances. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "osconfig.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "osconfig.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def create_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.CreateOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the create os policy assignment method over gRPC. + + Create an OS policy assignment. + + This method also creates the first revision of the OS policy + assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + Returns: + Callable[[~.CreateOSPolicyAssignmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_os_policy_assignment" not in self._stubs: + self._stubs["create_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/CreateOSPolicyAssignment", + request_serializer=os_policy_assignments.CreateOSPolicyAssignmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_os_policy_assignment"] + + @property + def update_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.UpdateOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update os policy assignment method over gRPC. + + Update an existing OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + Returns: + Callable[[~.UpdateOSPolicyAssignmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
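
``create_channel`` above simply layers the service defaults (scopes and host) onto ``grpc_helpers.create_channel``. A short sketch of handing a pre-built channel to the transport, in which case credential arguments are ignored, as ``__init__`` documents; the quota project is illustrative:

    from google.cloud.osconfig_v1alpha.services.os_config_zonal_service.transports import (
        OsConfigZonalServiceGrpcTransport,
    )

    channel = OsConfigZonalServiceGrpcTransport.create_channel(
        quota_project_id="my-billing-project"  # illustrative
    )
    transport = OsConfigZonalServiceGrpcTransport(channel=channel)
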
+ if "update_os_policy_assignment" not in self._stubs: + self._stubs["update_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/UpdateOSPolicyAssignment", + request_serializer=os_policy_assignments.UpdateOSPolicyAssignmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_os_policy_assignment"] + + @property + def get_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.GetOSPolicyAssignmentRequest], + os_policy_assignments.OSPolicyAssignment, + ]: + r"""Return a callable for the get os policy assignment method over gRPC. + + Retrieve an existing OS policy assignment. + + This method always returns the latest revision. In order to + retrieve a previous revision of the assignment, also provide the + revision ID in the ``name`` parameter. + + Returns: + Callable[[~.GetOSPolicyAssignmentRequest], + ~.OSPolicyAssignment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_os_policy_assignment" not in self._stubs: + self._stubs["get_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetOSPolicyAssignment", + request_serializer=os_policy_assignments.GetOSPolicyAssignmentRequest.serialize, + response_deserializer=os_policy_assignments.OSPolicyAssignment.deserialize, + ) + return self._stubs["get_os_policy_assignment"] + + @property + def list_os_policy_assignments( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentsRequest], + os_policy_assignments.ListOSPolicyAssignmentsResponse, + ]: + r"""Return a callable for the list os policy assignments method over gRPC. + + List the OS policy assignments under the parent + resource. + For each OS policy assignment, the latest revision is + returned. + + Returns: + Callable[[~.ListOSPolicyAssignmentsRequest], + ~.ListOSPolicyAssignmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignments" not in self._stubs: + self._stubs["list_os_policy_assignments"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListOSPolicyAssignments", + request_serializer=os_policy_assignments.ListOSPolicyAssignmentsRequest.serialize, + response_deserializer=os_policy_assignments.ListOSPolicyAssignmentsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignments"] + + @property + def list_os_policy_assignment_revisions( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest], + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + ]: + r"""Return a callable for the list os policy assignment + revisions method over gRPC. + + List the OS policy assignment revisions for a given + OS policy assignment. + + Returns: + Callable[[~.ListOSPolicyAssignmentRevisionsRequest], + ~.ListOSPolicyAssignmentRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
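
As the docstrings above stress, the mutating RPCs return long-running operations, and cancelling the LRO cancels the rollout. A hedged sketch via the high-level client, whose flattened parameters mirror the request messages serialized by these stubs; resource names and the empty payload are illustrative:

    from google.cloud import osconfig_v1alpha

    client = osconfig_v1alpha.OsConfigZonalServiceClient()
    operation = client.create_os_policy_assignment(
        parent="projects/my-project/locations/us-central1-a",
        os_policy_assignment=osconfig_v1alpha.OSPolicyAssignment(),  # illustrative payload
        os_policy_assignment_id="demo-assignment",
    )
    # Block until the rollout finishes, or call operation.cancel() to stop it.
    assignment = operation.result()
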
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignment_revisions" not in self._stubs: + self._stubs[ + "list_os_policy_assignment_revisions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListOSPolicyAssignmentRevisions", + request_serializer=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.serialize, + response_deserializer=os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignment_revisions"] + + @property + def delete_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.DeleteOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the delete os policy assignment method over gRPC. + + Delete the OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + If the LRO completes and is not cancelled, all revisions + associated with the OS policy assignment are deleted. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + Returns: + Callable[[~.DeleteOSPolicyAssignmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_os_policy_assignment" not in self._stubs: + self._stubs["delete_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/DeleteOSPolicyAssignment", + request_serializer=os_policy_assignments.DeleteOSPolicyAssignmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_os_policy_assignment"] + + @property + def get_instance_os_policies_compliance( + self, + ) -> Callable[ + [instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest], + instance_os_policies_compliance.InstanceOSPoliciesCompliance, + ]: + r"""Return a callable for the get instance os policies + compliance method over gRPC. + + Get OS policies compliance data for the specified + Compute Engine VM instance. + + Returns: + Callable[[~.GetInstanceOSPoliciesComplianceRequest], + ~.InstanceOSPoliciesCompliance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "get_instance_os_policies_compliance" not in self._stubs:
+            self._stubs[
+                "get_instance_os_policies_compliance"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetInstanceOSPoliciesCompliance",
+                request_serializer=instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest.serialize,
+                response_deserializer=instance_os_policies_compliance.InstanceOSPoliciesCompliance.deserialize,
+            )
+        return self._stubs["get_instance_os_policies_compliance"]
+
+    @property
+    def list_instance_os_policies_compliances(
+        self,
+    ) -> Callable[
+        [instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest],
+        instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse,
+    ]:
+        r"""Return a callable for the list instance os policies
+        compliances method over gRPC.
+
+        List OS policies compliance data for all Compute
+        Engine VM instances in the specified zone.
+
+        Returns:
+            Callable[[~.ListInstanceOSPoliciesCompliancesRequest],
+                    ~.ListInstanceOSPoliciesCompliancesResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_instance_os_policies_compliances" not in self._stubs:
+            self._stubs[
+                "list_instance_os_policies_compliances"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListInstanceOSPoliciesCompliances",
+                request_serializer=instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest.serialize,
+                response_deserializer=instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse.deserialize,
+            )
+        return self._stubs["list_instance_os_policies_compliances"]
+
+    @property
+    def get_os_policy_assignment_report(
+        self,
+    ) -> Callable[
+        [os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest],
+        os_policy_assignment_reports.OSPolicyAssignmentReport,
+    ]:
+        r"""Return a callable for the get os policy assignment
+        report method over gRPC.
+
+        Get the OS policy assignment report for the
+        specified Compute Engine VM instance.
+
+        Returns:
+            Callable[[~.GetOSPolicyAssignmentReportRequest],
+                    ~.OSPolicyAssignmentReport]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_os_policy_assignment_report" not in self._stubs:
+            self._stubs[
+                "get_os_policy_assignment_report"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetOSPolicyAssignmentReport",
+                request_serializer=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.serialize,
+                response_deserializer=os_policy_assignment_reports.OSPolicyAssignmentReport.deserialize,
+            )
+        return self._stubs["get_os_policy_assignment_report"]
+
+    @property
+    def list_os_policy_assignment_reports(
+        self,
+    ) -> Callable[
+        [os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest],
+        os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse,
+    ]:
+        r"""Return a callable for the list os policy assignment
+        reports method over gRPC.
+
+        List OS policy assignment reports for all Compute
+        Engine VM instances in the specified zone.
+ + Returns: + Callable[[~.ListOSPolicyAssignmentReportsRequest], + ~.ListOSPolicyAssignmentReportsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignment_reports" not in self._stubs: + self._stubs[ + "list_os_policy_assignment_reports" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListOSPolicyAssignmentReports", + request_serializer=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest.serialize, + response_deserializer=os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignment_reports"] + + @property + def get_inventory( + self, + ) -> Callable[[inventory.GetInventoryRequest], inventory.Inventory]: + r"""Return a callable for the get inventory method over gRPC. + + Get inventory data for the specified VM instance. If the VM has + no associated inventory, the message ``NOT_FOUND`` is returned. + + Returns: + Callable[[~.GetInventoryRequest], + ~.Inventory]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_inventory" not in self._stubs: + self._stubs["get_inventory"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetInventory", + request_serializer=inventory.GetInventoryRequest.serialize, + response_deserializer=inventory.Inventory.deserialize, + ) + return self._stubs["get_inventory"] + + @property + def list_inventories( + self, + ) -> Callable[ + [inventory.ListInventoriesRequest], inventory.ListInventoriesResponse + ]: + r"""Return a callable for the list inventories method over gRPC. + + List inventory data for all VM instances in the + specified zone. + + Returns: + Callable[[~.ListInventoriesRequest], + ~.ListInventoriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_inventories" not in self._stubs: + self._stubs["list_inventories"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListInventories", + request_serializer=inventory.ListInventoriesRequest.serialize, + response_deserializer=inventory.ListInventoriesResponse.deserialize, + ) + return self._stubs["list_inventories"] + + @property + def get_vulnerability_report( + self, + ) -> Callable[ + [vulnerability.GetVulnerabilityReportRequest], vulnerability.VulnerabilityReport + ]: + r"""Return a callable for the get vulnerability report method over gRPC. + + Gets the vulnerability report for the specified VM + instance. Only VMs with inventory data have + vulnerability reports associated with them. + + Returns: + Callable[[~.GetVulnerabilityReportRequest], + ~.VulnerabilityReport]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
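
``get_inventory`` above documents that a VM without inventory data yields ``NOT_FOUND``; in Python that status surfaces as ``google.api_core.exceptions.NotFound``. A small handling sketch with an illustrative resource name:

    from google.api_core import exceptions as core_exceptions
    from google.cloud import osconfig_v1alpha

    client = osconfig_v1alpha.OsConfigZonalServiceClient()
    name = "projects/my-project/locations/us-central1-a/instances/my-vm/inventory"
    try:
        inv = client.get_inventory(name=name)
    except core_exceptions.NotFound:
        # The OS Config agent has not reported inventory for this VM yet.
        inv = None
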
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_vulnerability_report" not in self._stubs: + self._stubs["get_vulnerability_report"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetVulnerabilityReport", + request_serializer=vulnerability.GetVulnerabilityReportRequest.serialize, + response_deserializer=vulnerability.VulnerabilityReport.deserialize, + ) + return self._stubs["get_vulnerability_report"] + + @property + def list_vulnerability_reports( + self, + ) -> Callable[ + [vulnerability.ListVulnerabilityReportsRequest], + vulnerability.ListVulnerabilityReportsResponse, + ]: + r"""Return a callable for the list vulnerability reports method over gRPC. + + List vulnerability reports for all VM instances in + the specified zone. + + Returns: + Callable[[~.ListVulnerabilityReportsRequest], + ~.ListVulnerabilityReportsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_vulnerability_reports" not in self._stubs: + self._stubs["list_vulnerability_reports"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListVulnerabilityReports", + request_serializer=vulnerability.ListVulnerabilityReportsRequest.serialize, + response_deserializer=vulnerability.ListVulnerabilityReportsResponse.deserialize, + ) + return self._stubs["list_vulnerability_reports"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("OsConfigZonalServiceGrpcTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/grpc_asyncio.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..76cc64b1ff4f --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/grpc_asyncio.py @@ -0,0 +1,733 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
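
Before the AsyncIO transport this file introduces: selecting it through the high-level async client is usually enough, since the ``"grpc_asyncio"`` key resolves through the registry in ``transports/__init__.py``. A hedged usage sketch; the resource name is illustrative:

    import asyncio

    from google.cloud import osconfig_v1alpha

    async def main() -> None:
        client = osconfig_v1alpha.OsConfigZonalServiceAsyncClient(
            transport="grpc_asyncio"
        )
        assignment = await client.get_os_policy_assignment(
            name="projects/my-project/locations/us-central1-a/osPolicyAssignments/demo"
        )
        print(assignment.revision_id)

    asyncio.run(main())
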
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.osconfig_v1alpha.types import (
+    instance_os_policies_compliance,
+    inventory,
+    os_policy_assignment_reports,
+    os_policy_assignments,
+    vulnerability,
+)
+
+from .base import DEFAULT_CLIENT_INFO, OsConfigZonalServiceTransport
+from .grpc import OsConfigZonalServiceGrpcTransport
+
+
+class OsConfigZonalServiceGrpcAsyncIOTransport(OsConfigZonalServiceTransport):
+    """gRPC AsyncIO backend transport for OsConfigZonalService.
+
+    Zonal OS Config API
+
+    The OS Config service is the server-side component that allows
+    users to manage package installations and patch jobs for Compute
+    Engine VM instances.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "osconfig.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "osconfig.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.CreateOSPolicyAssignmentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create os policy assignment method over gRPC. + + Create an OS policy assignment. + + This method also creates the first revision of the OS policy + assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. 
+ + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + Returns: + Callable[[~.CreateOSPolicyAssignmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_os_policy_assignment" not in self._stubs: + self._stubs["create_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/CreateOSPolicyAssignment", + request_serializer=os_policy_assignments.CreateOSPolicyAssignmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_os_policy_assignment"] + + @property + def update_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.UpdateOSPolicyAssignmentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update os policy assignment method over gRPC. + + Update an existing OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + Returns: + Callable[[~.UpdateOSPolicyAssignmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_os_policy_assignment" not in self._stubs: + self._stubs["update_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/UpdateOSPolicyAssignment", + request_serializer=os_policy_assignments.UpdateOSPolicyAssignmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_os_policy_assignment"] + + @property + def get_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.GetOSPolicyAssignmentRequest], + Awaitable[os_policy_assignments.OSPolicyAssignment], + ]: + r"""Return a callable for the get os policy assignment method over gRPC. + + Retrieve an existing OS policy assignment. + + This method always returns the latest revision. In order to + retrieve a previous revision of the assignment, also provide the + revision ID in the ``name`` parameter. + + Returns: + Callable[[~.GetOSPolicyAssignmentRequest], + Awaitable[~.OSPolicyAssignment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_os_policy_assignment" not in self._stubs: + self._stubs["get_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetOSPolicyAssignment", + request_serializer=os_policy_assignments.GetOSPolicyAssignmentRequest.serialize, + response_deserializer=os_policy_assignments.OSPolicyAssignment.deserialize, + ) + return self._stubs["get_os_policy_assignment"] + + @property + def list_os_policy_assignments( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentsRequest], + Awaitable[os_policy_assignments.ListOSPolicyAssignmentsResponse], + ]: + r"""Return a callable for the list os policy assignments method over gRPC. + + List the OS policy assignments under the parent + resource. + For each OS policy assignment, the latest revision is + returned. + + Returns: + Callable[[~.ListOSPolicyAssignmentsRequest], + Awaitable[~.ListOSPolicyAssignmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignments" not in self._stubs: + self._stubs["list_os_policy_assignments"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListOSPolicyAssignments", + request_serializer=os_policy_assignments.ListOSPolicyAssignmentsRequest.serialize, + response_deserializer=os_policy_assignments.ListOSPolicyAssignmentsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignments"] + + @property + def list_os_policy_assignment_revisions( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest], + Awaitable[os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse], + ]: + r"""Return a callable for the list os policy assignment + revisions method over gRPC. + + List the OS policy assignment revisions for a given + OS policy assignment. + + Returns: + Callable[[~.ListOSPolicyAssignmentRevisionsRequest], + Awaitable[~.ListOSPolicyAssignmentRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_os_policy_assignment_revisions" not in self._stubs: + self._stubs[ + "list_os_policy_assignment_revisions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListOSPolicyAssignmentRevisions", + request_serializer=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.serialize, + response_deserializer=os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.deserialize, + ) + return self._stubs["list_os_policy_assignment_revisions"] + + @property + def delete_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.DeleteOSPolicyAssignmentRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete os policy assignment method over gRPC. + + Delete the OS policy assignment. + + This method creates a new revision of the OS policy assignment. + + This method returns a long running operation (LRO) that contains + the rollout details. The rollout can be cancelled by cancelling + the LRO. 
+ + If the LRO completes and is not cancelled, all revisions + associated with the OS policy assignment are deleted. + + For more information, see `Method: + projects.locations.osPolicyAssignments.operations.cancel `__. + + Returns: + Callable[[~.DeleteOSPolicyAssignmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_os_policy_assignment" not in self._stubs: + self._stubs["delete_os_policy_assignment"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/DeleteOSPolicyAssignment", + request_serializer=os_policy_assignments.DeleteOSPolicyAssignmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_os_policy_assignment"] + + @property + def get_instance_os_policies_compliance( + self, + ) -> Callable[ + [instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest], + Awaitable[instance_os_policies_compliance.InstanceOSPoliciesCompliance], + ]: + r"""Return a callable for the get instance os policies + compliance method over gRPC. + + Get OS policies compliance data for the specified + Compute Engine VM instance. + + Returns: + Callable[[~.GetInstanceOSPoliciesComplianceRequest], + Awaitable[~.InstanceOSPoliciesCompliance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instance_os_policies_compliance" not in self._stubs: + self._stubs[ + "get_instance_os_policies_compliance" + ] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetInstanceOSPoliciesCompliance", + request_serializer=instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest.serialize, + response_deserializer=instance_os_policies_compliance.InstanceOSPoliciesCompliance.deserialize, + ) + return self._stubs["get_instance_os_policies_compliance"] + + @property + def list_instance_os_policies_compliances( + self, + ) -> Callable[ + [instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest], + Awaitable[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse + ], + ]: + r"""Return a callable for the list instance os policies + compliances method over gRPC. + + List OS policies compliance data for all Compute + Engine VM instances in the specified zone. + + Returns: + Callable[[~.ListInstanceOSPoliciesCompliancesRequest], + Awaitable[~.ListInstanceOSPoliciesCompliancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "list_instance_os_policies_compliances" not in self._stubs:
+            self._stubs[
+                "list_instance_os_policies_compliances"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListInstanceOSPoliciesCompliances",
+                request_serializer=instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest.serialize,
+                response_deserializer=instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse.deserialize,
+            )
+        return self._stubs["list_instance_os_policies_compliances"]
+
+    @property
+    def get_os_policy_assignment_report(
+        self,
+    ) -> Callable[
+        [os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest],
+        Awaitable[os_policy_assignment_reports.OSPolicyAssignmentReport],
+    ]:
+        r"""Return a callable for the get os policy assignment
+        report method over gRPC.
+
+        Get the OS policy assignment report for the
+        specified Compute Engine VM instance.
+
+        Returns:
+            Callable[[~.GetOSPolicyAssignmentReportRequest],
+                    Awaitable[~.OSPolicyAssignmentReport]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_os_policy_assignment_report" not in self._stubs:
+            self._stubs[
+                "get_os_policy_assignment_report"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetOSPolicyAssignmentReport",
+                request_serializer=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.serialize,
+                response_deserializer=os_policy_assignment_reports.OSPolicyAssignmentReport.deserialize,
+            )
+        return self._stubs["get_os_policy_assignment_report"]
+
+    @property
+    def list_os_policy_assignment_reports(
+        self,
+    ) -> Callable[
+        [os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest],
+        Awaitable[os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse],
+    ]:
+        r"""Return a callable for the list os policy assignment
+        reports method over gRPC.
+
+        List OS policy assignment reports for all Compute
+        Engine VM instances in the specified zone.
+
+        Returns:
+            Callable[[~.ListOSPolicyAssignmentReportsRequest],
+                    Awaitable[~.ListOSPolicyAssignmentReportsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_os_policy_assignment_reports" not in self._stubs:
+            self._stubs[
+                "list_os_policy_assignment_reports"
+            ] = self.grpc_channel.unary_unary(
+                "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListOSPolicyAssignmentReports",
+                request_serializer=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest.serialize,
+                response_deserializer=os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.deserialize,
+            )
+        return self._stubs["list_os_policy_assignment_reports"]
+
+    @property
+    def get_inventory(
+        self,
+    ) -> Callable[[inventory.GetInventoryRequest], Awaitable[inventory.Inventory]]:
+        r"""Return a callable for the get inventory method over gRPC.
+
+        Get inventory data for the specified VM instance. If the VM has
+        no associated inventory, the message ``NOT_FOUND`` is returned.
+ + Returns: + Callable[[~.GetInventoryRequest], + Awaitable[~.Inventory]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_inventory" not in self._stubs: + self._stubs["get_inventory"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetInventory", + request_serializer=inventory.GetInventoryRequest.serialize, + response_deserializer=inventory.Inventory.deserialize, + ) + return self._stubs["get_inventory"] + + @property + def list_inventories( + self, + ) -> Callable[ + [inventory.ListInventoriesRequest], Awaitable[inventory.ListInventoriesResponse] + ]: + r"""Return a callable for the list inventories method over gRPC. + + List inventory data for all VM instances in the + specified zone. + + Returns: + Callable[[~.ListInventoriesRequest], + Awaitable[~.ListInventoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_inventories" not in self._stubs: + self._stubs["list_inventories"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListInventories", + request_serializer=inventory.ListInventoriesRequest.serialize, + response_deserializer=inventory.ListInventoriesResponse.deserialize, + ) + return self._stubs["list_inventories"] + + @property + def get_vulnerability_report( + self, + ) -> Callable[ + [vulnerability.GetVulnerabilityReportRequest], + Awaitable[vulnerability.VulnerabilityReport], + ]: + r"""Return a callable for the get vulnerability report method over gRPC. + + Gets the vulnerability report for the specified VM + instance. Only VMs with inventory data have + vulnerability reports associated with them. + + Returns: + Callable[[~.GetVulnerabilityReportRequest], + Awaitable[~.VulnerabilityReport]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_vulnerability_report" not in self._stubs: + self._stubs["get_vulnerability_report"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/GetVulnerabilityReport", + request_serializer=vulnerability.GetVulnerabilityReportRequest.serialize, + response_deserializer=vulnerability.VulnerabilityReport.deserialize, + ) + return self._stubs["get_vulnerability_report"] + + @property + def list_vulnerability_reports( + self, + ) -> Callable[ + [vulnerability.ListVulnerabilityReportsRequest], + Awaitable[vulnerability.ListVulnerabilityReportsResponse], + ]: + r"""Return a callable for the list vulnerability reports method over gRPC. + + List vulnerability reports for all VM instances in + the specified zone. + + Returns: + Callable[[~.ListVulnerabilityReportsRequest], + Awaitable[~.ListVulnerabilityReportsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_vulnerability_reports" not in self._stubs: + self._stubs["list_vulnerability_reports"] = self.grpc_channel.unary_unary( + "/google.cloud.osconfig.v1alpha.OsConfigZonalService/ListVulnerabilityReports", + request_serializer=vulnerability.ListVulnerabilityReportsRequest.serialize, + response_deserializer=vulnerability.ListVulnerabilityReportsResponse.deserialize, + ) + return self._stubs["list_vulnerability_reports"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("OsConfigZonalServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/rest.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/rest.py new file mode 100644 index 000000000000..f0983d6bac74 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/services/os_config_zonal_service/transports/rest.py @@ -0,0 +1,2233 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.osconfig_v1alpha.types import ( + instance_os_policies_compliance, + inventory, + os_policy_assignment_reports, + os_policy_assignments, + vulnerability, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import OsConfigZonalServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class OsConfigZonalServiceRestInterceptor: + """Interceptor for OsConfigZonalService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OsConfigZonalServiceRestTransport. + + .. code-block:: python + class MyCustomOsConfigZonalServiceInterceptor(OsConfigZonalServiceRestInterceptor): + def pre_create_os_policy_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_os_policy_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_os_policy_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_os_policy_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance_os_policies_compliance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance_os_policies_compliance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_inventory(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_inventory(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_os_policy_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_os_policy_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_os_policy_assignment_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_os_policy_assignment_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_vulnerability_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_vulnerability_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instance_os_policies_compliances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instance_os_policies_compliances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_inventories(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_inventories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_os_policy_assignment_reports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_os_policy_assignment_reports(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_os_policy_assignment_revisions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_os_policy_assignment_revisions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_os_policy_assignments(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_os_policy_assignments(self, response): + logging.log(f"Received response: 
{response}") + return response + + def pre_list_vulnerability_reports(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_vulnerability_reports(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_os_policy_assignment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_os_policy_assignment(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OsConfigZonalServiceRestTransport(interceptor=MyCustomOsConfigZonalServiceInterceptor()) + client = OsConfigZonalServiceClient(transport=transport) + + + """ + + def pre_create_os_policy_assignment( + self, + request: os_policy_assignments.CreateOSPolicyAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.CreateOSPolicyAssignmentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for create_os_policy_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_create_os_policy_assignment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_os_policy_assignment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_delete_os_policy_assignment( + self, + request: os_policy_assignments.DeleteOSPolicyAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.DeleteOSPolicyAssignmentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for delete_os_policy_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_delete_os_policy_assignment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_os_policy_assignment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_get_instance_os_policies_compliance( + self, + request: instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_instance_os_policies_compliance + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_get_instance_os_policies_compliance( + self, response: instance_os_policies_compliance.InstanceOSPoliciesCompliance + ) -> instance_os_policies_compliance.InstanceOSPoliciesCompliance: + """Post-rpc interceptor for get_instance_os_policies_compliance + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_inventory( + self, + request: inventory.GetInventoryRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[inventory.GetInventoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_inventory + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_get_inventory(self, response: inventory.Inventory) -> inventory.Inventory: + """Post-rpc interceptor for get_inventory + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_get_os_policy_assignment( + self, + request: os_policy_assignments.GetOSPolicyAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.GetOSPolicyAssignmentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get_os_policy_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_get_os_policy_assignment( + self, response: os_policy_assignments.OSPolicyAssignment + ) -> os_policy_assignments.OSPolicyAssignment: + """Post-rpc interceptor for get_os_policy_assignment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_get_os_policy_assignment_report( + self, + request: os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for get_os_policy_assignment_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_get_os_policy_assignment_report( + self, response: os_policy_assignment_reports.OSPolicyAssignmentReport + ) -> os_policy_assignment_reports.OSPolicyAssignmentReport: + """Post-rpc interceptor for get_os_policy_assignment_report + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_get_vulnerability_report( + self, + request: vulnerability.GetVulnerabilityReportRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[vulnerability.GetVulnerabilityReportRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_vulnerability_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_get_vulnerability_report( + self, response: vulnerability.VulnerabilityReport + ) -> vulnerability.VulnerabilityReport: + """Post-rpc interceptor for get_vulnerability_report + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_instance_os_policies_compliances( + self, + request: instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_instance_os_policies_compliances + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_instance_os_policies_compliances( + self, + response: instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse, + ) -> instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse: + """Post-rpc interceptor for list_instance_os_policies_compliances + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_inventories( + self, + request: inventory.ListInventoriesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[inventory.ListInventoriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_inventories + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_inventories( + self, response: inventory.ListInventoriesResponse + ) -> inventory.ListInventoriesResponse: + """Post-rpc interceptor for list_inventories + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_os_policy_assignment_reports( + self, + request: os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_os_policy_assignment_reports + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_os_policy_assignment_reports( + self, + response: os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + ) -> os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse: + """Post-rpc interceptor for list_os_policy_assignment_reports + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_os_policy_assignment_revisions( + self, + request: os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for list_os_policy_assignment_revisions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. 
+ """ + return request, metadata + + def post_list_os_policy_assignment_revisions( + self, response: os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse + ) -> os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse: + """Post-rpc interceptor for list_os_policy_assignment_revisions + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_os_policy_assignments( + self, + request: os_policy_assignments.ListOSPolicyAssignmentsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.ListOSPolicyAssignmentsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_os_policy_assignments + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_os_policy_assignments( + self, response: os_policy_assignments.ListOSPolicyAssignmentsResponse + ) -> os_policy_assignments.ListOSPolicyAssignmentsResponse: + """Post-rpc interceptor for list_os_policy_assignments + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_list_vulnerability_reports( + self, + request: vulnerability.ListVulnerabilityReportsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + vulnerability.ListVulnerabilityReportsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_vulnerability_reports + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_list_vulnerability_reports( + self, response: vulnerability.ListVulnerabilityReportsResponse + ) -> vulnerability.ListVulnerabilityReportsResponse: + """Post-rpc interceptor for list_vulnerability_reports + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + def pre_update_os_policy_assignment( + self, + request: os_policy_assignments.UpdateOSPolicyAssignmentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + os_policy_assignments.UpdateOSPolicyAssignmentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for update_os_policy_assignment + + Override in a subclass to manipulate the request or metadata + before they are sent to the OsConfigZonalService server. + """ + return request, metadata + + def post_update_os_policy_assignment( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_os_policy_assignment + + Override in a subclass to manipulate the response + after it is returned by the OsConfigZonalService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OsConfigZonalServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OsConfigZonalServiceRestInterceptor + + +class OsConfigZonalServiceRestTransport(OsConfigZonalServiceTransport): + """REST backend transport for OsConfigZonalService. + + Zonal OS Config API + + The OS Config service is the server-side component that allows + users to manage package installations and patch jobs for Compute + Engine VM instances. 
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "osconfig.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[OsConfigZonalServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or OsConfigZonalServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1alpha/{name=projects/*/locations/*/osPolicyAssignments/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/osPolicyAssignments/*/operations/*}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateOSPolicyAssignment(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("CreateOSPolicyAssignment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "osPolicyAssignmentId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignments.CreateOSPolicyAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create os policy + assignment method over HTTP. + + Args: + request (~.os_policy_assignments.CreateOSPolicyAssignmentRequest): + The request object. A request message to create an OS + policy assignment + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
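+
+                A hedged sketch of how this operation is typically consumed
+                through the public client, which wraps this stub in an
+                :class:`~google.api_core.operation.Operation` helper (the
+                resource names and the empty assignment are illustrative only):
+
+                .. code-block:: python
+
+                    from google.cloud import osconfig_v1alpha
+
+                    client = osconfig_v1alpha.OsConfigZonalServiceClient(transport="rest")
+                    operation = client.create_os_policy_assignment(
+                        parent="projects/my-project/locations/us-central1-a",
+                        os_policy_assignment=osconfig_v1alpha.OSPolicyAssignment(),
+                        os_policy_assignment_id="my-assignment",
+                    )
+                    result = operation.result()  # blocks until the LRO completes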
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1alpha/{parent=projects/*/locations/*}/osPolicyAssignments",
+                    "body": "os_policy_assignment",
+                },
+            ]
+            request, metadata = self._interceptor.pre_create_os_policy_assignment(
+                request, metadata
+            )
+            pb_request = os_policy_assignments.CreateOSPolicyAssignmentRequest.pb(
+                request
+            )
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = operations_pb2.Operation()
+            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_create_os_policy_assignment(resp)
+            return resp
+
+    class _DeleteOSPolicyAssignment(OsConfigZonalServiceRestStub):
+        def __hash__(self):
+            return hash("DeleteOSPolicyAssignment")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: os_policy_assignments.DeleteOSPolicyAssignmentRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> operations_pb2.Operation:
+            r"""Call the delete os policy
+            assignment method over HTTP.
+
+            Args:
+                request (~.os_policy_assignments.DeleteOSPolicyAssignmentRequest):
+                    The request object. A request message for deleting an OS
+                    policy assignment.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                long-running operation that is the
+                result of a network API call.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=projects/*/locations/*/osPolicyAssignments/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_os_policy_assignment( + request, metadata + ) + pb_request = os_policy_assignments.DeleteOSPolicyAssignmentRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_os_policy_assignment(resp) + return resp + + class _GetInstanceOSPoliciesCompliance(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetInstanceOSPoliciesCompliance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance_os_policies_compliance.InstanceOSPoliciesCompliance: + r"""Call the get instance os policies + compliance method over HTTP. + + Args: + request (~.instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest): + The request object. A request message for getting OS + policies compliance data for the given + Compute Engine VM instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.instance_os_policies_compliance.InstanceOSPoliciesCompliance: + This API resource represents the OS policies compliance + data for a Compute Engine virtual machine (VM) instance + at a given point in time. + + A Compute Engine VM can have multiple OS policy + assignments, and each assignment can have multiple OS + policies. As a result, multiple OS policies could be + applied to a single VM. + + You can use this API resource to determine both the + compliance state of your VM as well as the compliance + state of an individual OS policy. + + For more information, see `View + compliance `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/instanceOSPoliciesCompliances/*}", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_get_instance_os_policies_compliance( + request, metadata + ) + pb_request = instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = instance_os_policies_compliance.InstanceOSPoliciesCompliance() + pb_resp = instance_os_policies_compliance.InstanceOSPoliciesCompliance.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance_os_policies_compliance(resp) + return resp + + class _GetInventory(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetInventory") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: inventory.GetInventoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> inventory.Inventory: + r"""Call the get inventory method over HTTP. + + Args: + request (~.inventory.GetInventoryRequest): + The request object. A request message for getting + inventory data for the specified VM. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.inventory.Inventory: + This API resource represents the available inventory + data for a Compute Engine virtual machine (VM) instance + at a given point in time. + + You can use this API resource to determine the inventory + data of your VM. + + For more information, see `Information provided by OS + inventory + management `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/instances/*/inventory}", + }, + ] + request, metadata = self._interceptor.pre_get_inventory(request, metadata) + pb_request = inventory.GetInventoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = inventory.Inventory() + pb_resp = inventory.Inventory.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_inventory(resp) + return resp + + class _GetOSPolicyAssignment(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetOSPolicyAssignment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignments.GetOSPolicyAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignments.OSPolicyAssignment: + r"""Call the get os policy assignment method over HTTP. + + Args: + request (~.os_policy_assignments.GetOSPolicyAssignmentRequest): + The request object. A request message to get an OS policy + assignment + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.os_policy_assignments.OSPolicyAssignment: + OS policy assignment is an API resource that is used to + apply a set of OS policies to a dynamically targeted + group of Compute Engine VM instances. + + An OS policy is used to define the desired state + configuration for a Compute Engine VM instance through a + set of configuration resources that provide capabilities + such as installing or removing software packages, or + executing a script. + + For more information, see `OS policy and OS policy + assignment `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/osPolicyAssignments/*}", + }, + ] + request, metadata = self._interceptor.pre_get_os_policy_assignment( + request, metadata + ) + pb_request = os_policy_assignments.GetOSPolicyAssignmentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = os_policy_assignments.OSPolicyAssignment() + pb_resp = os_policy_assignments.OSPolicyAssignment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_os_policy_assignment(resp) + return resp + + class _GetOSPolicyAssignmentReport(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetOSPolicyAssignmentReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignment_reports.OSPolicyAssignmentReport: + r"""Call the get os policy assignment + report method over HTTP. + + Args: + request (~.os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest): + The request object. Get a report of the OS policy + assignment for a VM instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.os_policy_assignment_reports.OSPolicyAssignmentReport: + A report of the OS policy assignment + status for a given instance. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/instances/*/osPolicyAssignments/*/report}", + }, + ] + request, metadata = self._interceptor.pre_get_os_policy_assignment_report( + request, metadata + ) + pb_request = ( + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = os_policy_assignment_reports.OSPolicyAssignmentReport() + pb_resp = os_policy_assignment_reports.OSPolicyAssignmentReport.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_os_policy_assignment_report(resp) + return resp + + class _GetVulnerabilityReport(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("GetVulnerabilityReport") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vulnerability.GetVulnerabilityReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vulnerability.VulnerabilityReport: + r"""Call the get vulnerability report method over HTTP. + + Args: + request (~.vulnerability.GetVulnerabilityReportRequest): + The request object. A request message for getting the + vulnerability report for the specified + VM. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vulnerability.VulnerabilityReport: + This API resource represents the vulnerability report + for a specified Compute Engine virtual machine (VM) + instance at a given point in time. + + For more information, see `Vulnerability + reports `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/instances/*/vulnerabilityReport}", + }, + ] + request, metadata = self._interceptor.pre_get_vulnerability_report( + request, metadata + ) + pb_request = vulnerability.GetVulnerabilityReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vulnerability.VulnerabilityReport() + pb_resp = vulnerability.VulnerabilityReport.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_vulnerability_report(resp) + return resp + + class _ListInstanceOSPoliciesCompliances(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListInstanceOSPoliciesCompliances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse: + r"""Call the list instance os policies + compliances method over HTTP. + + Args: + request (~.instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest): + The request object. A request message for listing OS + policies compliance data for all Compute + Engine VMs in the given location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse: + A response message for listing OS + policies compliance data for all Compute + Engine VMs in the given location. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/instanceOSPoliciesCompliances", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_list_instance_os_policies_compliances( + request, metadata + ) + pb_request = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse() + ) + pb_resp = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instance_os_policies_compliances(resp) + return resp + + class _ListInventories(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListInventories") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: inventory.ListInventoriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> inventory.ListInventoriesResponse: + r"""Call the list inventories method over HTTP. + + Args: + request (~.inventory.ListInventoriesRequest): + The request object. A request message for listing + inventory data for all VMs in the + specified location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.inventory.ListInventoriesResponse: + A response message for listing + inventory data for all VMs in a + specified location. 
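+
+                List responses are paginated. A hedged sketch of draining all
+                pages through the public client, which wraps this stub in a
+                pager (the parent value, using ``-`` to cover all instances in
+                the zone, is illustrative):
+
+                .. code-block:: python
+
+                    from google.cloud import osconfig_v1alpha
+
+                    client = osconfig_v1alpha.OsConfigZonalServiceClient(transport="rest")
+                    for inv in client.list_inventories(
+                        parent="projects/my-project/locations/us-central1-a/instances/-"
+                    ):
+                        print(inv.name)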
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/instances/*}/inventories", + }, + ] + request, metadata = self._interceptor.pre_list_inventories( + request, metadata + ) + pb_request = inventory.ListInventoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = inventory.ListInventoriesResponse() + pb_resp = inventory.ListInventoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_inventories(resp) + return resp + + class _ListOSPolicyAssignmentReports(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListOSPolicyAssignmentReports") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse: + r"""Call the list os policy assignment + reports method over HTTP. + + Args: + request (~.os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest): + The request object. List the OS policy assignment reports + for VM instances. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse: + A response message for listing OS + Policy assignment reports including the + page of results and page token. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/instances/*/osPolicyAssignments/*}/reports", + }, + ] + request, metadata = self._interceptor.pre_list_os_policy_assignment_reports( + request, metadata + ) + pb_request = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + pb_resp = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.pb( + resp + ) + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_os_policy_assignment_reports(resp) + return resp + + class _ListOSPolicyAssignmentRevisions(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListOSPolicyAssignmentRevisions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse: + r"""Call the list os policy assignment + revisions method over HTTP. + + Args: + request (~.os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest): + The request object. A request message to list revisions + for a OS policy assignment + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse: + A response message for listing all + revisions for a OS policy assignment. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=projects/*/locations/*/osPolicyAssignments/*}:listRevisions", + }, + ] + ( + request, + metadata, + ) = self._interceptor.pre_list_os_policy_assignment_revisions( + request, metadata + ) + pb_request = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + pb_resp = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_os_policy_assignment_revisions(resp) + return resp + + class _ListOSPolicyAssignments(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListOSPolicyAssignments") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignments.ListOSPolicyAssignmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> os_policy_assignments.ListOSPolicyAssignmentsResponse: + r"""Call the list os policy + assignments method over HTTP. + + Args: + request (~.os_policy_assignments.ListOSPolicyAssignmentsRequest): + The request object. A request message to list OS policy + assignments for a parent resource + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.os_policy_assignments.ListOSPolicyAssignmentsResponse: + A response message for listing all + assignments under given parent. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*}/osPolicyAssignments", + }, + ] + request, metadata = self._interceptor.pre_list_os_policy_assignments( + request, metadata + ) + pb_request = os_policy_assignments.ListOSPolicyAssignmentsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = os_policy_assignments.ListOSPolicyAssignmentsResponse() + pb_resp = os_policy_assignments.ListOSPolicyAssignmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_os_policy_assignments(resp) + return resp + + class _ListVulnerabilityReports(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("ListVulnerabilityReports") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: vulnerability.ListVulnerabilityReportsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> vulnerability.ListVulnerabilityReportsResponse: + r"""Call the list vulnerability + reports method over HTTP. + + Args: + request (~.vulnerability.ListVulnerabilityReportsRequest): + The request object. A request message for listing + vulnerability reports for all VM + instances in the specified location. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.vulnerability.ListVulnerabilityReportsResponse: + A response message for listing + vulnerability reports for all VM + instances in the specified location. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/instances/*}/vulnerabilityReports", + }, + ] + request, metadata = self._interceptor.pre_list_vulnerability_reports( + request, metadata + ) + pb_request = vulnerability.ListVulnerabilityReportsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vulnerability.ListVulnerabilityReportsResponse() + pb_resp = vulnerability.ListVulnerabilityReportsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_vulnerability_reports(resp) + return resp + + class _UpdateOSPolicyAssignment(OsConfigZonalServiceRestStub): + def __hash__(self): + return hash("UpdateOSPolicyAssignment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: os_policy_assignments.UpdateOSPolicyAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update os policy + assignment method over HTTP. + + Args: + request (~.os_policy_assignments.UpdateOSPolicyAssignmentRequest): + The request object. A request message to update an OS + policy assignment + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{os_policy_assignment.name=projects/*/locations/*/osPolicyAssignments/*}", + "body": "os_policy_assignment", + }, + ] + request, metadata = self._interceptor.pre_update_os_policy_assignment( + request, metadata + ) + pb_request = os_policy_assignments.UpdateOSPolicyAssignmentRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_os_policy_assignment(resp) + return resp + + @property + def create_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.CreateOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateOSPolicyAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.DeleteOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteOSPolicyAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance_os_policies_compliance( + self, + ) -> Callable[ + [instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest], + instance_os_policies_compliance.InstanceOSPoliciesCompliance, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstanceOSPoliciesCompliance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_inventory( + self, + ) -> Callable[[inventory.GetInventoryRequest], inventory.Inventory]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetInventory(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.GetOSPolicyAssignmentRequest], + os_policy_assignments.OSPolicyAssignment, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOSPolicyAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_os_policy_assignment_report( + self, + ) -> Callable[ + [os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest], + os_policy_assignment_reports.OSPolicyAssignmentReport, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetOSPolicyAssignmentReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_vulnerability_report( + self, + ) -> Callable[ + [vulnerability.GetVulnerabilityReportRequest], vulnerability.VulnerabilityReport + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetVulnerabilityReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_os_policies_compliances( + self, + ) -> Callable[ + [instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest], + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstanceOSPoliciesCompliances(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_inventories( + self, + ) -> Callable[ + [inventory.ListInventoriesRequest], inventory.ListInventoriesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInventories(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_os_policy_assignment_reports( + self, + ) -> Callable[ + [os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest], + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOSPolicyAssignmentReports(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_os_policy_assignment_revisions( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest], + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOSPolicyAssignmentRevisions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_os_policy_assignments( + self, + ) -> Callable[ + [os_policy_assignments.ListOSPolicyAssignmentsRequest], + os_policy_assignments.ListOSPolicyAssignmentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
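+        # Note: each stub is constructed with the transport's shared session,
+        # host, and interceptor, so every method call reuses the same HTTP session.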
+ # In C++ this would require a dynamic_cast + return self._ListOSPolicyAssignments(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_vulnerability_reports( + self, + ) -> Callable[ + [vulnerability.ListVulnerabilityReportsRequest], + vulnerability.ListVulnerabilityReportsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListVulnerabilityReports(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_os_policy_assignment( + self, + ) -> Callable[ + [os_policy_assignments.UpdateOSPolicyAssignmentRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateOSPolicyAssignment(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("OsConfigZonalServiceRestTransport",) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/__init__.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/__init__.py new file mode 100644 index 000000000000..0bdde607a2e2 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/__init__.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .config_common import ( + OSPolicyComplianceState, + OSPolicyResourceCompliance, + OSPolicyResourceConfigStep, +) +from .instance_os_policies_compliance import ( + GetInstanceOSPoliciesComplianceRequest, + InstanceOSPoliciesCompliance, + ListInstanceOSPoliciesCompliancesRequest, + ListInstanceOSPoliciesCompliancesResponse, +) +from .inventory import ( + GetInventoryRequest, + Inventory, + InventoryView, + ListInventoriesRequest, + ListInventoriesResponse, +) +from .os_policy import OSPolicy +from .os_policy_assignment_reports import ( + GetOSPolicyAssignmentReportRequest, + ListOSPolicyAssignmentReportsRequest, + ListOSPolicyAssignmentReportsResponse, + OSPolicyAssignmentReport, +) +from .os_policy_assignments import ( + CreateOSPolicyAssignmentRequest, + DeleteOSPolicyAssignmentRequest, + GetOSPolicyAssignmentRequest, + ListOSPolicyAssignmentRevisionsRequest, + ListOSPolicyAssignmentRevisionsResponse, + ListOSPolicyAssignmentsRequest, + ListOSPolicyAssignmentsResponse, + OSPolicyAssignment, + OSPolicyAssignmentOperationMetadata, + UpdateOSPolicyAssignmentRequest, +) +from .osconfig_common import FixedOrPercent +from .vulnerability import ( + CVSSv3, + GetVulnerabilityReportRequest, + ListVulnerabilityReportsRequest, + ListVulnerabilityReportsResponse, + VulnerabilityReport, +) + +__all__ = ( + "OSPolicyResourceCompliance", + "OSPolicyResourceConfigStep", + "OSPolicyComplianceState", + "GetInstanceOSPoliciesComplianceRequest", + "InstanceOSPoliciesCompliance", + "ListInstanceOSPoliciesCompliancesRequest", + "ListInstanceOSPoliciesCompliancesResponse", + "GetInventoryRequest", + "Inventory", + "ListInventoriesRequest", + "ListInventoriesResponse", + "InventoryView", + "OSPolicy", + "GetOSPolicyAssignmentReportRequest", + "ListOSPolicyAssignmentReportsRequest", + "ListOSPolicyAssignmentReportsResponse", + "OSPolicyAssignmentReport", + "CreateOSPolicyAssignmentRequest", + "DeleteOSPolicyAssignmentRequest", + "GetOSPolicyAssignmentRequest", + "ListOSPolicyAssignmentRevisionsRequest", + "ListOSPolicyAssignmentRevisionsResponse", + "ListOSPolicyAssignmentsRequest", + "ListOSPolicyAssignmentsResponse", + "OSPolicyAssignment", + "OSPolicyAssignmentOperationMetadata", + "UpdateOSPolicyAssignmentRequest", + "FixedOrPercent", + "CVSSv3", + "GetVulnerabilityReportRequest", + "ListVulnerabilityReportsRequest", + "ListVulnerabilityReportsResponse", + "VulnerabilityReport", +) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/config_common.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/config_common.py new file mode 100644 index 000000000000..7fa9905ae22a --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/config_common.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1alpha", + manifest={ + "OSPolicyComplianceState", + "OSPolicyResourceConfigStep", + "OSPolicyResourceCompliance", + }, +) + + +class OSPolicyComplianceState(proto.Enum): + r"""Supported OSPolicy compliance states. + + Values: + OS_POLICY_COMPLIANCE_STATE_UNSPECIFIED (0): + Default value. This value is unused. + COMPLIANT (1): + Compliant state. + NON_COMPLIANT (2): + Non-compliant state + UNKNOWN (3): + Unknown compliance state. + NO_OS_POLICIES_APPLICABLE (4): + No applicable OS policies were found for the + instance. This state is only applicable to the + instance. + """ + _pb_options = {"deprecated": True} + OS_POLICY_COMPLIANCE_STATE_UNSPECIFIED = 0 + COMPLIANT = 1 + NON_COMPLIANT = 2 + UNKNOWN = 3 + NO_OS_POLICIES_APPLICABLE = 4 + + +class OSPolicyResourceConfigStep(proto.Message): + r"""Step performed by the OS Config agent for configuring an + ``OSPolicyResource`` to its desired state. + + Attributes: + type_ (google.cloud.osconfig_v1alpha.types.OSPolicyResourceConfigStep.Type): + Configuration step type. + outcome (google.cloud.osconfig_v1alpha.types.OSPolicyResourceConfigStep.Outcome): + Outcome of the configuration step. + error_message (str): + An error message recorded during the + execution of this step. Only populated when + outcome is FAILED. + """ + + class Type(proto.Enum): + r"""Supported configuration step types + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + VALIDATION (1): + Validation to detect resource conflicts, + schema errors, etc. + DESIRED_STATE_CHECK (2): + Check the current desired state status of the + resource. + DESIRED_STATE_ENFORCEMENT (3): + Enforce the desired state for a resource that + is not in desired state. + DESIRED_STATE_CHECK_POST_ENFORCEMENT (4): + Re-check desired state status for a resource + after enforcement of all resources in the + current configuration run. + + This step is used to determine the final desired + state status for the resource. It accounts for + any resources that might have drifted from their + desired state due to side effects from + configuring other resources during the current + configuration run. + """ + _pb_options = {"deprecated": True} + TYPE_UNSPECIFIED = 0 + VALIDATION = 1 + DESIRED_STATE_CHECK = 2 + DESIRED_STATE_ENFORCEMENT = 3 + DESIRED_STATE_CHECK_POST_ENFORCEMENT = 4 + + class Outcome(proto.Enum): + r"""Supported outcomes for a configuration step. + + Values: + OUTCOME_UNSPECIFIED (0): + Default value. This value is unused. + SUCCEEDED (1): + The step succeeded. + FAILED (2): + The step failed. + """ + _pb_options = {"deprecated": True} + OUTCOME_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + outcome: Outcome = proto.Field( + proto.ENUM, + number=2, + enum=Outcome, + ) + error_message: str = proto.Field( + proto.STRING, + number=3, + ) + + +class OSPolicyResourceCompliance(proto.Message): + r"""Compliance data for an OS policy resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + os_policy_resource_id (str): + The id of the OS policy resource. + config_steps (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyResourceConfigStep]): + Ordered list of configuration steps taken by + the agent for the OS policy resource. 
+ state (google.cloud.osconfig_v1alpha.types.OSPolicyComplianceState): + Compliance state of the OS policy resource. + exec_resource_output (google.cloud.osconfig_v1alpha.types.OSPolicyResourceCompliance.ExecResourceOutput): + ExecResource specific output. + + This field is a member of `oneof`_ ``output``. + """ + + class ExecResourceOutput(proto.Message): + r"""ExecResource specific output. + + Attributes: + enforcement_output (bytes): + Output from Enforcement phase output file (if + run). Output size is limited to 100K bytes. + """ + + enforcement_output: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + os_policy_resource_id: str = proto.Field( + proto.STRING, + number=1, + ) + config_steps: MutableSequence["OSPolicyResourceConfigStep"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OSPolicyResourceConfigStep", + ) + state: "OSPolicyComplianceState" = proto.Field( + proto.ENUM, + number=3, + enum="OSPolicyComplianceState", + ) + exec_resource_output: ExecResourceOutput = proto.Field( + proto.MESSAGE, + number=4, + oneof="output", + message=ExecResourceOutput, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/instance_os_policies_compliance.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/instance_os_policies_compliance.py new file mode 100644 index 000000000000..fa99a2c99d45 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/instance_os_policies_compliance.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.osconfig_v1alpha.types import config_common + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1alpha", + manifest={ + "InstanceOSPoliciesCompliance", + "GetInstanceOSPoliciesComplianceRequest", + "ListInstanceOSPoliciesCompliancesRequest", + "ListInstanceOSPoliciesCompliancesResponse", + }, +) + + +class InstanceOSPoliciesCompliance(proto.Message): + r"""This API resource represents the OS policies compliance data for a + Compute Engine virtual machine (VM) instance at a given point in + time. + + A Compute Engine VM can have multiple OS policy assignments, and + each assignment can have multiple OS policies. As a result, multiple + OS policies could be applied to a single VM. + + You can use this API resource to determine both the compliance state + of your VM as well as the compliance state of an individual OS + policy. + + For more information, see `View + compliance `__. + + Attributes: + name (str): + Output only. The ``InstanceOSPoliciesCompliance`` API + resource name. + + Format: + ``projects/{project_number}/locations/{location}/instanceOSPoliciesCompliances/{instance_id}`` + instance (str): + Output only. 
The Compute Engine VM instance
+                name.
+            state (google.cloud.osconfig_v1alpha.types.OSPolicyComplianceState):
+                Output only. Compliance state of the VM.
+            detailed_state (str):
+                Output only. Detailed compliance state of the VM. This field
+                is populated only when compliance state is ``UNKNOWN``.
+
+                It may contain one of the following values:
+
+                -  ``no-compliance-data``: Compliance data is not available
+                   for this VM.
+                -  ``no-agent-detected``: OS Config agent is not detected
+                   for this VM.
+                -  ``config-not-supported-by-agent``: The version of the OS
+                   Config agent running on this VM does not support
+                   configuration management.
+                -  ``inactive``: VM is not running.
+                -  ``internal-service-errors``: There were internal service
+                   errors encountered while enforcing compliance.
+                -  ``agent-errors``: OS config agent encountered errors
+                   while enforcing compliance.
+            detailed_state_reason (str):
+                Output only. The reason for the ``detailed_state`` of the VM
+                (if any).
+            os_policy_compliances (MutableSequence[google.cloud.osconfig_v1alpha.types.InstanceOSPoliciesCompliance.OSPolicyCompliance]):
+                Output only. Compliance data for each ``OSPolicy`` that is
+                applied to the VM.
+            last_compliance_check_time (google.protobuf.timestamp_pb2.Timestamp):
+                Output only. Timestamp of the last compliance
+                check for the VM.
+            last_compliance_run_id (str):
+                Output only. Unique identifier for the last
+                compliance run. This id will be logged by the OS
+                config agent during a compliance run and can be
+                used for debugging and tracing purposes.
+    """
+
+    class OSPolicyCompliance(proto.Message):
+        r"""Compliance data for an OS policy.
+
+        Attributes:
+            os_policy_id (str):
+                The OS policy id.
+            os_policy_assignment (str):
+                Reference to the ``OSPolicyAssignment`` API resource that
+                the ``OSPolicy`` belongs to.
+
+                Format:
+                ``projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revision_id}``
+            state (google.cloud.osconfig_v1alpha.types.OSPolicyComplianceState):
+                Compliance state of the OS policy.
+            os_policy_resource_compliances (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyResourceCompliance]):
+                Compliance data for each ``OSPolicyResource`` that is
+                applied to the VM.
+ """ + + os_policy_id: str = proto.Field( + proto.STRING, + number=1, + ) + os_policy_assignment: str = proto.Field( + proto.STRING, + number=2, + ) + state: config_common.OSPolicyComplianceState = proto.Field( + proto.ENUM, + number=4, + enum=config_common.OSPolicyComplianceState, + ) + os_policy_resource_compliances: MutableSequence[ + config_common.OSPolicyResourceCompliance + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=config_common.OSPolicyResourceCompliance, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + instance: str = proto.Field( + proto.STRING, + number=2, + ) + state: config_common.OSPolicyComplianceState = proto.Field( + proto.ENUM, + number=3, + enum=config_common.OSPolicyComplianceState, + ) + detailed_state: str = proto.Field( + proto.STRING, + number=4, + ) + detailed_state_reason: str = proto.Field( + proto.STRING, + number=5, + ) + os_policy_compliances: MutableSequence[OSPolicyCompliance] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=OSPolicyCompliance, + ) + last_compliance_check_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + last_compliance_run_id: str = proto.Field( + proto.STRING, + number=8, + ) + + +class GetInstanceOSPoliciesComplianceRequest(proto.Message): + r"""A request message for getting OS policies compliance data for + the given Compute Engine VM instance. + + Attributes: + name (str): + Required. API resource name for instance OS policies + compliance resource. + + Format: + ``projects/{project}/locations/{location}/instanceOSPoliciesCompliances/{instance}`` + + For ``{project}``, either Compute Engine project-number or + project-id can be provided. For ``{instance}``, either + Compute Engine VM instance-id or instance-name can be + provided. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListInstanceOSPoliciesCompliancesRequest(proto.Message): + r"""A request message for listing OS policies compliance data for + all Compute Engine VMs in the given location. + + Attributes: + parent (str): + Required. The parent resource name. + + Format: ``projects/{project}/locations/{location}`` + + For ``{project}``, either Compute Engine project-number or + project-id can be provided. + page_size (int): + The maximum number of results to return. + page_token (str): + A pagination token returned from a previous call to + ``ListInstanceOSPoliciesCompliances`` that indicates where + this listing should continue from. + filter (str): + If provided, this field specifies the criteria that must be + met by a ``InstanceOSPoliciesCompliance`` API resource to be + included in the response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListInstanceOSPoliciesCompliancesResponse(proto.Message): + r"""A response message for listing OS policies compliance data + for all Compute Engine VMs in the given location. + + Attributes: + instance_os_policies_compliances (MutableSequence[google.cloud.osconfig_v1alpha.types.InstanceOSPoliciesCompliance]): + List of instance OS policies compliance + objects. + next_page_token (str): + The pagination token to retrieve the next + page of instance OS policies compliance objects. 
+ """ + + @property + def raw_page(self): + return self + + instance_os_policies_compliances: MutableSequence[ + "InstanceOSPoliciesCompliance" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="InstanceOSPoliciesCompliance", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/inventory.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/inventory.py new file mode 100644 index 000000000000..8bff670e8e7a --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/inventory.py @@ -0,0 +1,743 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1alpha", + manifest={ + "InventoryView", + "Inventory", + "GetInventoryRequest", + "ListInventoriesRequest", + "ListInventoriesResponse", + }, +) + + +class InventoryView(proto.Enum): + r"""The view for inventory objects. + + Values: + INVENTORY_VIEW_UNSPECIFIED (0): + The default value. + The API defaults to the BASIC view. + BASIC (1): + Returns the basic inventory information that includes + ``os_info``. + FULL (2): + Returns all fields. + """ + INVENTORY_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + +class Inventory(proto.Message): + r"""This API resource represents the available inventory data for a + Compute Engine virtual machine (VM) instance at a given point in + time. + + You can use this API resource to determine the inventory data of + your VM. + + For more information, see `Information provided by OS inventory + management `__. + + Attributes: + name (str): + Output only. The ``Inventory`` API resource name. + + Format: + ``projects/{project_number}/locations/{location}/instances/{instance_id}/inventory`` + os_info (google.cloud.osconfig_v1alpha.types.Inventory.OsInfo): + Output only. Base level operating system + information for the VM. + items (MutableMapping[str, google.cloud.osconfig_v1alpha.types.Inventory.Item]): + Output only. Inventory items related to the + VM keyed by an opaque unique identifier for each + inventory item. The identifier is unique to each + distinct and addressable inventory item and will + change, when there is a new package version. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp of the last reported + inventory for the VM. + """ + + class OsInfo(proto.Message): + r"""Operating system information for the VM. + + Attributes: + hostname (str): + The VM hostname. + long_name (str): + The operating system long name. + For example 'Debian GNU/Linux 9' or 'Microsoft + Window Server 2019 Datacenter'. 
+            short_name (str):
+                The operating system short name.
+                For example, 'windows' or 'debian'.
+            version (str):
+                The version of the operating system.
+            architecture (str):
+                The system architecture of the operating
+                system.
+            kernel_version (str):
+                The kernel version of the operating system.
+            kernel_release (str):
+                The kernel release of the operating system.
+            osconfig_agent_version (str):
+                The current version of the OS Config agent
+                running on the VM.
+        """
+
+        hostname: str = proto.Field(
+            proto.STRING,
+            number=9,
+        )
+        long_name: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        short_name: str = proto.Field(
+            proto.STRING,
+            number=3,
+        )
+        version: str = proto.Field(
+            proto.STRING,
+            number=4,
+        )
+        architecture: str = proto.Field(
+            proto.STRING,
+            number=5,
+        )
+        kernel_version: str = proto.Field(
+            proto.STRING,
+            number=6,
+        )
+        kernel_release: str = proto.Field(
+            proto.STRING,
+            number=7,
+        )
+        osconfig_agent_version: str = proto.Field(
+            proto.STRING,
+            number=8,
+        )
+
+    class Item(proto.Message):
+        r"""A single piece of inventory on a VM.
+
+        This message has `oneof`_ fields (mutually exclusive fields).
+        For each oneof, at most one member field can be set at the same time.
+        Setting any member of the oneof automatically clears all other
+        members.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            id (str):
+                Identifier for this item, unique across items
+                for this VM.
+            origin_type (google.cloud.osconfig_v1alpha.types.Inventory.Item.OriginType):
+                The origin of this inventory item.
+            create_time (google.protobuf.timestamp_pb2.Timestamp):
+                When this inventory item was first detected.
+            update_time (google.protobuf.timestamp_pb2.Timestamp):
+                When this inventory item was last modified.
+            type_ (google.cloud.osconfig_v1alpha.types.Inventory.Item.Type):
+                The specific type of inventory, correlating
+                to its specific details.
+            installed_package (google.cloud.osconfig_v1alpha.types.Inventory.SoftwarePackage):
+                Software package present on the VM instance.
+
+                This field is a member of `oneof`_ ``details``.
+            available_package (google.cloud.osconfig_v1alpha.types.Inventory.SoftwarePackage):
+                Software package available to be installed on
+                the VM instance.
+
+                This field is a member of `oneof`_ ``details``.
+        """
+
+        class OriginType(proto.Enum):
+            r"""The origin of a specific inventory item.
+
+            Values:
+                ORIGIN_TYPE_UNSPECIFIED (0):
+                    Invalid. An origin type must be specified.
+                INVENTORY_REPORT (1):
+                    This inventory item was discovered as the
+                    result of the agent reporting inventory via the
+                    reporting API.
+            """
+            ORIGIN_TYPE_UNSPECIFIED = 0
+            INVENTORY_REPORT = 1
+
+        class Type(proto.Enum):
+            r"""The different types of inventory that are tracked on a VM.
+
+            Values:
+                TYPE_UNSPECIFIED (0):
+                    Invalid. A type must be specified.
+                INSTALLED_PACKAGE (1):
+                    This represents a package that is installed
+                    on the VM.
+                AVAILABLE_PACKAGE (2):
+                    This represents an update that is available
+                    for a package.
+ """ + TYPE_UNSPECIFIED = 0 + INSTALLED_PACKAGE = 1 + AVAILABLE_PACKAGE = 2 + + id: str = proto.Field( + proto.STRING, + number=1, + ) + origin_type: "Inventory.Item.OriginType" = proto.Field( + proto.ENUM, + number=2, + enum="Inventory.Item.OriginType", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + type_: "Inventory.Item.Type" = proto.Field( + proto.ENUM, + number=5, + enum="Inventory.Item.Type", + ) + installed_package: "Inventory.SoftwarePackage" = proto.Field( + proto.MESSAGE, + number=6, + oneof="details", + message="Inventory.SoftwarePackage", + ) + available_package: "Inventory.SoftwarePackage" = proto.Field( + proto.MESSAGE, + number=7, + oneof="details", + message="Inventory.SoftwarePackage", + ) + + class SoftwarePackage(proto.Message): + r"""Software package information of the operating system. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + yum_package (google.cloud.osconfig_v1alpha.types.Inventory.VersionedPackage): + Yum package info. For details about the yum package manager, + see + https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/deployment_guide/ch-yum. + + This field is a member of `oneof`_ ``details``. + apt_package (google.cloud.osconfig_v1alpha.types.Inventory.VersionedPackage): + Details of an APT package. + For details about the apt package manager, see + https://wiki.debian.org/Apt. + + This field is a member of `oneof`_ ``details``. + zypper_package (google.cloud.osconfig_v1alpha.types.Inventory.VersionedPackage): + Details of a Zypper package. For details about the Zypper + package manager, see + https://en.opensuse.org/SDB:Zypper_manual. + + This field is a member of `oneof`_ ``details``. + googet_package (google.cloud.osconfig_v1alpha.types.Inventory.VersionedPackage): + Details of a Googet package. + For details about the googet package manager, + see https://github.com/google/googet. + + This field is a member of `oneof`_ ``details``. + zypper_patch (google.cloud.osconfig_v1alpha.types.Inventory.ZypperPatch): + Details of a Zypper patch. For details about the Zypper + package manager, see + https://en.opensuse.org/SDB:Zypper_manual. + + This field is a member of `oneof`_ ``details``. + wua_package (google.cloud.osconfig_v1alpha.types.Inventory.WindowsUpdatePackage): + Details of a Windows Update package. See + https://docs.microsoft.com/en-us/windows/win32/api/_wua/ for + information about Windows Update. + + This field is a member of `oneof`_ ``details``. + qfe_package (google.cloud.osconfig_v1alpha.types.Inventory.WindowsQuickFixEngineeringPackage): + Details of a Windows Quick Fix engineering + package. See + https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-quickfixengineering + for info in Windows Quick Fix Engineering. + + This field is a member of `oneof`_ ``details``. + cos_package (google.cloud.osconfig_v1alpha.types.Inventory.VersionedPackage): + Details of a COS package. + + This field is a member of `oneof`_ ``details``. 
+            windows_application (google.cloud.osconfig_v1alpha.types.Inventory.WindowsApplication):
+                Details of a Windows application.
+
+                This field is a member of `oneof`_ ``details``.
+        """
+
+        yum_package: "Inventory.VersionedPackage" = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            oneof="details",
+            message="Inventory.VersionedPackage",
+        )
+        apt_package: "Inventory.VersionedPackage" = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            oneof="details",
+            message="Inventory.VersionedPackage",
+        )
+        zypper_package: "Inventory.VersionedPackage" = proto.Field(
+            proto.MESSAGE,
+            number=3,
+            oneof="details",
+            message="Inventory.VersionedPackage",
+        )
+        googet_package: "Inventory.VersionedPackage" = proto.Field(
+            proto.MESSAGE,
+            number=4,
+            oneof="details",
+            message="Inventory.VersionedPackage",
+        )
+        zypper_patch: "Inventory.ZypperPatch" = proto.Field(
+            proto.MESSAGE,
+            number=5,
+            oneof="details",
+            message="Inventory.ZypperPatch",
+        )
+        wua_package: "Inventory.WindowsUpdatePackage" = proto.Field(
+            proto.MESSAGE,
+            number=6,
+            oneof="details",
+            message="Inventory.WindowsUpdatePackage",
+        )
+        qfe_package: "Inventory.WindowsQuickFixEngineeringPackage" = proto.Field(
+            proto.MESSAGE,
+            number=7,
+            oneof="details",
+            message="Inventory.WindowsQuickFixEngineeringPackage",
+        )
+        cos_package: "Inventory.VersionedPackage" = proto.Field(
+            proto.MESSAGE,
+            number=8,
+            oneof="details",
+            message="Inventory.VersionedPackage",
+        )
+        windows_application: "Inventory.WindowsApplication" = proto.Field(
+            proto.MESSAGE,
+            number=9,
+            oneof="details",
+            message="Inventory.WindowsApplication",
+        )
+
+    class VersionedPackage(proto.Message):
+        r"""Information related to a standard versioned package.
+        This includes package info for APT, Yum, Zypper, and Googet
+        package managers.
+
+        Attributes:
+            package_name (str):
+                The name of the package.
+            architecture (str):
+                The system architecture this package is
+                intended for.
+            version (str):
+                The version of the package.
+        """
+
+        package_name: str = proto.Field(
+            proto.STRING,
+            number=4,
+        )
+        architecture: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        version: str = proto.Field(
+            proto.STRING,
+            number=3,
+        )
+
+    class ZypperPatch(proto.Message):
+        r"""Details related to a Zypper Patch.
+
+        Attributes:
+            patch_name (str):
+                The name of the patch.
+            category (str):
+                The category of the patch.
+            severity (str):
+                The severity specified for this patch.
+            summary (str):
+                Any summary information provided about this
+                patch.
+        """
+
+        patch_name: str = proto.Field(
+            proto.STRING,
+            number=5,
+        )
+        category: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        severity: str = proto.Field(
+            proto.STRING,
+            number=3,
+        )
+        summary: str = proto.Field(
+            proto.STRING,
+            number=4,
+        )
+
+    class WindowsUpdatePackage(proto.Message):
+        r"""Details related to a Windows Update package. Field data and names
+        are taken from Windows Update API IUpdate Interface:
+        https://docs.microsoft.com/en-us/windows/win32/api/_wua/ Descriptive
+        fields like title and description are localized based on the locale
+        of the VM being updated.
+
+        Attributes:
+            title (str):
+                The localized title of the update package.
+            description (str):
+                The localized description of the update
+                package.
+            categories (MutableSequence[google.cloud.osconfig_v1alpha.types.Inventory.WindowsUpdatePackage.WindowsUpdateCategory]):
+                The categories that are associated with this
+                update package.
+ kb_article_ids (MutableSequence[str]): + A collection of Microsoft Knowledge Base + article IDs that are associated with the update + package. + support_url (str): + A hyperlink to the language-specific support + information for the update. + more_info_urls (MutableSequence[str]): + A collection of URLs that provide more + information about the update package. + update_id (str): + Gets the identifier of an update package. + Stays the same across revisions. + revision_number (int): + The revision number of this update package. + last_deployment_change_time (google.protobuf.timestamp_pb2.Timestamp): + The last published date of the update, in + (UTC) date and time. + """ + + class WindowsUpdateCategory(proto.Message): + r"""Categories specified by the Windows Update. + + Attributes: + id (str): + The identifier of the windows update + category. + name (str): + The name of the windows update category. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + + title: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + categories: MutableSequence[ + "Inventory.WindowsUpdatePackage.WindowsUpdateCategory" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Inventory.WindowsUpdatePackage.WindowsUpdateCategory", + ) + kb_article_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + support_url: str = proto.Field( + proto.STRING, + number=11, + ) + more_info_urls: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + update_id: str = proto.Field( + proto.STRING, + number=6, + ) + revision_number: int = proto.Field( + proto.INT32, + number=7, + ) + last_deployment_change_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + + class WindowsQuickFixEngineeringPackage(proto.Message): + r"""Information related to a Quick Fix Engineering package. + Fields are taken from Windows QuickFixEngineering Interface and + match the source names: + + https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-quickfixengineering + + Attributes: + caption (str): + A short textual description of the QFE + update. + description (str): + A textual description of the QFE update. + hot_fix_id (str): + Unique identifier associated with a + particular QFE update. + install_time (google.protobuf.timestamp_pb2.Timestamp): + Date that the QFE update was installed. Mapped from + installed_on field. + """ + + caption: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + hot_fix_id: str = proto.Field( + proto.STRING, + number=3, + ) + install_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + class WindowsApplication(proto.Message): + r"""Contains information about a Windows application that is + retrieved from the Windows Registry. For more information about + these fields, see: + + https://docs.microsoft.com/en-us/windows/win32/msi/uninstall-registry-key + + Attributes: + display_name (str): + The name of the application or product. + display_version (str): + The version of the product or application in + string format. + publisher (str): + The name of the manufacturer for the product + or application. + install_date (google.type.date_pb2.Date): + The last time this product received service. 
+ The value of this property is replaced each time + a patch is applied or removed from the product + or the command-line option is used to repair the + product. + help_link (str): + The internet address for technical support. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + display_version: str = proto.Field( + proto.STRING, + number=2, + ) + publisher: str = proto.Field( + proto.STRING, + number=3, + ) + install_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=4, + message=date_pb2.Date, + ) + help_link: str = proto.Field( + proto.STRING, + number=5, + ) + + name: str = proto.Field( + proto.STRING, + number=3, + ) + os_info: OsInfo = proto.Field( + proto.MESSAGE, + number=1, + message=OsInfo, + ) + items: MutableMapping[str, Item] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message=Item, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class GetInventoryRequest(proto.Message): + r"""A request message for getting inventory data for the + specified VM. + + Attributes: + name (str): + Required. API resource name for inventory resource. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/inventory`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, either + Compute Engine ``instance-id`` or ``instance-name`` can be + provided. + view (google.cloud.osconfig_v1alpha.types.InventoryView): + Inventory view indicating what information + should be included in the inventory resource. If + unspecified, the default view is BASIC. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "InventoryView" = proto.Field( + proto.ENUM, + number=2, + enum="InventoryView", + ) + + +class ListInventoriesRequest(proto.Message): + r"""A request message for listing inventory data for all VMs in + the specified location. + + Attributes: + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/-`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. + view (google.cloud.osconfig_v1alpha.types.InventoryView): + Inventory view indicating what information + should be included in the inventory resource. If + unspecified, the default view is BASIC. + page_size (int): + The maximum number of results to return. + page_token (str): + A pagination token returned from a previous call to + ``ListInventories`` that indicates where this listing should + continue from. + filter (str): + If provided, this field specifies the criteria that must be + met by a ``Inventory`` API resource to be included in the + response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + view: "InventoryView" = proto.Field( + proto.ENUM, + number=2, + enum="InventoryView", + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInventoriesResponse(proto.Message): + r"""A response message for listing inventory data for all VMs in + a specified location. + + Attributes: + inventories (MutableSequence[google.cloud.osconfig_v1alpha.types.Inventory]): + List of inventory objects. + next_page_token (str): + The pagination token to retrieve the next + page of inventory objects. 
+ """ + + @property + def raw_page(self): + return self + + inventories: MutableSequence["Inventory"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Inventory", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy.py new file mode 100644 index 000000000000..71fb4294dc59 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy.py @@ -0,0 +1,1088 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1alpha", + manifest={ + "OSPolicy", + }, +) + + +class OSPolicy(proto.Message): + r"""An OS policy defines the desired state configuration for a + VM. + + Attributes: + id (str): + Required. The id of the OS policy with the following + restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the assignment. + description (str): + Policy description. + Length of the description is limited to 1024 + characters. + mode (google.cloud.osconfig_v1alpha.types.OSPolicy.Mode): + Required. Policy mode + resource_groups (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicy.ResourceGroup]): + Required. List of resource groups for the policy. For a + particular VM, resource groups are evaluated in the order + specified and the first resource group that is applicable is + selected and the rest are ignored. + + If none of the resource groups are applicable for a VM, the + VM is considered to be non-compliant w.r.t this policy. This + behavior can be toggled by the flag + ``allow_no_resource_group_match`` + allow_no_resource_group_match (bool): + This flag determines the OS policy compliance status when + none of the resource groups within the policy are applicable + for a VM. Set this value to ``true`` if the policy needs to + be reported as compliant even if the policy has nothing to + validate or enforce. + """ + + class Mode(proto.Enum): + r"""Policy mode + + Values: + MODE_UNSPECIFIED (0): + Invalid mode + VALIDATION (1): + This mode checks if the configuration + resources in the policy are in their desired + state. No actions are performed if they are not + in the desired state. This mode is used for + reporting purposes. + ENFORCEMENT (2): + This mode checks if the configuration + resources in the policy are in their desired + state, and if not, enforces the desired state. 
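+
+        A sketch of a validation-only (drift-reporting) policy; the ids and
+        the APT package name are illustrative placeholders::
+
+            from google.cloud import osconfig_v1alpha
+
+            OSPolicy = osconfig_v1alpha.OSPolicy
+            policy = OSPolicy(
+                id="report-cowsay-installed",
+                mode=OSPolicy.Mode.VALIDATION,
+                resource_groups=[
+                    OSPolicy.ResourceGroup(
+                        resources=[
+                            OSPolicy.Resource(
+                                id="cowsay-pkg",
+                                # pkg is one member of the resource_type oneof.
+                                pkg=OSPolicy.Resource.PackageResource(
+                                    desired_state=OSPolicy.Resource.PackageResource.DesiredState.INSTALLED,
+                                    apt=OSPolicy.Resource.PackageResource.APT(name="cowsay"),
+                                ),
+                            )
+                        ]
+                    )
+                ],
+            )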
+ """ + MODE_UNSPECIFIED = 0 + VALIDATION = 1 + ENFORCEMENT = 2 + + class OSFilter(proto.Message): + r"""Filtering criteria to select VMs based on OS details. + + Attributes: + os_short_name (str): + This should match OS short name emitted by + the OS inventory agent. An empty value matches + any OS. + os_version (str): + This value should match the version emitted by the OS + inventory agent. Prefix matches are supported if asterisk(*) + is provided as the last character. For example, to match all + versions with a major version of ``7``, specify the + following value for this field ``7.*`` + """ + + os_short_name: str = proto.Field( + proto.STRING, + number=1, + ) + os_version: str = proto.Field( + proto.STRING, + number=2, + ) + + class InventoryFilter(proto.Message): + r"""Filtering criteria to select VMs based on inventory details. + + Attributes: + os_short_name (str): + Required. The OS short name + os_version (str): + The OS version + + Prefix matches are supported if asterisk(*) is provided as + the last character. For example, to match all versions with + a major version of ``7``, specify the following value for + this field ``7.*`` + + An empty string matches all OS versions. + """ + + os_short_name: str = proto.Field( + proto.STRING, + number=1, + ) + os_version: str = proto.Field( + proto.STRING, + number=2, + ) + + class Resource(proto.Message): + r"""An OS policy resource is used to define the desired state + configuration and provides a specific functionality like + installing/removing packages, executing a script etc. + + The system ensures that resources are always in their desired + state by taking necessary actions if they have drifted from + their desired state. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + Required. The id of the resource with the following + restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the OS policy. + pkg (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource): + Package resource + + This field is a member of `oneof`_ ``resource_type``. + repository (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.RepositoryResource): + Package repository resource + + This field is a member of `oneof`_ ``resource_type``. + exec_ (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.ExecResource): + Exec resource + + This field is a member of `oneof`_ ``resource_type``. + file (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.FileResource): + File resource + + This field is a member of `oneof`_ ``resource_type``. + """ + + class File(proto.Message): + r"""A remote or local file. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + remote (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.File.Remote): + A generic remote file. + + This field is a member of `oneof`_ ``type``. 
+ gcs (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.File.Gcs): + A Cloud Storage object. + + This field is a member of `oneof`_ ``type``. + local_path (str): + A local path within the VM to use. + + This field is a member of `oneof`_ ``type``. + allow_insecure (bool): + Defaults to false. When false, files are + subject to validations based on the file type: + + Remote: A checksum must be specified. + Cloud Storage: An object generation number must + be specified. + """ + + class Remote(proto.Message): + r"""Specifies a file available via some URI. + + Attributes: + uri (str): + Required. URI from which to fetch the object. It should + contain both the protocol and path following the format + ``{protocol}://{location}``. + sha256_checksum (str): + SHA256 checksum of the remote file. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + sha256_checksum: str = proto.Field( + proto.STRING, + number=2, + ) + + class Gcs(proto.Message): + r"""Specifies a file available as a Cloud Storage Object. + + Attributes: + bucket (str): + Required. Bucket of the Cloud Storage object. + object_ (str): + Required. Name of the Cloud Storage object. + generation (int): + Generation number of the Cloud Storage + object. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + object_: str = proto.Field( + proto.STRING, + number=2, + ) + generation: int = proto.Field( + proto.INT64, + number=3, + ) + + remote: "OSPolicy.Resource.File.Remote" = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message="OSPolicy.Resource.File.Remote", + ) + gcs: "OSPolicy.Resource.File.Gcs" = proto.Field( + proto.MESSAGE, + number=2, + oneof="type", + message="OSPolicy.Resource.File.Gcs", + ) + local_path: str = proto.Field( + proto.STRING, + number=3, + oneof="type", + ) + allow_insecure: bool = proto.Field( + proto.BOOL, + number=4, + ) + + class PackageResource(proto.Message): + r"""A resource that manages a system package. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + desired_state (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource.DesiredState): + Required. The desired state the agent should + maintain for this package. + apt (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource.APT): + A package managed by Apt. + + This field is a member of `oneof`_ ``system_package``. + deb (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource.Deb): + A deb package file. + + This field is a member of `oneof`_ ``system_package``. + yum (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource.YUM): + A package managed by YUM. + + This field is a member of `oneof`_ ``system_package``. + zypper (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource.Zypper): + A package managed by Zypper. + + This field is a member of `oneof`_ ``system_package``. + rpm (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource.RPM): + An rpm package file. + + This field is a member of `oneof`_ ``system_package``. + googet (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource.GooGet): + A package managed by GooGet. + + This field is a member of `oneof`_ ``system_package``. 
+ msi (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.PackageResource.MSI): + An MSI package. + + This field is a member of `oneof`_ ``system_package``. + """ + + class DesiredState(proto.Enum): + r"""The desired state that the OS Config agent maintains on the + VM. + + Values: + DESIRED_STATE_UNSPECIFIED (0): + Unspecified is invalid. + INSTALLED (1): + Ensure that the package is installed. + REMOVED (2): + The agent ensures that the package is not + installed and uninstalls it if detected. + """ + DESIRED_STATE_UNSPECIFIED = 0 + INSTALLED = 1 + REMOVED = 2 + + class Deb(proto.Message): + r"""A deb package file. dpkg packages only support INSTALLED + state. + + Attributes: + source (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.File): + Required. A deb package. + pull_deps (bool): + Whether dependencies should also be installed. + + - install when false: ``dpkg -i package`` + - install when true: + ``apt-get update && apt-get -y install package.deb`` + """ + + source: "OSPolicy.Resource.File" = proto.Field( + proto.MESSAGE, + number=1, + message="OSPolicy.Resource.File", + ) + pull_deps: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class APT(proto.Message): + r"""A package managed by APT. + + - install: ``apt-get update && apt-get -y install [name]`` + - remove: ``apt-get -y remove [name]`` + + Attributes: + name (str): + Required. Package name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class RPM(proto.Message): + r"""An RPM package file. RPM packages only support INSTALLED + state. + + Attributes: + source (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.File): + Required. An rpm package. + pull_deps (bool): + Whether dependencies should also be installed. + + - install when false: + ``rpm --upgrade --replacepkgs package.rpm`` + - install when true: ``yum -y install package.rpm`` or + ``zypper -y install package.rpm`` + """ + + source: "OSPolicy.Resource.File" = proto.Field( + proto.MESSAGE, + number=1, + message="OSPolicy.Resource.File", + ) + pull_deps: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class YUM(proto.Message): + r"""A package managed by YUM. + + - install: ``yum -y install package`` + - remove: ``yum -y remove package`` + + Attributes: + name (str): + Required. Package name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class Zypper(proto.Message): + r"""A package managed by Zypper. + + - install: ``zypper -y install package`` + - remove: ``zypper -y rm package`` + + Attributes: + name (str): + Required. Package name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class GooGet(proto.Message): + r"""A package managed by GooGet. + + - install: ``googet -noconfirm install package`` + - remove: ``googet -noconfirm remove package`` + + Attributes: + name (str): + Required. Package name. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class MSI(proto.Message): + r"""An MSI package. MSI packages only support INSTALLED state. + + Attributes: + source (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.File): + Required. The MSI package. + properties (MutableSequence[str]): + Additional properties to use during installation. This + should be in the format of Property=Setting. Appended to the + defaults of ``ACTION=INSTALL REBOOT=ReallySuppress``. 
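+
+                A sketch of an MSI package sourced from Cloud Storage
+                (bucket, object, and generation values are placeholders)::
+
+                    from google.cloud import osconfig_v1alpha
+
+                    Resource = osconfig_v1alpha.OSPolicy.Resource
+                    msi_pkg = Resource.PackageResource(
+                        desired_state=Resource.PackageResource.DesiredState.INSTALLED,
+                        msi=Resource.PackageResource.MSI(
+                            source=Resource.File(
+                                gcs=Resource.File.Gcs(
+                                    bucket="my-bucket",
+                                    object_="installers/agent.msi",
+                                    generation=1619136883923956,
+                                )
+                            ),
+                            # Appended to ACTION=INSTALL REBOOT=ReallySuppress.
+                            properties=["ALLUSERS=1"],
+                        ),
+                    )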
+ """ + + source: "OSPolicy.Resource.File" = proto.Field( + proto.MESSAGE, + number=1, + message="OSPolicy.Resource.File", + ) + properties: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + desired_state: "OSPolicy.Resource.PackageResource.DesiredState" = ( + proto.Field( + proto.ENUM, + number=1, + enum="OSPolicy.Resource.PackageResource.DesiredState", + ) + ) + apt: "OSPolicy.Resource.PackageResource.APT" = proto.Field( + proto.MESSAGE, + number=2, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.APT", + ) + deb: "OSPolicy.Resource.PackageResource.Deb" = proto.Field( + proto.MESSAGE, + number=3, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.Deb", + ) + yum: "OSPolicy.Resource.PackageResource.YUM" = proto.Field( + proto.MESSAGE, + number=4, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.YUM", + ) + zypper: "OSPolicy.Resource.PackageResource.Zypper" = proto.Field( + proto.MESSAGE, + number=5, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.Zypper", + ) + rpm: "OSPolicy.Resource.PackageResource.RPM" = proto.Field( + proto.MESSAGE, + number=6, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.RPM", + ) + googet: "OSPolicy.Resource.PackageResource.GooGet" = proto.Field( + proto.MESSAGE, + number=7, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.GooGet", + ) + msi: "OSPolicy.Resource.PackageResource.MSI" = proto.Field( + proto.MESSAGE, + number=8, + oneof="system_package", + message="OSPolicy.Resource.PackageResource.MSI", + ) + + class RepositoryResource(proto.Message): + r"""A resource that manages a package repository. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + apt (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.RepositoryResource.AptRepository): + An Apt Repository. + + This field is a member of `oneof`_ ``repository``. + yum (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.RepositoryResource.YumRepository): + A Yum Repository. + + This field is a member of `oneof`_ ``repository``. + zypper (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.RepositoryResource.ZypperRepository): + A Zypper Repository. + + This field is a member of `oneof`_ ``repository``. + goo (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.RepositoryResource.GooRepository): + A Goo Repository. + + This field is a member of `oneof`_ ``repository``. + """ + + class AptRepository(proto.Message): + r"""Represents a single apt package repository. These will be added to a + repo file that will be managed at + ``/etc/apt/sources.list.d/google_osconfig.list``. + + Attributes: + archive_type (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.RepositoryResource.AptRepository.ArchiveType): + Required. Type of archive files in this + repository. + uri (str): + Required. URI for this repository. + distribution (str): + Required. Distribution of this repository. + components (MutableSequence[str]): + Required. List of components for this + repository. Must contain at least one item. + gpg_key (str): + URI of the key file for this repository. The agent maintains + a keyring at + ``/etc/apt/trusted.gpg.d/osconfig_agent_managed.gpg``. 
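+
+                A sketch of an apt repository resource; the URIs,
+                distribution, and key location are placeholders. The
+                rendered sources.list entry would follow the usual
+                ``deb {uri} {distribution} {components}`` layout::
+
+                    from google.cloud import osconfig_v1alpha
+
+                    Repo = osconfig_v1alpha.OSPolicy.Resource.RepositoryResource
+                    apt_repo = Repo(
+                        apt=Repo.AptRepository(
+                            archive_type=Repo.AptRepository.ArchiveType.DEB,
+                            uri="https://packages.example.com/apt",
+                            distribution="stable",
+                            components=["main"],
+                            gpg_key="https://packages.example.com/apt/key.gpg",
+                        )
+                    )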
+ """ + + class ArchiveType(proto.Enum): + r"""Type of archive. + + Values: + ARCHIVE_TYPE_UNSPECIFIED (0): + Unspecified is invalid. + DEB (1): + Deb indicates that the archive contains + binary files. + DEB_SRC (2): + Deb-src indicates that the archive contains + source files. + """ + ARCHIVE_TYPE_UNSPECIFIED = 0 + DEB = 1 + DEB_SRC = 2 + + archive_type: "OSPolicy.Resource.RepositoryResource.AptRepository.ArchiveType" = proto.Field( + proto.ENUM, + number=1, + enum="OSPolicy.Resource.RepositoryResource.AptRepository.ArchiveType", + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + distribution: str = proto.Field( + proto.STRING, + number=3, + ) + components: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + gpg_key: str = proto.Field( + proto.STRING, + number=5, + ) + + class YumRepository(proto.Message): + r"""Represents a single yum package repository. These are added to a + repo file that is managed at + ``/etc/yum.repos.d/google_osconfig.repo``. + + Attributes: + id (str): + Required. A one word, unique name for this repository. This + is the ``repo id`` in the yum config file and also the + ``display_name`` if ``display_name`` is omitted. This id is + also used as the unique identifier when checking for + resource conflicts. + display_name (str): + The display name of the repository. + base_url (str): + Required. The location of the repository + directory. + gpg_keys (MutableSequence[str]): + URIs of GPG keys. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + base_url: str = proto.Field( + proto.STRING, + number=3, + ) + gpg_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + class ZypperRepository(proto.Message): + r"""Represents a single zypper package repository. These are added to a + repo file that is managed at + ``/etc/zypp/repos.d/google_osconfig.repo``. + + Attributes: + id (str): + Required. A one word, unique name for this repository. This + is the ``repo id`` in the zypper config file and also the + ``display_name`` if ``display_name`` is omitted. This id is + also used as the unique identifier when checking for + GuestPolicy conflicts. + display_name (str): + The display name of the repository. + base_url (str): + Required. The location of the repository + directory. + gpg_keys (MutableSequence[str]): + URIs of GPG keys. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + base_url: str = proto.Field( + proto.STRING, + number=3, + ) + gpg_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + class GooRepository(proto.Message): + r"""Represents a Goo package repository. These are added to a repo file + that is managed at + ``C:/ProgramData/GooGet/repos/google_osconfig.repo``. + + Attributes: + name (str): + Required. The name of the repository. + url (str): + Required. The url of the repository. 
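+
+                Because ``repository`` is a oneof, assigning one member
+                clears any previously set member (a behavioral sketch of
+                proto-plus oneof handling; the names are placeholders)::
+
+                    from google.cloud import osconfig_v1alpha
+
+                    Repo = osconfig_v1alpha.OSPolicy.Resource.RepositoryResource
+                    repo = Repo(
+                        goo=Repo.GooRepository(
+                            name="corp-repo",
+                            url="https://repo.example.com/goo",
+                        )
+                    )
+                    repo.yum = Repo.YumRepository(
+                        id="corp",
+                        base_url="https://repo.example.com/yum",
+                    )
+                    assert "goo" not in repo  # cleared when yum was assigned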
+                """
+
+                name: str = proto.Field(
+                    proto.STRING,
+                    number=1,
+                )
+                url: str = proto.Field(
+                    proto.STRING,
+                    number=2,
+                )
+
+            apt: "OSPolicy.Resource.RepositoryResource.AptRepository" = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                oneof="repository",
+                message="OSPolicy.Resource.RepositoryResource.AptRepository",
+            )
+            yum: "OSPolicy.Resource.RepositoryResource.YumRepository" = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                oneof="repository",
+                message="OSPolicy.Resource.RepositoryResource.YumRepository",
+            )
+            zypper: "OSPolicy.Resource.RepositoryResource.ZypperRepository" = (
+                proto.Field(
+                    proto.MESSAGE,
+                    number=3,
+                    oneof="repository",
+                    message="OSPolicy.Resource.RepositoryResource.ZypperRepository",
+                )
+            )
+            goo: "OSPolicy.Resource.RepositoryResource.GooRepository" = proto.Field(
+                proto.MESSAGE,
+                number=4,
+                oneof="repository",
+                message="OSPolicy.Resource.RepositoryResource.GooRepository",
+            )
+
+        class ExecResource(proto.Message):
+            r"""A resource that allows executing scripts on the VM.
+
+            The ``ExecResource`` has two stages, ``validate`` and ``enforce``,
+            and both stages accept a script as an argument to execute.
+
+            When the ``ExecResource`` is applied by the agent, it first executes
+            the script in the ``validate`` stage. The ``validate`` stage can
+            signal that the ``ExecResource`` is already in the desired state by
+            returning an exit code of ``100``. If the ``ExecResource`` is not in
+            the desired state, it should return an exit code of ``101``. Any
+            other exit code returned by this stage is considered an error.
+
+            If the ``ExecResource`` is not in the desired state based on the
+            exit code from the ``validate`` stage, the agent proceeds to execute
+            the script from the ``enforce`` stage. If the ``ExecResource`` is
+            already in the desired state, the ``enforce`` stage will not be run.
+            Similar to the ``validate`` stage, the ``enforce`` stage should
+            return an exit code of ``100`` to indicate that the resource is now
+            in its desired state. Any other exit code is considered an error.
+
+            NOTE: An exit code of ``100`` was chosen over ``0`` (and ``101`` vs
+            ``1``) to provide an explicit indicator of ``in desired state``,
+            ``not in desired state``, and error, because PowerShell, for
+            example, always returns an exit code of ``0`` unless an ``exit``
+            statement is provided in the script. For consistency and
+            explicitness, exit codes ``100`` and ``101`` were chosen.
+
+            Attributes:
+                validate (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.ExecResource.Exec):
+                    Required. What to run to validate this
+                    resource is in the desired state. An exit code
+                    of 100 indicates "in desired state", and an exit
+                    code of 101 indicates "not in desired state".
+                    Any other exit code indicates a failure running
+                    validate.
+                enforce (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.ExecResource.Exec):
+                    What to run to bring this resource into the
+                    desired state. An exit code of 100 indicates
+                    "success"; any other exit code indicates a
+                    failure running enforce.
+            """
+
+            class Exec(proto.Message):
+                r"""A file or script to execute.
+
+                This message has `oneof`_ fields (mutually exclusive fields).
+                For each oneof, at most one member field can be set at the same time.
+                Setting any member of the oneof automatically clears all other
+                members.
+
+                .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+                Attributes:
+                    file (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.File):
+                        A remote or local file.
+
+                        This field is a member of `oneof`_ ``source``.
+                    script (str):
+                        An inline script.
+                        The size of the script is limited to 1024
+                        characters.
+
+                        This field is a member of `oneof`_ ``source``.
+                    args (MutableSequence[str]):
+                        Optional arguments to pass to the source
+                        during execution.
+                    interpreter (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.ExecResource.Exec.Interpreter):
+                        Required. The script interpreter to use.
+                    output_file_path (str):
+                        Only recorded for the enforce Exec.
+                        Path to an output file (that is created by this
+                        Exec) whose content will be recorded in
+                        OSPolicyResourceCompliance after a successful
+                        run. Absence of this file or failure to read it
+                        will result in this ExecResource being
+                        non-compliant. Output file size is limited to
+                        100K bytes.
+                """
+
+                class Interpreter(proto.Enum):
+                    r"""The interpreter to use.
+
+                    Values:
+                        INTERPRETER_UNSPECIFIED (0):
+                            Invalid value, the request will return
+                            validation error.
+                        NONE (1):
+                            If an interpreter is not specified, the source is executed
+                            directly. This execution, without an interpreter, only
+                            succeeds for executables and scripts that have shebang
+                            lines.
+                        SHELL (2):
+                            Indicates that the script runs with ``/bin/sh`` on Linux and
+                            ``cmd.exe`` on Windows.
+                        POWERSHELL (3):
+                            Indicates that the script runs with
+                            PowerShell.
+                    """
+                    INTERPRETER_UNSPECIFIED = 0
+                    NONE = 1
+                    SHELL = 2
+                    POWERSHELL = 3
+
+                file: "OSPolicy.Resource.File" = proto.Field(
+                    proto.MESSAGE,
+                    number=1,
+                    oneof="source",
+                    message="OSPolicy.Resource.File",
+                )
+                script: str = proto.Field(
+                    proto.STRING,
+                    number=2,
+                    oneof="source",
+                )
+                args: MutableSequence[str] = proto.RepeatedField(
+                    proto.STRING,
+                    number=3,
+                )
+                interpreter: "OSPolicy.Resource.ExecResource.Exec.Interpreter" = (
+                    proto.Field(
+                        proto.ENUM,
+                        number=4,
+                        enum="OSPolicy.Resource.ExecResource.Exec.Interpreter",
+                    )
+                )
+                output_file_path: str = proto.Field(
+                    proto.STRING,
+                    number=5,
+                )
+
+            validate: "OSPolicy.Resource.ExecResource.Exec" = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                message="OSPolicy.Resource.ExecResource.Exec",
+            )
+            enforce: "OSPolicy.Resource.ExecResource.Exec" = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                message="OSPolicy.Resource.ExecResource.Exec",
+            )
+
+        class FileResource(proto.Message):
+            r"""A resource that manages the state of a file.
+
+            This message has `oneof`_ fields (mutually exclusive fields).
+            For each oneof, at most one member field can be set at the same time.
+            Setting any member of the oneof automatically clears all other
+            members.
+
+            .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+            Attributes:
+                file (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.File):
+                    A remote or local source.
+
+                    This field is a member of `oneof`_ ``source``.
+                content (str):
+                    A file with this content.
+                    The size of the content is limited to 1024
+                    characters.
+
+                    This field is a member of `oneof`_ ``source``.
+                path (str):
+                    Required. The absolute path of the file
+                    within the VM.
+                state (google.cloud.osconfig_v1alpha.types.OSPolicy.Resource.FileResource.DesiredState):
+                    Required. Desired state of the file.
+                permissions (str):
+                    Consists of three octal digits which
+                    represent, in order, the permissions of the
+                    owner, group, and other users for the file
+                    (similarly to the numeric mode used in the Linux
+                    chmod utility).
Each digit represents a
+                    three-bit number, with the 4 bit corresponding
+                    to the read permission, the 2 bit to the write
+                    permission, and the 1 bit to the execute
+                    permission. The default is 755.
+
+                    Below are some examples of permissions and their
+                    associated values:
+
+                    read, write, and execute: 7
+                    read and execute: 5
+                    read and write: 6
+                    read only: 4
+            """
+
+            class DesiredState(proto.Enum):
+                r"""Desired state of the file.
+
+                Values:
+                    DESIRED_STATE_UNSPECIFIED (0):
+                        Unspecified is invalid.
+                    PRESENT (1):
+                        Ensure file at path is present.
+                    ABSENT (2):
+                        Ensure file at path is absent.
+                    CONTENTS_MATCH (3):
+                        Ensure the contents of the file at path
+                        matches. If the file does not exist it will be
+                        created.
+                """
+                DESIRED_STATE_UNSPECIFIED = 0
+                PRESENT = 1
+                ABSENT = 2
+                CONTENTS_MATCH = 3
+
+            file: "OSPolicy.Resource.File" = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                oneof="source",
+                message="OSPolicy.Resource.File",
+            )
+            content: str = proto.Field(
+                proto.STRING,
+                number=2,
+                oneof="source",
+            )
+            path: str = proto.Field(
+                proto.STRING,
+                number=3,
+            )
+            state: "OSPolicy.Resource.FileResource.DesiredState" = proto.Field(
+                proto.ENUM,
+                number=4,
+                enum="OSPolicy.Resource.FileResource.DesiredState",
+            )
+            permissions: str = proto.Field(
+                proto.STRING,
+                number=5,
+            )
+
+        id: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        pkg: "OSPolicy.Resource.PackageResource" = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            oneof="resource_type",
+            message="OSPolicy.Resource.PackageResource",
+        )
+        repository: "OSPolicy.Resource.RepositoryResource" = proto.Field(
+            proto.MESSAGE,
+            number=3,
+            oneof="resource_type",
+            message="OSPolicy.Resource.RepositoryResource",
+        )
+        exec_: "OSPolicy.Resource.ExecResource" = proto.Field(
+            proto.MESSAGE,
+            number=4,
+            oneof="resource_type",
+            message="OSPolicy.Resource.ExecResource",
+        )
+        file: "OSPolicy.Resource.FileResource" = proto.Field(
+            proto.MESSAGE,
+            number=5,
+            oneof="resource_type",
+            message="OSPolicy.Resource.FileResource",
+        )
+
+    class ResourceGroup(proto.Message):
+        r"""Resource groups provide a mechanism to group OS policy resources.
+
+        Resource groups enable OS policy authors to create a single OS
+        policy to be applied to VMs running different operating systems.
+
+        When the OS policy is applied to a target VM, the appropriate
+        resource group within the OS policy is selected based on the
+        ``OSFilter`` specified within the resource group.
+
+        Attributes:
+            os_filter (google.cloud.osconfig_v1alpha.types.OSPolicy.OSFilter):
+                Deprecated. Use the ``inventory_filters`` field instead.
+                Used to specify the OS filter for a resource group.
+            inventory_filters (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicy.InventoryFilter]):
+                List of inventory filters for the resource group.
+
+                The resources in this resource group are applied to the
+                target VM if it satisfies at least one of the following
+                inventory filters.
+
+                For example, to apply this resource group to VMs running
+                either ``RHEL`` or ``CentOS`` operating systems, specify two
+                items in the list with the following values:
+                inventory_filters[0].os_short_name='rhel' and
+                inventory_filters[1].os_short_name='centos'
+
+                If the list is empty, this resource group will be applied to
+                the target VM unconditionally.
+            resources (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicy.Resource]):
+                Required. List of resources configured for
+                this resource group. The resources are executed
+                in the exact order specified here.
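+
+        A sketch that pairs an inventory filter with an ``exec`` resource
+        following the 100/101 exit-code convention described under
+        ``ExecResource`` (script bodies and ids are placeholders)::
+
+            from google.cloud import osconfig_v1alpha
+
+            OSPolicy = osconfig_v1alpha.OSPolicy
+            Exec = OSPolicy.Resource.ExecResource.Exec
+            group = OSPolicy.ResourceGroup(
+                inventory_filters=[
+                    OSPolicy.InventoryFilter(os_short_name="debian", os_version="11.*"),
+                ],
+                resources=[
+                    OSPolicy.Resource(
+                        id="motd-banner",
+                        exec_=OSPolicy.Resource.ExecResource(
+                            validate=Exec(
+                                # Exit 100 = already in desired state, 101 = not.
+                                script="grep -q corp-banner /etc/motd && exit 100 || exit 101",
+                                interpreter=Exec.Interpreter.SHELL,
+                            ),
+                            enforce=Exec(
+                                script="echo corp-banner >> /etc/motd && exit 100",
+                                interpreter=Exec.Interpreter.SHELL,
+                            ),
+                        ),
+                    )
+                ],
+            )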
+ """ + + os_filter: "OSPolicy.OSFilter" = proto.Field( + proto.MESSAGE, + number=1, + message="OSPolicy.OSFilter", + ) + inventory_filters: MutableSequence[ + "OSPolicy.InventoryFilter" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="OSPolicy.InventoryFilter", + ) + resources: MutableSequence["OSPolicy.Resource"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OSPolicy.Resource", + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + mode: Mode = proto.Field( + proto.ENUM, + number=3, + enum=Mode, + ) + resource_groups: MutableSequence[ResourceGroup] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=ResourceGroup, + ) + allow_no_resource_group_match: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy_assignment_reports.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy_assignment_reports.py new file mode 100644 index 000000000000..9f11874181e2 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy_assignment_reports.py @@ -0,0 +1,448 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1alpha", + manifest={ + "GetOSPolicyAssignmentReportRequest", + "ListOSPolicyAssignmentReportsRequest", + "ListOSPolicyAssignmentReportsResponse", + "OSPolicyAssignmentReport", + }, +) + + +class GetOSPolicyAssignmentReportRequest(proto.Message): + r"""Get a report of the OS policy assignment for a VM instance. + + Attributes: + name (str): + Required. API resource name for OS policy assignment report. + + Format: + ``/projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance_id}``, + either Compute Engine ``instance-id`` or ``instance-name`` + can be provided. For ``{assignment_id}``, the + OSPolicyAssignment id must be provided. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListOSPolicyAssignmentReportsRequest(proto.Message): + r"""List the OS policy assignment reports for VM instances. + + Attributes: + parent (str): + Required. The parent resource name. + + Format: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/reports`` + + For ``{project}``, either ``project-number`` or + ``project-id`` can be provided. For ``{instance}``, either + ``instance-name``, ``instance-id``, or ``-`` can be + provided. 
If '-' is provided, the response will include + OSPolicyAssignmentReports for all instances in the + project/location. For ``{assignment}``, either + ``assignment-id`` or ``-`` can be provided. If '-' is + provided, the response will include + OSPolicyAssignmentReports for all OSPolicyAssignments in the + project/location. Either {instance} or {assignment} must be + ``-``. + + For example: + ``projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/-/reports`` + returns all reports for the instance + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/{assignment-id}/reports`` + returns all the reports for the given assignment across all + instances. + ``projects/{project}/locations/{location}/instances/-/osPolicyAssignments/-/reports`` + returns all the reports for all assignments across all + instances. + page_size (int): + The maximum number of results to return. + filter (str): + If provided, this field specifies the criteria that must be + met by the ``OSPolicyAssignmentReport`` API resource that is + included in the response. + page_token (str): + A pagination token returned from a previous call to the + ``ListOSPolicyAssignmentReports`` method that indicates + where this listing should continue from. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListOSPolicyAssignmentReportsResponse(proto.Message): + r"""A response message for listing OS Policy assignment reports + including the page of results and page token. + + Attributes: + os_policy_assignment_reports (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport]): + List of OS policy assignment reports. + next_page_token (str): + The pagination token to retrieve the next + page of OS policy assignment report objects. + """ + + @property + def raw_page(self): + return self + + os_policy_assignment_reports: MutableSequence[ + "OSPolicyAssignmentReport" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="OSPolicyAssignmentReport", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OSPolicyAssignmentReport(proto.Message): + r"""A report of the OS policy assignment status for a given + instance. + + Attributes: + name (str): + The ``OSPolicyAssignmentReport`` API resource name. + + Format: + ``projects/{project_number}/locations/{location}/instances/{instance_id}/osPolicyAssignments/{os_policy_assignment_id}/report`` + instance (str): + The Compute Engine VM instance name. + os_policy_assignment (str): + Reference to the ``OSPolicyAssignment`` API resource that + the ``OSPolicy`` belongs to. + + Format: + ``projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revision_id}`` + os_policy_compliances (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport.OSPolicyCompliance]): + Compliance data for each ``OSPolicy`` that is applied to the + VM. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp for when the report was last + generated. + last_run_id (str): + Unique identifier of the last attempted run + to apply the OS policies associated with this + assignment on the VM. + + This ID is logged by the OS Config agent while + applying the OS policies associated with this + assignment on the VM. 
NOTE: If the service is + unable to successfully connect to the agent for + this run, then this id will not be available in + the agent logs. + """ + + class OSPolicyCompliance(proto.Message): + r"""Compliance data for an OS policy + + Attributes: + os_policy_id (str): + The OS policy id + compliance_state (google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState): + The compliance state of the OS policy. + compliance_state_reason (str): + The reason for the OS policy to be in an unknown compliance + state. This field is always populated when + ``compliance_state`` is ``UNKNOWN``. + + If populated, the field can contain one of the following + values: + + - ``vm-not-running``: The VM was not running. + - ``os-policies-not-supported-by-agent``: The version of + the OS Config agent running on the VM does not support + running OS policies. + - ``no-agent-detected``: The OS Config agent is not + detected for the VM. + - ``resource-execution-errors``: The OS Config agent + encountered errors while executing one or more resources + in the policy. See ``os_policy_resource_compliances`` for + details. + - ``task-timeout``: The task sent to the agent to apply the + policy timed out. + - ``unexpected-agent-state``: The OS Config agent did not + report the final status of the task that attempted to + apply the policy. Instead, the agent unexpectedly started + working on a different task. This mostly happens when the + agent or VM unexpectedly restarts while applying OS + policies. + - ``internal-service-errors``: Internal service errors were + encountered while attempting to apply the policy. + os_policy_resource_compliances (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance]): + Compliance data for each resource within the + policy that is applied to the VM. + """ + + class ComplianceState(proto.Enum): + r"""Possible compliance states for an os policy. + + Values: + UNKNOWN (0): + The policy is in an unknown compliance state. + + Refer to the field ``compliance_state_reason`` to learn the + exact reason for the policy to be in this compliance state. + COMPLIANT (1): + Policy is compliant. + + The policy is compliant if all the underlying + resources are also compliant. + NON_COMPLIANT (2): + Policy is non-compliant. + + The policy is non-compliant if one or more + underlying resources are non-compliant. + """ + UNKNOWN = 0 + COMPLIANT = 1 + NON_COMPLIANT = 2 + + class OSPolicyResourceCompliance(proto.Message): + r"""Compliance data for an OS policy resource. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + os_policy_resource_id (str): + The ID of the OS policy resource. + config_steps (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep]): + Ordered list of configuration completed by + the agent for the OS policy resource. + compliance_state (google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState): + The compliance state of the resource. + compliance_state_reason (str): + A reason for the resource to be in the given compliance + state. This field is always populated when + ``compliance_state`` is ``UNKNOWN``. 
+ + The following values are supported when + ``compliance_state == UNKNOWN`` + + - ``execution-errors``: Errors were encountered by the + agent while executing the resource and the compliance + state couldn't be determined. + - ``execution-skipped-by-agent``: Resource execution was + skipped by the agent because errors were encountered + while executing prior resources in the OS policy. + - ``os-policy-execution-attempt-failed``: The execution of + the OS policy containing this resource failed and the + compliance state couldn't be determined. + exec_resource_output (google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput): + ExecResource specific output. + + This field is a member of `oneof`_ ``output``. + """ + + class ComplianceState(proto.Enum): + r"""Possible compliance states for a resource. + + Values: + UNKNOWN (0): + The resource is in an unknown compliance state. + + To get more details about why the policy is in this state, + review the output of the ``compliance_state_reason`` field. + COMPLIANT (1): + Resource is compliant. + NON_COMPLIANT (2): + Resource is non-compliant. + """ + UNKNOWN = 0 + COMPLIANT = 1 + NON_COMPLIANT = 2 + + class OSPolicyResourceConfigStep(proto.Message): + r"""Step performed by the OS Config agent for configuring an + ``OSPolicy`` resource to its desired state. + + Attributes: + type_ (google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type): + Configuration step type. + error_message (str): + An error message recorded during the + execution of this step. Only populated if errors + were encountered during this step execution. + """ + + class Type(proto.Enum): + r"""Supported configuration step types + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + VALIDATION (1): + Checks for resource conflicts such as schema + errors. + DESIRED_STATE_CHECK (2): + Checks the current status of the desired + state for a resource. + DESIRED_STATE_ENFORCEMENT (3): + Enforces the desired state for a resource + that is not in desired state. + DESIRED_STATE_CHECK_POST_ENFORCEMENT (4): + Re-checks the status of the desired state. + This check is done for a resource after the + enforcement of all OS policies. + + This step is used to determine the final desired + state status for the resource. It accounts for + any resources that might have drifted from their + desired state due to side effects from executing + other resources. + """ + TYPE_UNSPECIFIED = 0 + VALIDATION = 1 + DESIRED_STATE_CHECK = 2 + DESIRED_STATE_ENFORCEMENT = 3 + DESIRED_STATE_CHECK_POST_ENFORCEMENT = 4 + + type_: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type" = proto.Field( + proto.ENUM, + number=1, + enum="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep.Type", + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + + class ExecResourceOutput(proto.Message): + r"""ExecResource specific output. + + Attributes: + enforcement_output (bytes): + Output from enforcement phase output file (if + run). Output size is limited to 100K bytes. 
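+
+                A sketch of reading a report, including any recorded
+                enforcement output (the resource names are placeholders and
+                the client wrapper is the generated zonal service client)::
+
+                    from google.cloud import osconfig_v1alpha
+
+                    client = osconfig_v1alpha.OsConfigZonalServiceClient()
+                    report = client.get_os_policy_assignment_report(
+                        name=(
+                            "projects/my-project/locations/us-central1-a/"
+                            "instances/my-vm/osPolicyAssignments/my-assignment/report"
+                        )
+                    )
+                    for policy in report.os_policy_compliances:
+                        print(policy.os_policy_id, policy.compliance_state.name)
+                        for resource in policy.os_policy_resource_compliances:
+                            if "exec_resource_output" in resource:
+                                print(resource.exec_resource_output.enforcement_output)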
+ """ + + enforcement_output: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + os_policy_resource_id: str = proto.Field( + proto.STRING, + number=1, + ) + config_steps: MutableSequence[ + "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.OSPolicyResourceConfigStep", + ) + compliance_state: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState" = proto.Field( + proto.ENUM, + number=3, + enum="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ComplianceState", + ) + compliance_state_reason: str = proto.Field( + proto.STRING, + number=4, + ) + exec_resource_output: "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput" = proto.Field( + proto.MESSAGE, + number=5, + oneof="output", + message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance.ExecResourceOutput", + ) + + os_policy_id: str = proto.Field( + proto.STRING, + number=1, + ) + compliance_state: "OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState" = proto.Field( + proto.ENUM, + number=2, + enum="OSPolicyAssignmentReport.OSPolicyCompliance.ComplianceState", + ) + compliance_state_reason: str = proto.Field( + proto.STRING, + number=3, + ) + os_policy_resource_compliances: MutableSequence[ + "OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="OSPolicyAssignmentReport.OSPolicyCompliance.OSPolicyResourceCompliance", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + instance: str = proto.Field( + proto.STRING, + number=2, + ) + os_policy_assignment: str = proto.Field( + proto.STRING, + number=3, + ) + os_policy_compliances: MutableSequence[OSPolicyCompliance] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=OSPolicyCompliance, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + last_run_id: str = proto.Field( + proto.STRING, + number=6, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy_assignments.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy_assignments.py new file mode 100644 index 000000000000..cf26b16cb936 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/os_policy_assignments.py @@ -0,0 +1,633 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.osconfig_v1alpha.types import os_policy, osconfig_common
+
+__protobuf__ = proto.module(
+    package="google.cloud.osconfig.v1alpha",
+    manifest={
+        "OSPolicyAssignment",
+        "OSPolicyAssignmentOperationMetadata",
+        "CreateOSPolicyAssignmentRequest",
+        "UpdateOSPolicyAssignmentRequest",
+        "GetOSPolicyAssignmentRequest",
+        "ListOSPolicyAssignmentsRequest",
+        "ListOSPolicyAssignmentsResponse",
+        "ListOSPolicyAssignmentRevisionsRequest",
+        "ListOSPolicyAssignmentRevisionsResponse",
+        "DeleteOSPolicyAssignmentRequest",
+    },
+)
+
+
+class OSPolicyAssignment(proto.Message):
+    r"""OS policy assignment is an API resource that is used to apply a set
+    of OS policies to a dynamically targeted group of Compute Engine VM
+    instances.
+
+    An OS policy is used to define the desired state configuration for a
+    Compute Engine VM instance through a set of configuration resources
+    that provide capabilities such as installing or removing software
+    packages, or executing a script.
+
+    For more information, see `OS policy and OS policy
+    assignment `__.
+
+    Attributes:
+        name (str):
+            Resource name.
+
+            Format:
+            ``projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id}``
+
+            This field is ignored when you create an OS policy
+            assignment.
+        description (str):
+            OS policy assignment description.
+            Length of the description is limited to 1024
+            characters.
+        os_policies (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicy]):
+            Required. List of OS policies to be applied
+            to the VMs.
+        instance_filter (google.cloud.osconfig_v1alpha.types.OSPolicyAssignment.InstanceFilter):
+            Required. Filter to select VMs.
+        rollout (google.cloud.osconfig_v1alpha.types.OSPolicyAssignment.Rollout):
+            Required. Rollout to deploy the OS policy assignment. A
+            rollout is triggered in the following situations:
+
+            1) OSPolicyAssignment is created.
+            2) OSPolicyAssignment is updated and the update contains
+               changes to one of the following fields:
+
+               -  instance_filter
+               -  os_policies
+
+            3) OSPolicyAssignment is deleted.
+        revision_id (str):
+            Output only. The assignment revision ID. A new
+            revision is committed whenever a rollout is
+            triggered for an OS policy assignment.
+        revision_create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The timestamp that the revision
+            was created.
+        etag (str):
+            The etag for this OS policy assignment.
+            If this is provided on update, it must match the
+            server's etag.
+        rollout_state (google.cloud.osconfig_v1alpha.types.OSPolicyAssignment.RolloutState):
+            Output only. OS policy assignment rollout
+            state
+        baseline (bool):
+            Output only. Indicates that this revision has been
+            successfully rolled out in this zone and new VMs will be
+            assigned OS policies from this revision.
+
+            For a given OS policy assignment, there is only one revision
+            with a value of ``true`` for this field.
+        deleted (bool):
+            Output only. Indicates that this revision
+            deletes the OS policy assignment.
+        reconciling (bool):
+            Output only. Indicates that reconciliation is in progress
+            for the revision. This value is ``true`` when the
+            ``rollout_state`` is one of:
+
+            -  IN_PROGRESS
+            -  CANCELLING
+        uid (str):
+            Output only.
Server generated unique id for
+            the OS policy assignment resource.
+    """
+
+    class RolloutState(proto.Enum):
+        r"""OS policy assignment rollout state
+
+        Values:
+            ROLLOUT_STATE_UNSPECIFIED (0):
+                Invalid value
+            IN_PROGRESS (1):
+                The rollout is in progress.
+            CANCELLING (2):
+                The rollout is being cancelled.
+            CANCELLED (3):
+                The rollout is cancelled.
+            SUCCEEDED (4):
+                The rollout has completed successfully.
+        """
+        ROLLOUT_STATE_UNSPECIFIED = 0
+        IN_PROGRESS = 1
+        CANCELLING = 2
+        CANCELLED = 3
+        SUCCEEDED = 4
+
+    class LabelSet(proto.Message):
+        r"""Message representing a label set.
+
+        - A label is a key/value pair set for a VM.
+        - A LabelSet is a set of labels.
+        - Labels within a LabelSet are ANDed. In other words, a LabelSet is
+          applicable for a VM only if it matches all the labels in the
+          LabelSet.
+        - Example: A LabelSet with 2 labels: ``env=prod`` and
+          ``type=webserver`` will only be applicable for those VMs with
+          both labels present.
+
+        Attributes:
+            labels (MutableMapping[str, str]):
+                Labels are identified by key/value pairs in
+                this map. A VM should contain all the key/value
+                pairs specified in this map to be selected.
+        """
+
+        labels: MutableMapping[str, str] = proto.MapField(
+            proto.STRING,
+            proto.STRING,
+            number=1,
+        )
+
+    class InstanceFilter(proto.Message):
+        r"""Filters to select target VMs for an assignment.
+
+        If more than one filter criterion is specified below, a VM will
+        be selected if and only if it satisfies all of them.
+
+        Attributes:
+            all_ (bool):
+                Target all VMs in the project. If true, no
+                other criteria are permitted.
+            os_short_names (MutableSequence[str]):
+                Deprecated. Use the ``inventories`` field instead. A VM is
+                selected if its OS short name matches any of the values
+                provided in this list.
+            inclusion_labels (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignment.LabelSet]):
+                List of label sets used for VM inclusion.
+
+                If the list has more than one ``LabelSet``, the VM is
+                included if any of the label sets are applicable for the VM.
+            exclusion_labels (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignment.LabelSet]):
+                List of label sets used for VM exclusion.
+
+                If the list has more than one label set, the VM
+                is excluded if any of the label sets are
+                applicable for the VM.
+            inventories (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignment.InstanceFilter.Inventory]):
+                List of inventories to select VMs.
+
+                A VM is selected if its inventory data matches
+                at least one of the following inventories.
+        """
+
+        class Inventory(proto.Message):
+            r"""VM inventory details.
+
+            Attributes:
+                os_short_name (str):
+                    Required. The OS short name
+                os_version (str):
+                    The OS version
+
+                    Prefix matches are supported if an asterisk (*) is provided
+                    as the last character. For example, to match all versions
+                    with a major version of ``7``, specify the following value
+                    for this field: ``7.*``
+
+                    An empty string matches all OS versions.
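+
+            For example, a filter that targets only RHEL 8.x VMs labelled
+            ``env=prod`` might look like this sketch (the label values are
+            placeholders)::
+
+                from google.cloud import osconfig_v1alpha
+
+                Assignment = osconfig_v1alpha.OSPolicyAssignment
+                instance_filter = Assignment.InstanceFilter(
+                    inclusion_labels=[
+                        Assignment.LabelSet(labels={"env": "prod"}),
+                    ],
+                    inventories=[
+                        Assignment.InstanceFilter.Inventory(
+                            os_short_name="rhel",
+                            os_version="8.*",
+                        )
+                    ],
+                )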
+ """ + + os_short_name: str = proto.Field( + proto.STRING, + number=1, + ) + os_version: str = proto.Field( + proto.STRING, + number=2, + ) + + all_: bool = proto.Field( + proto.BOOL, + number=1, + ) + os_short_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + inclusion_labels: MutableSequence[ + "OSPolicyAssignment.LabelSet" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="OSPolicyAssignment.LabelSet", + ) + exclusion_labels: MutableSequence[ + "OSPolicyAssignment.LabelSet" + ] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="OSPolicyAssignment.LabelSet", + ) + inventories: MutableSequence[ + "OSPolicyAssignment.InstanceFilter.Inventory" + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="OSPolicyAssignment.InstanceFilter.Inventory", + ) + + class Rollout(proto.Message): + r"""Message to configure the rollout at the zonal level for the + OS policy assignment. + + Attributes: + disruption_budget (google.cloud.osconfig_v1alpha.types.FixedOrPercent): + Required. The maximum number (or percentage) + of VMs per zone to disrupt at any given moment. + min_wait_duration (google.protobuf.duration_pb2.Duration): + Required. This determines the minimum duration of time to + wait after the configuration changes are applied through the + current rollout. A VM continues to count towards the + ``disruption_budget`` at least until this duration of time + has passed after configuration changes are applied. + """ + + disruption_budget: osconfig_common.FixedOrPercent = proto.Field( + proto.MESSAGE, + number=1, + message=osconfig_common.FixedOrPercent, + ) + min_wait_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + os_policies: MutableSequence[os_policy.OSPolicy] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=os_policy.OSPolicy, + ) + instance_filter: InstanceFilter = proto.Field( + proto.MESSAGE, + number=4, + message=InstanceFilter, + ) + rollout: Rollout = proto.Field( + proto.MESSAGE, + number=5, + message=Rollout, + ) + revision_id: str = proto.Field( + proto.STRING, + number=6, + ) + revision_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + etag: str = proto.Field( + proto.STRING, + number=8, + ) + rollout_state: RolloutState = proto.Field( + proto.ENUM, + number=9, + enum=RolloutState, + ) + baseline: bool = proto.Field( + proto.BOOL, + number=10, + ) + deleted: bool = proto.Field( + proto.BOOL, + number=11, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=12, + ) + uid: str = proto.Field( + proto.STRING, + number=13, + ) + + +class OSPolicyAssignmentOperationMetadata(proto.Message): + r"""OS policy assignment operation metadata provided by OS policy + assignment API methods that return long running operations. + + Attributes: + os_policy_assignment (str): + Reference to the ``OSPolicyAssignment`` API resource. + + Format: + ``projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revision_id}`` + api_method (google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentOperationMetadata.APIMethod): + The OS policy assignment API method. 
+        rollout_state (google.cloud.osconfig_v1alpha.types.OSPolicyAssignmentOperationMetadata.RolloutState):
+            State of the rollout
+        rollout_start_time (google.protobuf.timestamp_pb2.Timestamp):
+            Rollout start time
+        rollout_update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Rollout update time
+    """
+
+    class APIMethod(proto.Enum):
+        r"""The OS policy assignment API method.
+
+        Values:
+            API_METHOD_UNSPECIFIED (0):
+                Invalid value
+            CREATE (1):
+                Create OS policy assignment API method
+            UPDATE (2):
+                Update OS policy assignment API method
+            DELETE (3):
+                Delete OS policy assignment API method
+        """
+        API_METHOD_UNSPECIFIED = 0
+        CREATE = 1
+        UPDATE = 2
+        DELETE = 3
+
+    class RolloutState(proto.Enum):
+        r"""State of the rollout
+
+        Values:
+            ROLLOUT_STATE_UNSPECIFIED (0):
+                Invalid value
+            IN_PROGRESS (1):
+                The rollout is in progress.
+            CANCELLING (2):
+                The rollout is being cancelled.
+            CANCELLED (3):
+                The rollout is cancelled.
+            SUCCEEDED (4):
+                The rollout has completed successfully.
+        """
+        ROLLOUT_STATE_UNSPECIFIED = 0
+        IN_PROGRESS = 1
+        CANCELLING = 2
+        CANCELLED = 3
+        SUCCEEDED = 4
+
+    os_policy_assignment: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    api_method: APIMethod = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=APIMethod,
+    )
+    rollout_state: RolloutState = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=RolloutState,
+    )
+    rollout_start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+    rollout_update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class CreateOSPolicyAssignmentRequest(proto.Message):
+    r"""A request message to create an OS policy assignment
+
+    Attributes:
+        parent (str):
+            Required. The parent resource name in the
+            form: projects/{project}/locations/{location}
+        os_policy_assignment (google.cloud.osconfig_v1alpha.types.OSPolicyAssignment):
+            Required. The OS policy assignment to be
+            created.
+        os_policy_assignment_id (str):
+            Required. The logical name of the OS policy assignment in
+            the project with the following restrictions:
+
+            -  Must contain only lowercase letters, numbers, and
+               hyphens.
+            -  Must start with a letter.
+            -  Must be between 1 and 63 characters.
+            -  Must end with a number or a letter.
+            -  Must be unique within the project.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    os_policy_assignment: "OSPolicyAssignment" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="OSPolicyAssignment",
+    )
+    os_policy_assignment_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class UpdateOSPolicyAssignmentRequest(proto.Message):
+    r"""A request message to update an OS policy assignment
+
+    Attributes:
+        os_policy_assignment (google.cloud.osconfig_v1alpha.types.OSPolicyAssignment):
+            Required. The updated OS policy assignment.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Optional. Field mask that controls which
+            fields of the assignment should be updated.
+    """
+
+    os_policy_assignment: "OSPolicyAssignment" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="OSPolicyAssignment",
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class GetOSPolicyAssignmentRequest(proto.Message):
+    r"""A request message to get an OS policy assignment
+
+    Attributes:
+        name (str):
+            Required. The resource name of the OS policy assignment.
+
+            Format:
+            ``projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}@{revisionId}``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListOSPolicyAssignmentsRequest(proto.Message):
+    r"""A request message to list OS policy assignments for a parent
+    resource
+
+    Attributes:
+        parent (str):
+            Required. The parent resource name.
+        page_size (int):
+            The maximum number of assignments to return.
+        page_token (str):
+            A pagination token returned from a previous call to
+            ``ListOSPolicyAssignments`` that indicates where this
+            listing should continue from.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListOSPolicyAssignmentsResponse(proto.Message):
+    r"""A response message for listing all assignments under a given
+    parent.
+
+    Attributes:
+        os_policy_assignments (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignment]):
+            The list of assignments
+        next_page_token (str):
+            The pagination token to retrieve the next
+            page of OS policy assignments.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    os_policy_assignments: MutableSequence["OSPolicyAssignment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="OSPolicyAssignment",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class ListOSPolicyAssignmentRevisionsRequest(proto.Message):
+    r"""A request message to list revisions for an OS policy
+    assignment
+
+    Attributes:
+        name (str):
+            Required. The name of the OS policy
+            assignment to list revisions for.
+        page_size (int):
+            The maximum number of revisions to return.
+        page_token (str):
+            A pagination token returned from a previous call to
+            ``ListOSPolicyAssignmentRevisions`` that indicates where
+            this listing should continue from.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ListOSPolicyAssignmentRevisionsResponse(proto.Message):
+    r"""A response message for listing all revisions for an OS policy
+    assignment.
+
+    Attributes:
+        os_policy_assignments (MutableSequence[google.cloud.osconfig_v1alpha.types.OSPolicyAssignment]):
+            The OS policy assignment revisions
+        next_page_token (str):
+            The pagination token to retrieve the next
+            page of OS policy assignment revisions.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    os_policy_assignments: MutableSequence["OSPolicyAssignment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="OSPolicyAssignment",
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class DeleteOSPolicyAssignmentRequest(proto.Message):
+    r"""A request message for deleting an OS policy assignment.
+
+    Attributes:
+        name (str):
+            Required.
The name of the OS policy + assignment to be deleted + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/osconfig_common.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/osconfig_common.py new file mode 100644 index 000000000000..5da5f290cb5c --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/osconfig_common.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1alpha", + manifest={ + "FixedOrPercent", + }, +) + + +class FixedOrPercent(proto.Message): + r"""Message encapsulating a value that can be either absolute + ("fixed") or relative ("percent") to a value. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed (int): + Specifies a fixed value. + + This field is a member of `oneof`_ ``mode``. + percent (int): + Specifies the relative value defined as a + percentage, which will be multiplied by a + reference value. + + This field is a member of `oneof`_ ``mode``. + """ + + fixed: int = proto.Field( + proto.INT32, + number=1, + oneof="mode", + ) + percent: int = proto.Field( + proto.INT32, + number=2, + oneof="mode", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/osconfig_zonal_service.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/osconfig_zonal_service.py new file mode 100644 index 000000000000..4221e9291d7d --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/osconfig_zonal_service.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1alpha", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/vulnerability.py b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/vulnerability.py new file mode 100644 index 000000000000..8e40d00b1bb0 --- /dev/null +++ b/packages/google-cloud-os-config/google/cloud/osconfig_v1alpha/types/vulnerability.py @@ -0,0 +1,608 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.osconfig.v1alpha", + manifest={ + "VulnerabilityReport", + "GetVulnerabilityReportRequest", + "ListVulnerabilityReportsRequest", + "ListVulnerabilityReportsResponse", + "CVSSv3", + }, +) + + +class VulnerabilityReport(proto.Message): + r"""This API resource represents the vulnerability report for a + specified Compute Engine virtual machine (VM) instance at a given + point in time. + + For more information, see `Vulnerability + reports `__. + + Attributes: + name (str): + Output only. The ``vulnerabilityReport`` API resource name. + + Format: + ``projects/{project_number}/locations/{location}/instances/{instance_id}/vulnerabilityReport`` + vulnerabilities (MutableSequence[google.cloud.osconfig_v1alpha.types.VulnerabilityReport.Vulnerability]): + Output only. List of vulnerabilities + affecting the VM. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp for when the last + vulnerability report was generated for the VM. + """ + + class Vulnerability(proto.Message): + r"""A vulnerability affecting the VM instance. + + Attributes: + details (google.cloud.osconfig_v1alpha.types.VulnerabilityReport.Vulnerability.Details): + Contains metadata as per the upstream feed of + the operating system and NVD. + installed_inventory_item_ids (MutableSequence[str]): + Corresponds to the ``INSTALLED_PACKAGE`` inventory item on + the VM. This field displays the inventory items affected by + this vulnerability. If the vulnerability report was not + updated after the VM inventory update, these values might + not display in VM inventory. For some distros, this field + may be empty. + available_inventory_item_ids (MutableSequence[str]): + Corresponds to the ``AVAILABLE_PACKAGE`` inventory item on + the VM. If the vulnerability report was not updated after + the VM inventory update, these values might not display in + VM inventory. If there is no available fix, the field is + empty. The ``inventory_item`` value specifies the latest + ``SoftwarePackage`` available to the VM that fixes the + vulnerability. 
+            create_time (google.protobuf.timestamp_pb2.Timestamp):
+                The timestamp for when the vulnerability was
+                first detected.
+            update_time (google.protobuf.timestamp_pb2.Timestamp):
+                The timestamp for when the vulnerability was
+                last modified.
+            items (MutableSequence[google.cloud.osconfig_v1alpha.types.VulnerabilityReport.Vulnerability.Item]):
+                List of items affected by the vulnerability.
+        """
+
+        class Details(proto.Message):
+            r"""Contains metadata information for the vulnerability. This
+            information is collected from the upstream feed of the operating
+            system.
+
+            Attributes:
+                cve (str):
+                    The CVE of the vulnerability. CVE cannot be
+                    empty and must be unique across vulnerabilities
+                    for a VM.
+                cvss_v2_score (float):
+                    The CVSS V2 score of this vulnerability. CVSS
+                    V2 score is on a scale of 0 - 10 where 0
+                    indicates low severity and 10 indicates high
+                    severity.
+                cvss_v3 (google.cloud.osconfig_v1alpha.types.CVSSv3):
+                    The full description of the CVSSv3 for this
+                    vulnerability from NVD.
+                severity (str):
+                    Assigned severity/impact ranking from the
+                    distro.
+                description (str):
+                    The note or description describing the
+                    vulnerability from the distro.
+                references (MutableSequence[google.cloud.osconfig_v1alpha.types.VulnerabilityReport.Vulnerability.Details.Reference]):
+                    Corresponds to the references attached to the
+                    ``VulnerabilityDetails``.
+            """
+
+            class Reference(proto.Message):
+                r"""A reference for this vulnerability.
+
+                Attributes:
+                    url (str):
+                        The url of the reference.
+                    source (str):
+                        The source of the reference, e.g. NVD.
+                """
+
+                url: str = proto.Field(
+                    proto.STRING,
+                    number=1,
+                )
+                source: str = proto.Field(
+                    proto.STRING,
+                    number=2,
+                )
+
+            cve: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+            cvss_v2_score: float = proto.Field(
+                proto.FLOAT,
+                number=2,
+            )
+            cvss_v3: "CVSSv3" = proto.Field(
+                proto.MESSAGE,
+                number=3,
+                message="CVSSv3",
+            )
+            severity: str = proto.Field(
+                proto.STRING,
+                number=4,
+            )
+            description: str = proto.Field(
+                proto.STRING,
+                number=5,
+            )
+            references: MutableSequence[
+                "VulnerabilityReport.Vulnerability.Details.Reference"
+            ] = proto.RepeatedField(
+                proto.MESSAGE,
+                number=6,
+                message="VulnerabilityReport.Vulnerability.Details.Reference",
+            )
+
+        class Item(proto.Message):
+            r"""OS inventory item that is affected by a vulnerability or
+            fixed as a result of a vulnerability.
+
+            Attributes:
+                installed_inventory_item_id (str):
+                    Corresponds to the ``INSTALLED_PACKAGE`` inventory item on
+                    the VM. This field displays the inventory items affected by
+                    this vulnerability. If the vulnerability report was not
+                    updated after the VM inventory update, these values might
+                    not display in VM inventory. For some operating systems,
+                    this field might be empty.
+                available_inventory_item_id (str):
+                    Corresponds to the ``AVAILABLE_PACKAGE`` inventory item on
+                    the VM. If the vulnerability report was not updated after
+                    the VM inventory update, these values might not display in
+                    VM inventory. If there is no available fix, the field is
+                    empty. The ``inventory_item`` value specifies the latest
+                    ``SoftwarePackage`` available to the VM that fixes the
+                    vulnerability.
+                fixed_cpe_uri (str):
+                    The recommended `CPE
+                    URI `__ update that
+                    contains a fix for this vulnerability.
+                upstream_fix (str):
+                    The upstream OS patch, packages, or KB that
+                    fixes the vulnerability.
+            """
+
+            installed_inventory_item_id: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+            available_inventory_item_id: str = proto.Field(
+                proto.STRING,
+                number=2,
+            )
+            fixed_cpe_uri: str = proto.Field(
+                proto.STRING,
+                number=3,
+            )
+            upstream_fix: str = proto.Field(
+                proto.STRING,
+                number=4,
+            )
+
+        details: "VulnerabilityReport.Vulnerability.Details" = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message="VulnerabilityReport.Vulnerability.Details",
+        )
+        installed_inventory_item_ids: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=2,
+        )
+        available_inventory_item_ids: MutableSequence[str] = proto.RepeatedField(
+            proto.STRING,
+            number=3,
+        )
+        create_time: timestamp_pb2.Timestamp = proto.Field(
+            proto.MESSAGE,
+            number=4,
+            message=timestamp_pb2.Timestamp,
+        )
+        update_time: timestamp_pb2.Timestamp = proto.Field(
+            proto.MESSAGE,
+            number=5,
+            message=timestamp_pb2.Timestamp,
+        )
+        items: MutableSequence[
+            "VulnerabilityReport.Vulnerability.Item"
+        ] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=6,
+            message="VulnerabilityReport.Vulnerability.Item",
+        )
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    vulnerabilities: MutableSequence[Vulnerability] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message=Vulnerability,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class GetVulnerabilityReportRequest(proto.Message):
+    r"""A request message for getting the vulnerability report for
+    the specified VM.
+
+    Attributes:
+        name (str):
+            Required. API resource name for the vulnerability resource.
+
+            Format:
+            ``projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport``
+
+            For ``{project}``, either ``project-number`` or
+            ``project-id`` can be provided. For ``{instance}``, either
+            Compute Engine ``instance-id`` or ``instance-name`` can be
+            provided.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListVulnerabilityReportsRequest(proto.Message):
+    r"""A request message for listing vulnerability reports for all
+    VM instances in the specified location.
+
+    Attributes:
+        parent (str):
+            Required. The parent resource name.
+
+            Format:
+            ``projects/{project}/locations/{location}/instances/-``
+
+            For ``{project}``, either ``project-number`` or
+            ``project-id`` can be provided.
+        page_size (int):
+            The maximum number of results to return.
+        page_token (str):
+            A pagination token returned from a previous call to
+            ``ListVulnerabilityReports`` that indicates where this
+            listing should continue from.
+        filter (str):
+            If provided, this field specifies the criteria that must be
+            met by a ``vulnerabilityReport`` API resource to be included
+            in the response.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListVulnerabilityReportsResponse(proto.Message):
+    r"""A response message for listing vulnerability reports for all
+    VM instances in the specified location.
+
+    Attributes:
+        vulnerability_reports (MutableSequence[google.cloud.osconfig_v1alpha.types.VulnerabilityReport]):
+            List of vulnerabilityReport objects.
+        next_page_token (str):
+            The pagination token to retrieve the next
+            page of vulnerabilityReport objects.
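+
+    Example:
+        A minimal paging sketch (illustrative only, not generated output;
+        it assumes the ``OsConfigZonalServiceClient`` exposed by this
+        package and uses placeholder project and zone values):
+
+        .. code-block:: python
+
+            from google.cloud import osconfig_v1alpha
+
+            client = osconfig_v1alpha.OsConfigZonalServiceClient()
+            # "my-project" and the zone are placeholders. The returned pager
+            # follows ``next_page_token`` transparently, so iterating yields
+            # every VulnerabilityReport across pages.
+            parent = "projects/my-project/locations/us-central1-a/instances/-"
+            for report in client.list_vulnerability_reports(request={"parent": parent}):
+                print(report.name)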
+ """ + + @property + def raw_page(self): + return self + + vulnerability_reports: MutableSequence["VulnerabilityReport"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VulnerabilityReport", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CVSSv3(proto.Message): + r"""Common Vulnerability Scoring System version 3. + For details, see + https://www.first.org/cvss/specification-document + + Attributes: + base_score (float): + The base score is a function of the base + metric scores. + https://www.first.org/cvss/specification-document#Base-Metrics + exploitability_score (float): + The Exploitability sub-score equation is + derived from the Base Exploitability metrics. + https://www.first.org/cvss/specification-document#2-1-Exploitability-Metrics + impact_score (float): + The Impact sub-score equation is derived from + the Base Impact metrics. + attack_vector (google.cloud.osconfig_v1alpha.types.CVSSv3.AttackVector): + This metric reflects the context by which + vulnerability exploitation is possible. + attack_complexity (google.cloud.osconfig_v1alpha.types.CVSSv3.AttackComplexity): + This metric describes the conditions beyond + the attacker's control that must exist in order + to exploit the vulnerability. + privileges_required (google.cloud.osconfig_v1alpha.types.CVSSv3.PrivilegesRequired): + This metric describes the level of privileges + an attacker must possess before successfully + exploiting the vulnerability. + user_interaction (google.cloud.osconfig_v1alpha.types.CVSSv3.UserInteraction): + This metric captures the requirement for a + human user, other than the attacker, to + participate in the successful compromise of the + vulnerable component. + scope (google.cloud.osconfig_v1alpha.types.CVSSv3.Scope): + The Scope metric captures whether a + vulnerability in one vulnerable component + impacts resources in components beyond its + security scope. + confidentiality_impact (google.cloud.osconfig_v1alpha.types.CVSSv3.Impact): + This metric measures the impact to the + confidentiality of the information resources + managed by a software component due to a + successfully exploited vulnerability. + integrity_impact (google.cloud.osconfig_v1alpha.types.CVSSv3.Impact): + This metric measures the impact to integrity + of a successfully exploited vulnerability. + availability_impact (google.cloud.osconfig_v1alpha.types.CVSSv3.Impact): + This metric measures the impact to the + availability of the impacted component resulting + from a successfully exploited vulnerability. + """ + + class AttackVector(proto.Enum): + r"""This metric reflects the context by which vulnerability + exploitation is possible. + + Values: + ATTACK_VECTOR_UNSPECIFIED (0): + Invalid value. + ATTACK_VECTOR_NETWORK (1): + The vulnerable component is bound to the + network stack and the set of possible attackers + extends beyond the other options listed below, + up to and including the entire Internet. + ATTACK_VECTOR_ADJACENT (2): + The vulnerable component is bound to the + network stack, but the attack is limited at the + protocol level to a logically adjacent topology. + ATTACK_VECTOR_LOCAL (3): + The vulnerable component is not bound to the + network stack and the attacker's path is via + read/write/execute capabilities. + ATTACK_VECTOR_PHYSICAL (4): + The attack requires the attacker to + physically touch or manipulate the vulnerable + component. 
+ """ + ATTACK_VECTOR_UNSPECIFIED = 0 + ATTACK_VECTOR_NETWORK = 1 + ATTACK_VECTOR_ADJACENT = 2 + ATTACK_VECTOR_LOCAL = 3 + ATTACK_VECTOR_PHYSICAL = 4 + + class AttackComplexity(proto.Enum): + r"""This metric describes the conditions beyond the attacker's + control that must exist in order to exploit the vulnerability. + + Values: + ATTACK_COMPLEXITY_UNSPECIFIED (0): + Invalid value. + ATTACK_COMPLEXITY_LOW (1): + Specialized access conditions or extenuating + circumstances do not exist. An attacker can + expect repeatable success when attacking the + vulnerable component. + ATTACK_COMPLEXITY_HIGH (2): + A successful attack depends on conditions + beyond the attacker's control. That is, a + successful attack cannot be accomplished at + will, but requires the attacker to invest in + some measurable amount of effort in preparation + or execution against the vulnerable component + before a successful attack can be expected. + """ + ATTACK_COMPLEXITY_UNSPECIFIED = 0 + ATTACK_COMPLEXITY_LOW = 1 + ATTACK_COMPLEXITY_HIGH = 2 + + class PrivilegesRequired(proto.Enum): + r"""This metric describes the level of privileges an attacker + must possess before successfully exploiting the vulnerability. + + Values: + PRIVILEGES_REQUIRED_UNSPECIFIED (0): + Invalid value. + PRIVILEGES_REQUIRED_NONE (1): + The attacker is unauthorized prior to attack, + and therefore does not require any access to + settings or files of the vulnerable system to + carry out an attack. + PRIVILEGES_REQUIRED_LOW (2): + The attacker requires privileges that provide + basic user capabilities that could normally + affect only settings and files owned by a user. + Alternatively, an attacker with Low privileges + has the ability to access only non-sensitive + resources. + PRIVILEGES_REQUIRED_HIGH (3): + The attacker requires privileges that provide + significant (e.g., administrative) control over + the vulnerable component allowing access to + component-wide settings and files. + """ + PRIVILEGES_REQUIRED_UNSPECIFIED = 0 + PRIVILEGES_REQUIRED_NONE = 1 + PRIVILEGES_REQUIRED_LOW = 2 + PRIVILEGES_REQUIRED_HIGH = 3 + + class UserInteraction(proto.Enum): + r"""This metric captures the requirement for a human user, other + than the attacker, to participate in the successful compromise + of the vulnerable component. + + Values: + USER_INTERACTION_UNSPECIFIED (0): + Invalid value. + USER_INTERACTION_NONE (1): + The vulnerable system can be exploited + without interaction from any user. + USER_INTERACTION_REQUIRED (2): + Successful exploitation of this vulnerability + requires a user to take some action before the + vulnerability can be exploited. + """ + USER_INTERACTION_UNSPECIFIED = 0 + USER_INTERACTION_NONE = 1 + USER_INTERACTION_REQUIRED = 2 + + class Scope(proto.Enum): + r"""The Scope metric captures whether a vulnerability in one + vulnerable component impacts resources in components beyond its + security scope. + + Values: + SCOPE_UNSPECIFIED (0): + Invalid value. + SCOPE_UNCHANGED (1): + An exploited vulnerability can only affect + resources managed by the same security + authority. + SCOPE_CHANGED (2): + An exploited vulnerability can affect + resources beyond the security scope managed by + the security authority of the vulnerable + component. 
+ """ + SCOPE_UNSPECIFIED = 0 + SCOPE_UNCHANGED = 1 + SCOPE_CHANGED = 2 + + class Impact(proto.Enum): + r"""The Impact metrics capture the effects of a successfully + exploited vulnerability on the component that suffers the worst + outcome that is most directly and predictably associated with + the attack. + + Values: + IMPACT_UNSPECIFIED (0): + Invalid value. + IMPACT_HIGH (1): + High impact. + IMPACT_LOW (2): + Low impact. + IMPACT_NONE (3): + No impact. + """ + IMPACT_UNSPECIFIED = 0 + IMPACT_HIGH = 1 + IMPACT_LOW = 2 + IMPACT_NONE = 3 + + base_score: float = proto.Field( + proto.FLOAT, + number=1, + ) + exploitability_score: float = proto.Field( + proto.FLOAT, + number=2, + ) + impact_score: float = proto.Field( + proto.FLOAT, + number=3, + ) + attack_vector: AttackVector = proto.Field( + proto.ENUM, + number=5, + enum=AttackVector, + ) + attack_complexity: AttackComplexity = proto.Field( + proto.ENUM, + number=6, + enum=AttackComplexity, + ) + privileges_required: PrivilegesRequired = proto.Field( + proto.ENUM, + number=7, + enum=PrivilegesRequired, + ) + user_interaction: UserInteraction = proto.Field( + proto.ENUM, + number=8, + enum=UserInteraction, + ) + scope: Scope = proto.Field( + proto.ENUM, + number=9, + enum=Scope, + ) + confidentiality_impact: Impact = proto.Field( + proto.ENUM, + number=10, + enum=Impact, + ) + integrity_impact: Impact = proto.Field( + proto.ENUM, + number=11, + enum=Impact, + ) + availability_impact: Impact = proto.Field( + proto.ENUM, + number=12, + enum=Impact, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-os-config/mypy.ini b/packages/google-cloud-os-config/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-os-config/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-os-config/noxfile.py b/packages/google-cloud-os-config/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-os-config/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
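+#
+# Example invocations (a sketch; it assumes nox is installed and uses the
+# session names produced by the @nox.session decorators defined below):
+#
+#   python -m nox --list          # enumerate the sessions this file defines
+#   python -m nox -s unit-3.9     # run the unit test session on Python 3.9
+#   python -m nox -s lint blacken # run several sessions in one invocation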
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run("py.test", "tests/unit")
diff --git a/packages/google-cloud-os-config/renovate.json b/packages/google-cloud-os-config/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-os-config/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-os-config/samples/AUTHORING_GUIDE.md b/packages/google-cloud-os-config/samples/AUTHORING_GUIDE.md
new file mode 100644
index 000000000000..55c97b32f4c1
--- /dev/null
+++ b/packages/google-cloud-os-config/samples/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
\ No newline at end of file
diff --git a/packages/google-cloud-os-config/samples/CONTRIBUTING.md b/packages/google-cloud-os-config/samples/CONTRIBUTING.md
new file mode 100644
index 000000000000..34c882b6f1a3
--- /dev/null
+++ b/packages/google-cloud-os-config/samples/CONTRIBUTING.md
@@ -0,0 +1 @@
+See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
\ No newline at end of file
diff --git a/packages/google-cloud-os-config/scripts/decrypt-secrets.sh b/packages/google-cloud-os-config/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..0018b421ddf8
--- /dev/null
+++ b/packages/google-cloud-os-config/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2023 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
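+#
+# Usage sketch (illustrative; it assumes an authenticated gcloud CLI).
+# SECRET_MANAGER_PROJECT is optional and falls back to
+# cloud-devrel-kokoro-resources below; "my-project" is a placeholder:
+#
+#   SECRET_MANAGER_PROJECT=my-project ./scripts/decrypt-secrets.sh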
+if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-os-config/scripts/fixup_keywords.py b/packages/google-cloud-os-config/scripts/fixup_keywords.py new file mode 100644 index 000000000000..45b27c52d4cb --- /dev/null +++ b/packages/google-cloud-os-config/scripts/fixup_keywords.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class osconfigCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_patch_job': ('name', ), + 'create_patch_deployment': ('parent', 'patch_deployment_id', 'patch_deployment', ), + 'delete_patch_deployment': ('name', ), + 'execute_patch_job': ('parent', 'instance_filter', 'description', 'patch_config', 'duration', 'dry_run', 'display_name', ), + 'get_patch_deployment': ('name', ), + 'get_patch_job': ('name', ), + 'list_patch_deployments': ('parent', 'page_size', 'page_token', ), + 'list_patch_job_instance_details': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_patch_jobs': ('parent', 'page_size', 'page_token', 'filter', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=osconfigCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the osconfig client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
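+
+Example invocation (an illustrative sketch; the directory values are
+placeholders):
+
+    python scripts/fixup_keywords.py \
+        --input-directory google/cloud \
+        --output-directory /tmp/fixed_osconfig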
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-os-config/scripts/fixup_osconfig_v1_keywords.py b/packages/google-cloud-os-config/scripts/fixup_osconfig_v1_keywords.py new file mode 100644 index 000000000000..bf31df5c4554 --- /dev/null +++ b/packages/google-cloud-os-config/scripts/fixup_osconfig_v1_keywords.py @@ -0,0 +1,199 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class osconfigCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+        'cancel_patch_job': ('name', ),
+        'create_os_policy_assignment': ('parent', 'os_policy_assignment', 'os_policy_assignment_id', ),
+        'create_patch_deployment': ('parent', 'patch_deployment_id', 'patch_deployment', ),
+        'delete_os_policy_assignment': ('name', ),
+        'delete_patch_deployment': ('name', ),
+        'execute_patch_job': ('parent', 'instance_filter', 'description', 'patch_config', 'duration', 'dry_run', 'display_name', 'rollout', ),
+        'get_inventory': ('name', 'view', ),
+        'get_os_policy_assignment': ('name', ),
+        'get_os_policy_assignment_report': ('name', ),
+        'get_patch_deployment': ('name', ),
+        'get_patch_job': ('name', ),
+        'get_vulnerability_report': ('name', ),
+        'list_inventories': ('parent', 'view', 'page_size', 'page_token', 'filter', ),
+        'list_os_policy_assignment_reports': ('parent', 'page_size', 'filter', 'page_token', ),
+        'list_os_policy_assignment_revisions': ('name', 'page_size', 'page_token', ),
+        'list_os_policy_assignments': ('parent', 'page_size', 'page_token', ),
+        'list_patch_deployments': ('parent', 'page_size', 'page_token', ),
+        'list_patch_job_instance_details': ('parent', 'page_size', 'page_token', 'filter', ),
+        'list_patch_jobs': ('parent', 'page_size', 'page_token', 'filter', ),
+        'list_vulnerability_reports': ('parent', 'page_size', 'page_token', 'filter', ),
+        'pause_patch_deployment': ('name', ),
+        'resume_patch_deployment': ('name', ),
+        'update_os_policy_assignment': ('os_policy_assignment', 'update_mask', ),
+        'update_patch_deployment': ('patch_deployment', 'update_mask', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=osconfigCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the osconfig client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-os-config/scripts/fixup_osconfig_v1alpha_keywords.py b/packages/google-cloud-os-config/scripts/fixup_osconfig_v1alpha_keywords.py new file mode 100644 index 000000000000..4fcc9d04bf17 --- /dev/null +++ b/packages/google-cloud-os-config/scripts/fixup_osconfig_v1alpha_keywords.py @@ -0,0 +1,189 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class osconfigCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_os_policy_assignment': ('parent', 'os_policy_assignment', 'os_policy_assignment_id', ), + 'delete_os_policy_assignment': ('name', ), + 'get_instance_os_policies_compliance': ('name', ), + 'get_inventory': ('name', 'view', ), + 'get_os_policy_assignment': ('name', ), + 'get_os_policy_assignment_report': ('name', ), + 'get_vulnerability_report': ('name', ), + 'list_instance_os_policies_compliances': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_inventories': ('parent', 'view', 'page_size', 'page_token', 'filter', ), + 'list_os_policy_assignment_reports': ('parent', 'page_size', 'filter', 'page_token', ), + 'list_os_policy_assignment_revisions': ('name', 'page_size', 'page_token', ), + 'list_os_policy_assignments': ('parent', 'page_size', 'page_token', ), + 'list_vulnerability_reports': ('parent', 'page_size', 'page_token', 'filter', ), + 'update_os_policy_assignment': ('os_policy_assignment', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=osconfigCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the osconfig client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-os-config/scripts/readme-gen/readme_gen.py b/packages/google-cloud-os-config/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-os-config/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+    trim_blocks=True,
+    loader=jinja2.FileSystemLoader(
+        os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
+    ),
+    autoescape=True,
+)
+
+README_TMPL = jinja_env.get_template("README.tmpl.rst")
+
+
+def get_help(file):
+    return subprocess.check_output(["python", file, "--help"]).decode()
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("source")
+    parser.add_argument("--destination", default="README.rst")
+
+    args = parser.parse_args()
+
+    source = os.path.abspath(args.source)
+    root = os.path.dirname(source)
+    destination = os.path.join(root, args.destination)
+
+    jinja_env.globals["get_help"] = get_help
+
+    with io.open(source, "r") as f:
+        config = yaml.safe_load(f)
+
+    # This allows get_help to execute in the right directory.
+    os.chdir(root)
+
+    output = README_TMPL.render(config)
+
+    with io.open(destination, "w") as f:
+        f.write(output)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/packages/google-cloud-os-config/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-os-config/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 000000000000..4fd239765b0a
--- /dev/null
+++ b/packages/google-cloud-os-config/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+   become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+    $ python {{sample.file}}
+{% if sample.show_help %}
+
+    {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+   https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+   https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+   https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-os-config/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-os-config/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000000..1446b94a5e3a
--- /dev/null
+++ b/packages/google-cloud-os-config/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+    https://cloud.google.com/docs/authentication/getting-started
diff --git a/packages/google-cloud-os-config/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-os-config/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000000..11957ce2714a
--- /dev/null
+++ b/packages/google-cloud-os-config/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+    https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/packages/google-cloud-os-config/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-os-config/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..6f069c6c87a5
--- /dev/null
+++ b/packages/google-cloud-os-config/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+    .. code-block:: bash
+
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
+
+    .. code-block:: bash
+
+        $ virtualenv env
+        $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+    .. code-block:: bash
+
+        $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. 
_virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-os-config/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-os-config/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-os-config/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-os-config/setup.cfg b/packages/google-cloud-os-config/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-os-config/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-os-config/setup.py b/packages/google-cloud-os-config/setup.py new file mode 100644 index 000000000000..1f618ba44ec4 --- /dev/null +++ b/packages/google-cloud-os-config/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
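Stepping back to readme_gen.py above: it renders README.tmpl.rst from a yaml config whose keys mirror the template variables (product, description, setup, samples). A hypothetical minimal config, sketched here; the field names come from the template, the values are invented:

import yaml

config = yaml.safe_load("""
product:
  name: Cloud OS Config
  url: https://cloud.google.com/compute/docs/os-patch-management
  description: Patch management for VM instances.
description: These samples demonstrate calling the OS Config API.
setup:
- auth
- install_deps
samples:
- name: Quickstart
  file: quickstart.py
  show_help: true
""")

# Each "setup" entry selects one of the *.tmpl.rst include files above.
assert config["setup"] == ["auth", "install_deps"]
assert config["samples"][0]["file"] == "quickstart.py"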
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-os-config" + + +description = "Google Cloud Os Config API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/osconfig/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-os-config/testing/.gitignore b/packages/google-cloud-os-config/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-os-config/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-os-config/testing/constraints-3.10.txt b/packages/google-cloud-os-config/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-os-config/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-os-config/testing/constraints-3.11.txt b/packages/google-cloud-os-config/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-os-config/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
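One detail of setup.py above worth spelling out: the version is obtained by exec'ing google/cloud/osconfig/gapic_version.py into a dict, and the leading digit selects the development-status classifier. A sketch with a stand-in file body (the version string here is hypothetical):

# Stand-in for gapic_version.py; setup.py execs the real file's contents.
version = {}
exec('__version__ = "1.15.0"', version)
version = version["__version__"]

# The first digit picks the trove classifier, exactly as in setup.py above.
release_status = (
    "Development Status :: 4 - Beta"
    if version[0] == "0"
    else "Development Status :: 5 - Production/Stable"
)
assert release_status == "Development Status :: 5 - Production/Stable"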
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-os-config/testing/constraints-3.12.txt b/packages/google-cloud-os-config/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-os-config/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-os-config/testing/constraints-3.7.txt b/packages/google-cloud-os-config/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-os-config/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-os-config/testing/constraints-3.8.txt b/packages/google-cloud-os-config/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-os-config/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-os-config/testing/constraints-3.9.txt b/packages/google-cloud-os-config/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-os-config/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-os-config/tests/__init__.py b/packages/google-cloud-os-config/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-os-config/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
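constraints-3.7.txt above states its own rule: pin each dependency to the lower bound declared in setup.py, so the oldest supported versions are what the Python 3.7 tests exercise. A small sketch of that mapping (the helper below is illustrative, not part of the repo):

import re

def lower_bound_pin(requirement: str) -> str:
    """Turn a setup.py range like 'proto-plus >= 1.22.0, <2.0.0dev' into a pin."""
    name = re.split(r"[\s\[><=!]", requirement, maxsplit=1)[0]
    lower = re.search(r">=\s*([^,\s]+)", requirement)
    return f"{name}=={lower.group(1)}" if lower else requirement

# Reproduces the pins in constraints-3.7.txt from the setup.py bounds above.
print(lower_bound_pin("google-api-core[grpc] >= 1.34.0, <3.0.0dev"))  # google-api-core==1.34.0
print(lower_bound_pin("proto-plus >= 1.22.0, <2.0.0dev"))             # proto-plus==1.22.0
print(lower_bound_pin("protobuf>=3.19.5,<5.0.0dev"))                  # protobuf==3.19.5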
+# diff --git a/packages/google-cloud-os-config/tests/system/__init__.py b/packages/google-cloud-os-config/tests/system/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-os-config/tests/system/smoke_test.py b/packages/google-cloud-os-config/tests/system/smoke_test.py new file mode 100644 index 000000000000..e617c84f3f26 --- /dev/null +++ b/packages/google-cloud-os-config/tests/system/smoke_test.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import osconfig_v1 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_patch_jobs(project_id: str, transport: str): + client = osconfig_v1.OsConfigServiceClient(transport=transport) + + parent = client.common_project_path(project_id) + client.list_patch_jobs(parent=parent) + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. + assert True diff --git a/packages/google-cloud-os-config/tests/unit/__init__.py b/packages/google-cloud-os-config/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-os-config/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-os-config/tests/unit/gapic/__init__.py b/packages/google-cloud-os-config/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-os-config/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
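The smoke test above only verifies that the client can reach the API server. A hedged sketch of running the same call by hand, assuming application-default credentials are available and a PROJECT_ID environment variable is set:

import os

from google.cloud import osconfig_v1

client = osconfig_v1.OsConfigServiceClient(transport="rest")
parent = client.common_project_path(os.environ["PROJECT_ID"])

# list_patch_jobs returns a pager; iterating it completes the round trip.
for patch_job in client.list_patch_jobs(parent=parent):
    print(patch_job.name)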
+# diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/__init__.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py new file mode 100644 index 000000000000..5129b1a2846e --- /dev/null +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_service.py @@ -0,0 +1,8851 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import datetime_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.osconfig_v1.services.os_config_service import ( + OsConfigServiceAsyncClient, + OsConfigServiceClient, + pagers, + transports, +) +from google.cloud.osconfig_v1.types import ( + osconfig_common, + patch_deployments, + patch_jobs, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert OsConfigServiceClient._get_default_mtls_endpoint(None) is None + assert ( + OsConfigServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + OsConfigServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + OsConfigServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OsConfigServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OsConfigServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OsConfigServiceClient, "grpc"), + (OsConfigServiceAsyncClient, "grpc_asyncio"), + (OsConfigServiceClient, "rest"), + ], +) +def test_os_config_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "osconfig.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://osconfig.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OsConfigServiceGrpcTransport, "grpc"), + (transports.OsConfigServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.OsConfigServiceRestTransport, "rest"), + ], +) +def test_os_config_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OsConfigServiceClient, "grpc"), + (OsConfigServiceAsyncClient, "grpc_asyncio"), + (OsConfigServiceClient, "rest"), + ], +) +def test_os_config_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "osconfig.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://osconfig.googleapis.com" + ) + + +def test_os_config_service_client_get_transport_class(): + transport = OsConfigServiceClient.get_transport_class() + available_transports = [ + transports.OsConfigServiceGrpcTransport, + transports.OsConfigServiceRestTransport, + ] + assert transport in available_transports + + transport = OsConfigServiceClient.get_transport_class("grpc") + assert transport == transports.OsConfigServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OsConfigServiceClient, transports.OsConfigServiceGrpcTransport, "grpc"), + ( + OsConfigServiceAsyncClient, + transports.OsConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (OsConfigServiceClient, transports.OsConfigServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + OsConfigServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigServiceClient), +) +@mock.patch.object( + OsConfigServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigServiceAsyncClient), +) +def test_os_config_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(OsConfigServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(OsConfigServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + OsConfigServiceClient, + transports.OsConfigServiceGrpcTransport, + "grpc", + "true", + ), + ( + OsConfigServiceAsyncClient, + transports.OsConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + OsConfigServiceClient, + transports.OsConfigServiceGrpcTransport, + "grpc", + "false", + ), + ( + OsConfigServiceAsyncClient, + transports.OsConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + OsConfigServiceClient, + transports.OsConfigServiceRestTransport, + "rest", + "true", + ), + ( + OsConfigServiceClient, + transports.OsConfigServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + OsConfigServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigServiceClient), +) +@mock.patch.object( + OsConfigServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_os_config_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [OsConfigServiceClient, OsConfigServiceAsyncClient] +) +@mock.patch.object( + OsConfigServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigServiceClient), +) +@mock.patch.object( + OsConfigServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigServiceAsyncClient), +) +def test_os_config_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (OsConfigServiceClient, transports.OsConfigServiceGrpcTransport, "grpc"), + ( + OsConfigServiceAsyncClient, + transports.OsConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (OsConfigServiceClient, transports.OsConfigServiceRestTransport, "rest"), + ], +) +def test_os_config_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OsConfigServiceClient, + transports.OsConfigServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OsConfigServiceAsyncClient, + transports.OsConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (OsConfigServiceClient, transports.OsConfigServiceRestTransport, "rest", None), + ], +) +def test_os_config_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
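The get_mtls_endpoint_and_cert_source cases above reduce to a small decision rule on GOOGLE_API_USE_MTLS_ENDPOINT. A sketch of exercising the two short-circuit values outside the test harness (behavior as asserted by the tests above):

import os
from unittest import mock

from google.cloud.osconfig_v1 import OsConfigServiceClient

# "never" pins the plain endpoint; "always" pins the mTLS endpoint.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
    endpoint, cert = OsConfigServiceClient.get_mtls_endpoint_and_cert_source()
    assert (endpoint, cert) == (OsConfigServiceClient.DEFAULT_ENDPOINT, None)

with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
    endpoint, cert = OsConfigServiceClient.get_mtls_endpoint_and_cert_source()
    assert (endpoint, cert) == (OsConfigServiceClient.DEFAULT_MTLS_ENDPOINT, None)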
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_os_config_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.osconfig_v1.services.os_config_service.transports.OsConfigServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = OsConfigServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OsConfigServiceClient, + transports.OsConfigServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OsConfigServiceAsyncClient, + transports.OsConfigServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_os_config_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "osconfig.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="osconfig.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_jobs.ExecutePatchJobRequest, + dict, + ], +) +def test_execute_patch_job(request_type, transport: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_patch_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = patch_jobs.PatchJob( + name="name_value", + display_name="display_name_value", + description="description_value", + state=patch_jobs.PatchJob.State.STARTED, + dry_run=True, + error_message="error_message_value", + percent_complete=0.1705, + patch_deployment="patch_deployment_value", + ) + response = client.execute_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ExecutePatchJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +def test_execute_patch_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.execute_patch_job), "__call__" + ) as call: + client.execute_patch_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ExecutePatchJobRequest() + + +@pytest.mark.asyncio +async def test_execute_patch_job_async( + transport: str = "grpc_asyncio", request_type=patch_jobs.ExecutePatchJobRequest +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_patch_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_jobs.PatchJob( + name="name_value", + display_name="display_name_value", + description="description_value", + state=patch_jobs.PatchJob.State.STARTED, + dry_run=True, + error_message="error_message_value", + percent_complete=0.1705, + patch_deployment="patch_deployment_value", + ) + ) + response = await client.execute_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ExecutePatchJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +@pytest.mark.asyncio +async def test_execute_patch_job_async_from_dict(): + await test_execute_patch_job_async(request_type=dict) + + +def test_execute_patch_job_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.ExecutePatchJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_patch_job), "__call__" + ) as call: + call.return_value = patch_jobs.PatchJob() + client.execute_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_execute_patch_job_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.ExecutePatchJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.execute_patch_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(patch_jobs.PatchJob()) + await client.execute_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + patch_jobs.GetPatchJobRequest, + dict, + ], +) +def test_get_patch_job(request_type, transport: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_patch_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = patch_jobs.PatchJob( + name="name_value", + display_name="display_name_value", + description="description_value", + state=patch_jobs.PatchJob.State.STARTED, + dry_run=True, + error_message="error_message_value", + percent_complete=0.1705, + patch_deployment="patch_deployment_value", + ) + response = client.get_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.GetPatchJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +def test_get_patch_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_patch_job), "__call__") as call: + client.get_patch_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.GetPatchJobRequest() + + +@pytest.mark.asyncio +async def test_get_patch_job_async( + transport: str = "grpc_asyncio", request_type=patch_jobs.GetPatchJobRequest +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_patch_job), "__call__") as call: + # Designate an appropriate return value for the call. 
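+        # FakeUnaryUnaryCall wraps the response so the mocked stub behaves
+        # like an awaitable grpc_asyncio unary-unary call.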
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_jobs.PatchJob( + name="name_value", + display_name="display_name_value", + description="description_value", + state=patch_jobs.PatchJob.State.STARTED, + dry_run=True, + error_message="error_message_value", + percent_complete=0.1705, + patch_deployment="patch_deployment_value", + ) + ) + response = await client.get_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.GetPatchJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +@pytest.mark.asyncio +async def test_get_patch_job_async_from_dict(): + await test_get_patch_job_async(request_type=dict) + + +def test_get_patch_job_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.GetPatchJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_patch_job), "__call__") as call: + call.return_value = patch_jobs.PatchJob() + client.get_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_patch_job_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.GetPatchJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_patch_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(patch_jobs.PatchJob()) + await client.get_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_patch_job_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_patch_job), "__call__") as call: + # Designate an appropriate return value for the call. 
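+        # A default PatchJob is sufficient; flattened-field tests only
+        # inspect the request that reaches the transport.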
+        call.return_value = patch_jobs.PatchJob()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_patch_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_patch_job_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_patch_job(
+            patch_jobs.GetPatchJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_patch_job_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_patch_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(patch_jobs.PatchJob())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_patch_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_patch_job_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_patch_job(
+            patch_jobs.GetPatchJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        patch_jobs.CancelPatchJobRequest,
+        dict,
+    ],
+)
+def test_cancel_patch_job(request_type, transport: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_patch_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = patch_jobs.PatchJob(
+            name="name_value",
+            display_name="display_name_value",
+            description="description_value",
+            state=patch_jobs.PatchJob.State.STARTED,
+            dry_run=True,
+            error_message="error_message_value",
+            percent_complete=0.1705,
+            patch_deployment="patch_deployment_value",
+        )
+        response = client.cancel_patch_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == patch_jobs.CancelPatchJobRequest()
+
+    # Establish that the response is the type that we expect.
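+    # proto-plus exposes message fields as plain Python values, so they can
+    # be compared directly below.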
+ assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +def test_cancel_patch_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_patch_job), "__call__") as call: + client.cancel_patch_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.CancelPatchJobRequest() + + +@pytest.mark.asyncio +async def test_cancel_patch_job_async( + transport: str = "grpc_asyncio", request_type=patch_jobs.CancelPatchJobRequest +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_patch_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_jobs.PatchJob( + name="name_value", + display_name="display_name_value", + description="description_value", + state=patch_jobs.PatchJob.State.STARTED, + dry_run=True, + error_message="error_message_value", + percent_complete=0.1705, + patch_deployment="patch_deployment_value", + ) + ) + response = await client.cancel_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.CancelPatchJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +@pytest.mark.asyncio +async def test_cancel_patch_job_async_from_dict(): + await test_cancel_patch_job_async(request_type=dict) + + +def test_cancel_patch_job_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.CancelPatchJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
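+    # Only the routing header matters in this test, so an empty PatchJob
+    # return value keeps the focus on the metadata.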
+ with mock.patch.object(type(client.transport.cancel_patch_job), "__call__") as call: + call.return_value = patch_jobs.PatchJob() + client.cancel_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_patch_job_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.CancelPatchJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_patch_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(patch_jobs.PatchJob()) + await client.cancel_patch_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + patch_jobs.ListPatchJobsRequest, + dict, + ], +) +def test_list_patch_jobs(request_type, transport: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = patch_jobs.ListPatchJobsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_patch_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ListPatchJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPatchJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_patch_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call: + client.list_patch_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ListPatchJobsRequest() + + +@pytest.mark.asyncio +async def test_list_patch_jobs_async( + transport: str = "grpc_asyncio", request_type=patch_jobs.ListPatchJobsRequest +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_jobs.ListPatchJobsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_patch_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ListPatchJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPatchJobsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_patch_jobs_async_from_dict(): + await test_list_patch_jobs_async(request_type=dict) + + +def test_list_patch_jobs_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.ListPatchJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call: + call.return_value = patch_jobs.ListPatchJobsResponse() + client.list_patch_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_patch_jobs_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.ListPatchJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_jobs.ListPatchJobsResponse() + ) + await client.list_patch_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
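+    # Metadata travels as a keyword argument on the mocked call, hence the
+    # lookup in kw["metadata"] below.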
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_patch_jobs_flattened():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = patch_jobs.ListPatchJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_patch_jobs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_patch_jobs_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_patch_jobs(
+            patch_jobs.ListPatchJobsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_patch_jobs_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_jobs.ListPatchJobsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_patch_jobs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_patch_jobs_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_patch_jobs(
+            patch_jobs.ListPatchJobsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_patch_jobs_pager(transport_name: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call:
+        # Set the response to a series of pages.
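+        # side_effect yields one response per page request; the trailing
+        # RuntimeError would only surface if the pager fetched past the
+        # final page's empty next_page_token.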
+ call.side_effect = ( + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_patch_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, patch_jobs.PatchJob) for i in results) + + +def test_list_patch_jobs_pages(transport_name: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_patch_jobs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_patch_jobs_async_pager(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_patch_jobs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, patch_jobs.PatchJob) for i in responses) + + +@pytest.mark.asyncio +async def test_list_patch_jobs_async_pages(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_jobs), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
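+        # The same four-page series as the sync tests, this time driven
+        # through the async pager.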
+ call.side_effect = ( + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_patch_jobs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + patch_jobs.ListPatchJobInstanceDetailsRequest, + dict, + ], +) +def test_list_patch_job_instance_details(request_type, transport: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_job_instance_details), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = patch_jobs.ListPatchJobInstanceDetailsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_patch_job_instance_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ListPatchJobInstanceDetailsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPatchJobInstanceDetailsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_patch_job_instance_details_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_job_instance_details), "__call__" + ) as call: + client.list_patch_job_instance_details() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ListPatchJobInstanceDetailsRequest() + + +@pytest.mark.asyncio +async def test_list_patch_job_instance_details_async( + transport: str = "grpc_asyncio", + request_type=patch_jobs.ListPatchJobInstanceDetailsRequest, +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_patch_job_instance_details), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_jobs.ListPatchJobInstanceDetailsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_patch_job_instance_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_jobs.ListPatchJobInstanceDetailsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPatchJobInstanceDetailsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_patch_job_instance_details_async_from_dict(): + await test_list_patch_job_instance_details_async(request_type=dict) + + +def test_list_patch_job_instance_details_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.ListPatchJobInstanceDetailsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_job_instance_details), "__call__" + ) as call: + call.return_value = patch_jobs.ListPatchJobInstanceDetailsResponse() + client.list_patch_job_instance_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_patch_job_instance_details_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_jobs.ListPatchJobInstanceDetailsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_job_instance_details), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_jobs.ListPatchJobInstanceDetailsResponse() + ) + await client.list_patch_job_instance_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_patch_job_instance_details_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_job_instance_details), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
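+        # The flattened "parent" keyword should be copied onto the request
+        # object that reaches the transport.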
+        call.return_value = patch_jobs.ListPatchJobInstanceDetailsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_patch_job_instance_details(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_patch_job_instance_details_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_patch_job_instance_details(
+            patch_jobs.ListPatchJobInstanceDetailsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_patch_job_instance_details_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_patch_job_instance_details), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_jobs.ListPatchJobInstanceDetailsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_patch_job_instance_details(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_patch_job_instance_details_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_patch_job_instance_details(
+            patch_jobs.ListPatchJobInstanceDetailsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_patch_job_instance_details_pager(transport_name: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_patch_job_instance_details), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
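+        # Four pages with 3, 0, 1 and 2 items respectively, matching the six
+        # results asserted at the end of the test.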
+ call.side_effect = ( + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_patch_job_instance_details(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, patch_jobs.PatchJobInstanceDetails) for i in results) + + +def test_list_patch_job_instance_details_pages(transport_name: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_job_instance_details), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + ), + RuntimeError, + ) + pages = list(client.list_patch_job_instance_details(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_patch_job_instance_details_async_pager(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_job_instance_details), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_patch_job_instance_details( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, patch_jobs.PatchJobInstanceDetails) for i in responses) + + +@pytest.mark.asyncio +async def test_list_patch_job_instance_details_async_pages(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_job_instance_details), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_patch_job_instance_details(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.CreatePatchDeploymentRequest, + dict, + ], +) +def test_create_patch_deployment(request_type, transport: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
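+        # Populating a few representative scalar fields is sufficient; the
+        # mocked stub simply echoes this message back.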
+ call.return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + response = client.create_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.CreatePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_create_patch_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_patch_deployment), "__call__" + ) as call: + client.create_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.CreatePatchDeploymentRequest() + + +@pytest.mark.asyncio +async def test_create_patch_deployment_async( + transport: str = "grpc_asyncio", + request_type=patch_deployments.CreatePatchDeploymentRequest, +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + ) + response = await client.create_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.CreatePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +@pytest.mark.asyncio +async def test_create_patch_deployment_async_from_dict(): + await test_create_patch_deployment_async(request_type=dict) + + +def test_create_patch_deployment_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.CreatePatchDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_patch_deployment), "__call__" + ) as call: + call.return_value = patch_deployments.PatchDeployment() + client.create_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_patch_deployment_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.CreatePatchDeploymentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_patch_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment() + ) + await client.create_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_patch_deployment_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = patch_deployments.PatchDeployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_patch_deployment( + parent="parent_value", + patch_deployment=patch_deployments.PatchDeployment(name="name_value"), + patch_deployment_id="patch_deployment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].patch_deployment + mock_val = patch_deployments.PatchDeployment(name="name_value") + assert arg == mock_val + arg = args[0].patch_deployment_id + mock_val = "patch_deployment_id_value" + assert arg == mock_val + + +def test_create_patch_deployment_flattened_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_patch_deployment( + patch_deployments.CreatePatchDeploymentRequest(), + parent="parent_value", + patch_deployment=patch_deployments.PatchDeployment(name="name_value"), + patch_deployment_id="patch_deployment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_patch_deployment_flattened_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.create_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_deployments.PatchDeployment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_patch_deployment(
+            parent="parent_value",
+            patch_deployment=patch_deployments.PatchDeployment(name="name_value"),
+            patch_deployment_id="patch_deployment_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].patch_deployment
+        mock_val = patch_deployments.PatchDeployment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].patch_deployment_id
+        mock_val = "patch_deployment_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_patch_deployment_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_patch_deployment(
+            patch_deployments.CreatePatchDeploymentRequest(),
+            parent="parent_value",
+            patch_deployment=patch_deployments.PatchDeployment(name="name_value"),
+            patch_deployment_id="patch_deployment_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        patch_deployments.GetPatchDeploymentRequest,
+        dict,
+    ],
+)
+def test_get_patch_deployment(request_type, transport: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = patch_deployments.PatchDeployment(
+            name="name_value",
+            description="description_value",
+            state=patch_deployments.PatchDeployment.State.ACTIVE,
+        )
+        response = client.get_patch_deployment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == patch_deployments.GetPatchDeploymentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, patch_deployments.PatchDeployment)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.state == patch_deployments.PatchDeployment.State.ACTIVE
+
+
+def test_get_patch_deployment_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.get_patch_deployment), "__call__" + ) as call: + client.get_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.GetPatchDeploymentRequest() + + +@pytest.mark.asyncio +async def test_get_patch_deployment_async( + transport: str = "grpc_asyncio", + request_type=patch_deployments.GetPatchDeploymentRequest, +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + ) + response = await client.get_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.GetPatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +@pytest.mark.asyncio +async def test_get_patch_deployment_async_from_dict(): + await test_get_patch_deployment_async(request_type=dict) + + +def test_get_patch_deployment_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.GetPatchDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_patch_deployment), "__call__" + ) as call: + call.return_value = patch_deployments.PatchDeployment() + client.get_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_patch_deployment_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.GetPatchDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_patch_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment() + ) + await client.get_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. 
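+        # For async mocks only a non-empty call list is asserted, since
+        # awaiting the call may record more than one entry.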
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_patch_deployment_flattened():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = patch_deployments.PatchDeployment()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_patch_deployment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_patch_deployment_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_patch_deployment(
+            patch_deployments.GetPatchDeploymentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_patch_deployment_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_deployments.PatchDeployment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_patch_deployment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_patch_deployment_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_patch_deployment(
+            patch_deployments.GetPatchDeploymentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        patch_deployments.ListPatchDeploymentsRequest,
+        dict,
+    ],
+)
+def test_list_patch_deployments(request_type, transport: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_patch_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = patch_deployments.ListPatchDeploymentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_patch_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.ListPatchDeploymentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPatchDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_patch_deployments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_deployments), "__call__" + ) as call: + client.list_patch_deployments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.ListPatchDeploymentsRequest() + + +@pytest.mark.asyncio +async def test_list_patch_deployments_async( + transport: str = "grpc_asyncio", + request_type=patch_deployments.ListPatchDeploymentsRequest, +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_deployments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.ListPatchDeploymentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_patch_deployments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.ListPatchDeploymentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPatchDeploymentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_patch_deployments_async_from_dict(): + await test_list_patch_deployments_async(request_type=dict) + + +def test_list_patch_deployments_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.ListPatchDeploymentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_deployments), "__call__" + ) as call: + call.return_value = patch_deployments.ListPatchDeploymentsResponse() + client.list_patch_deployments(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_patch_deployments_field_headers_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = patch_deployments.ListPatchDeploymentsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_patch_deployments), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_deployments.ListPatchDeploymentsResponse()
+        )
+        await client.list_patch_deployments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_patch_deployments_flattened():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_patch_deployments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = patch_deployments.ListPatchDeploymentsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_patch_deployments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_patch_deployments_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_patch_deployments(
+            patch_deployments.ListPatchDeploymentsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_patch_deployments_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_patch_deployments), "__call__"
+    ) as call:
+        # Designate an appropriate awaitable return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_deployments.ListPatchDeploymentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_patch_deployments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_patch_deployments_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_patch_deployments(
+            patch_deployments.ListPatchDeploymentsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_patch_deployments_pager(transport_name: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_patch_deployments), "__call__"
+    ) as call:
+        # Set the response to a series of pages. The trailing RuntimeError
+        # guards against the pager fetching more pages than were provided.
+        call.side_effect = (
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                ],
+                next_page_token="abc",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[],
+                next_page_token="def",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                ],
+                next_page_token="ghi",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_patch_deployments(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, patch_deployments.PatchDeployment) for i in results)
+
+
+def test_list_patch_deployments_pages(transport_name: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_patch_deployments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                ],
+                next_page_token="abc",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[],
+                next_page_token="def",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                ],
+                next_page_token="ghi",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_patch_deployments(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_patch_deployments_async_pager():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
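+    # (new_callable=mock.AsyncMock is needed here: the async pager awaits each
+    # page fetch, so the patched "__call__" must return an awaitable.)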
+    with mock.patch.object(
+        type(client.transport.list_patch_deployments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                ],
+                next_page_token="abc",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[],
+                next_page_token="def",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                ],
+                next_page_token="ghi",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_patch_deployments(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, patch_deployments.PatchDeployment) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_patch_deployments_async_pages():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_patch_deployments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                ],
+                next_page_token="abc",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[],
+                next_page_token="def",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                ],
+                next_page_token="ghi",
+            ),
+            patch_deployments.ListPatchDeploymentsResponse(
+                patch_deployments=[
+                    patch_deployments.PatchDeployment(),
+                    patch_deployments.PatchDeployment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_patch_deployments(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        patch_deployments.DeletePatchDeploymentRequest,
+        dict,
+    ],
+)
+def test_delete_patch_deployment(request_type, transport: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
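+        # (DeletePatchDeployment returns google.protobuf.Empty, which the
+        # client surfaces as None.)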
+ call.return_value = None + response = client.delete_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.DeletePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_patch_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_patch_deployment), "__call__" + ) as call: + client.delete_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.DeletePatchDeploymentRequest() + + +@pytest.mark.asyncio +async def test_delete_patch_deployment_async( + transport: str = "grpc_asyncio", + request_type=patch_deployments.DeletePatchDeploymentRequest, +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.DeletePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_patch_deployment_async_from_dict(): + await test_delete_patch_deployment_async(request_type=dict) + + +def test_delete_patch_deployment_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.DeletePatchDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_patch_deployment), "__call__" + ) as call: + call.return_value = None + client.delete_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_patch_deployment_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
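+    # (These routing fields travel in the "x-goog-request-params" metadata
+    # entry, which the assertions below check.)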
+    request = patch_deployments.DeletePatchDeploymentRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_patch_deployment), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_patch_deployment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_patch_deployment_flattened():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_patch_deployment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_patch_deployment_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_patch_deployment(
+            patch_deployments.DeletePatchDeploymentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_patch_deployment_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate awaitable return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_patch_deployment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_patch_deployment_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
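+    # (The generated surface raises ValueError rather than silently merging
+    # the two argument styles.)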
+ with pytest.raises(ValueError): + await client.delete_patch_deployment( + patch_deployments.DeletePatchDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.UpdatePatchDeploymentRequest, + dict, + ], +) +def test_update_patch_deployment(request_type, transport: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + response = client.update_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.UpdatePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_update_patch_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_patch_deployment), "__call__" + ) as call: + client.update_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.UpdatePatchDeploymentRequest() + + +@pytest.mark.asyncio +async def test_update_patch_deployment_async( + transport: str = "grpc_asyncio", + request_type=patch_deployments.UpdatePatchDeploymentRequest, +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + ) + response = await client.update_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.UpdatePatchDeploymentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +@pytest.mark.asyncio +async def test_update_patch_deployment_async_from_dict(): + await test_update_patch_deployment_async(request_type=dict) + + +def test_update_patch_deployment_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.UpdatePatchDeploymentRequest() + + request.patch_deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_patch_deployment), "__call__" + ) as call: + call.return_value = patch_deployments.PatchDeployment() + client.update_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "patch_deployment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_patch_deployment_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.UpdatePatchDeploymentRequest() + + request.patch_deployment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_patch_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment() + ) + await client.update_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "patch_deployment.name=name_value", + ) in kw["metadata"] + + +def test_update_patch_deployment_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = patch_deployments.PatchDeployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_patch_deployment( + patch_deployment=patch_deployments.PatchDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].patch_deployment
+        mock_val = patch_deployments.PatchDeployment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_patch_deployment_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_patch_deployment(
+            patch_deployments.UpdatePatchDeploymentRequest(),
+            patch_deployment=patch_deployments.PatchDeployment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_patch_deployment_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate awaitable return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_deployments.PatchDeployment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_patch_deployment(
+            patch_deployment=patch_deployments.PatchDeployment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].patch_deployment
+        mock_val = patch_deployments.PatchDeployment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_patch_deployment_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_patch_deployment(
+            patch_deployments.UpdatePatchDeploymentRequest(),
+            patch_deployment=patch_deployments.PatchDeployment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        patch_deployments.PausePatchDeploymentRequest,
+        dict,
+    ],
+)
+def test_pause_patch_deployment(request_type, transport: str = "grpc"):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.pause_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + response = client.pause_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.PausePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_pause_patch_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_patch_deployment), "__call__" + ) as call: + client.pause_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.PausePatchDeploymentRequest() + + +@pytest.mark.asyncio +async def test_pause_patch_deployment_async( + transport: str = "grpc_asyncio", + request_type=patch_deployments.PausePatchDeploymentRequest, +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pause_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + ) + response = await client.pause_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.PausePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +@pytest.mark.asyncio +async def test_pause_patch_deployment_async_from_dict(): + await test_pause_patch_deployment_async(request_type=dict) + + +def test_pause_patch_deployment_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.PausePatchDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.pause_patch_deployment), "__call__"
+    ) as call:
+        call.return_value = patch_deployments.PatchDeployment()
+        client.pause_patch_deployment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_pause_patch_deployment_field_headers_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = patch_deployments.PausePatchDeploymentRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.pause_patch_deployment), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_deployments.PatchDeployment()
+        )
+        await client.pause_patch_deployment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_pause_patch_deployment_flattened():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.pause_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = patch_deployments.PatchDeployment()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.pause_patch_deployment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_pause_patch_deployment_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.pause_patch_deployment(
+            patch_deployments.PausePatchDeploymentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_pause_patch_deployment_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.pause_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate awaitable return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_deployments.PatchDeployment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.pause_patch_deployment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_pause_patch_deployment_flattened_error_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.pause_patch_deployment( + patch_deployments.PausePatchDeploymentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.ResumePatchDeploymentRequest, + dict, + ], +) +def test_resume_patch_deployment(request_type, transport: str = "grpc"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + response = client.resume_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.ResumePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_resume_patch_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_patch_deployment), "__call__" + ) as call: + client.resume_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.ResumePatchDeploymentRequest() + + +@pytest.mark.asyncio +async def test_resume_patch_deployment_async( + transport: str = "grpc_asyncio", + request_type=patch_deployments.ResumePatchDeploymentRequest, +): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
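+        # (FakeUnaryUnaryCall wraps the message in an awaitable grpc.aio-style
+        # call object, so the async client can await it.)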
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + ) + response = await client.resume_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == patch_deployments.ResumePatchDeploymentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +@pytest.mark.asyncio +async def test_resume_patch_deployment_async_from_dict(): + await test_resume_patch_deployment_async(request_type=dict) + + +def test_resume_patch_deployment_field_headers(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.ResumePatchDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_patch_deployment), "__call__" + ) as call: + call.return_value = patch_deployments.PatchDeployment() + client.resume_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_patch_deployment_field_headers_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = patch_deployments.ResumePatchDeploymentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_patch_deployment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + patch_deployments.PatchDeployment() + ) + await client.resume_patch_deployment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_resume_patch_deployment_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.resume_patch_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = patch_deployments.PatchDeployment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.resume_patch_deployment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_resume_patch_deployment_flattened_error():
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.resume_patch_deployment(
+            patch_deployments.ResumePatchDeploymentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_resume_patch_deployment_flattened_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.resume_patch_deployment), "__call__"
+    ) as call:
+        # Designate an appropriate awaitable return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            patch_deployments.PatchDeployment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.resume_patch_deployment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_resume_patch_deployment_flattened_error_async():
+    client = OsConfigServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.resume_patch_deployment(
+            patch_deployments.ResumePatchDeploymentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        patch_jobs.ExecutePatchJobRequest,
+        dict,
+    ],
+)
+def test_execute_patch_job_rest(request_type):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = patch_jobs.PatchJob(
+            name="name_value",
+            display_name="display_name_value",
+            description="description_value",
+            state=patch_jobs.PatchJob.State.STARTED,
+            dry_run=True,
+            error_message="error_message_value",
+            percent_complete=0.1705,
+            patch_deployment="patch_deployment_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = patch_jobs.PatchJob.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.execute_patch_job(request)
+
+    # Establish that the response is the type that we expect.
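+    # (percent_complete is a float field, so it is compared with a relative
+    # tolerance instead of exact equality.)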
+ assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +def test_execute_patch_job_rest_required_fields( + request_type=patch_jobs.ExecutePatchJobRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).execute_patch_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).execute_patch_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_jobs.PatchJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = patch_jobs.PatchJob.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.execute_patch_job(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_execute_patch_job_rest_unset_required_fields():
+    transport = transports.OsConfigServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.execute_patch_job._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "instanceFilter",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_execute_patch_job_rest_interceptors(null_interceptor):
+    transport = transports.OsConfigServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.OsConfigServiceRestInterceptor(),
+    )
+    client = OsConfigServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.OsConfigServiceRestInterceptor, "post_execute_patch_job"
+    ) as post, mock.patch.object(
+        transports.OsConfigServiceRestInterceptor, "pre_execute_patch_job"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = patch_jobs.ExecutePatchJobRequest.pb(
+            patch_jobs.ExecutePatchJobRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = patch_jobs.PatchJob.to_json(patch_jobs.PatchJob())
+
+        request = patch_jobs.ExecutePatchJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = patch_jobs.PatchJob()
+
+        client.execute_patch_job(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_execute_patch_job_rest_bad_request(
+    transport: str = "rest", request_type=patch_jobs.ExecutePatchJobRequest
+):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
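+    # (A 400 response is mapped to core_exceptions.BadRequest by the client,
+    # which pytest.raises captures.)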
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.execute_patch_job(request) + + +def test_execute_patch_job_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_jobs.GetPatchJobRequest, + dict, + ], +) +def test_get_patch_job_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_jobs.PatchJob( + name="name_value", + display_name="display_name_value", + description="description_value", + state=patch_jobs.PatchJob.State.STARTED, + dry_run=True, + error_message="error_message_value", + percent_complete=0.1705, + patch_deployment="patch_deployment_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_jobs.PatchJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_patch_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +def test_get_patch_job_rest_required_fields(request_type=patch_jobs.GetPatchJobRequest): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_patch_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_patch_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_jobs.PatchJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_jobs.PatchJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_patch_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_patch_job_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_patch_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_patch_job_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "post_get_patch_job" + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_get_patch_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_jobs.GetPatchJobRequest.pb(patch_jobs.GetPatchJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = patch_jobs.PatchJob.to_json(patch_jobs.PatchJob()) + + request = patch_jobs.GetPatchJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_jobs.PatchJob() + + client.get_patch_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_patch_job_rest_bad_request( + transport: str = "rest", request_type=patch_jobs.GetPatchJobRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_patch_job(request) + + +def test_get_patch_job_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
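+    # The flattened form below passes keyword arguments instead of a request
+    # object; the client is expected to assemble the request itself and hit
+    # the same /v1/{name=projects/*/patchJobs/*} route.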
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_jobs.PatchJob() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/patchJobs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_jobs.PatchJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_patch_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/patchJobs/*}" % client.transport._host, args[1] + ) + + +def test_get_patch_job_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_patch_job( + patch_jobs.GetPatchJobRequest(), + name="name_value", + ) + + +def test_get_patch_job_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_jobs.CancelPatchJobRequest, + dict, + ], +) +def test_cancel_patch_job_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_jobs.PatchJob( + name="name_value", + display_name="display_name_value", + description="description_value", + state=patch_jobs.PatchJob.State.STARTED, + dry_run=True, + error_message="error_message_value", + percent_complete=0.1705, + patch_deployment="patch_deployment_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_jobs.PatchJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel_patch_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, patch_jobs.PatchJob) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.state == patch_jobs.PatchJob.State.STARTED + assert response.dry_run is True + assert response.error_message == "error_message_value" + assert math.isclose(response.percent_complete, 0.1705, rel_tol=1e-6) + assert response.patch_deployment == "patch_deployment_value" + + +def test_cancel_patch_job_rest_required_fields( + request_type=patch_jobs.CancelPatchJobRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_patch_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_patch_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_jobs.PatchJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
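+            # Unlike GetPatchJob, CancelPatchJob transcodes to a POST, so the
+            # stubbed transcode result also carries the request as its body.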
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_jobs.PatchJob.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_patch_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_patch_job_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_patch_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_patch_job_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "post_cancel_patch_job" + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_cancel_patch_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_jobs.CancelPatchJobRequest.pb( + patch_jobs.CancelPatchJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = patch_jobs.PatchJob.to_json(patch_jobs.PatchJob()) + + request = patch_jobs.CancelPatchJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_jobs.PatchJob() + + client.cancel_patch_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_patch_job_rest_bad_request( + transport: str = "rest", request_type=patch_jobs.CancelPatchJobRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
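+    # A 400 response from the mocked session should be translated by the
+    # client into the corresponding google.api_core exception, BadRequest.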
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_patch_job(request) + + +def test_cancel_patch_job_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_jobs.ListPatchJobsRequest, + dict, + ], +) +def test_list_patch_jobs_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_jobs.ListPatchJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_jobs.ListPatchJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_patch_jobs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPatchJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_patch_jobs_rest_required_fields( + request_type=patch_jobs.ListPatchJobsRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_patch_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_patch_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_jobs.ListPatchJobsResponse() + # Mock the http request call within the method and fake a response. 
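+    # Because every default-valued field has been dropped, the only query
+    # parameter the mocked session should receive is the system parameter
+    # $alt, which pins the response encoding to JSON with integer enums.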
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_jobs.ListPatchJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_patch_jobs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_patch_jobs_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_patch_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_patch_jobs_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "post_list_patch_jobs" + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_list_patch_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_jobs.ListPatchJobsRequest.pb( + patch_jobs.ListPatchJobsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = patch_jobs.ListPatchJobsResponse.to_json( + patch_jobs.ListPatchJobsResponse() + ) + + request = patch_jobs.ListPatchJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_jobs.ListPatchJobsResponse() + + client.list_patch_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_patch_jobs_rest_bad_request( + transport: str = "rest", request_type=patch_jobs.ListPatchJobsRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_patch_jobs(request) + + +def test_list_patch_jobs_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_jobs.ListPatchJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_jobs.ListPatchJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_patch_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/patchJobs" % client.transport._host, args[1] + ) + + +def test_list_patch_jobs_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_patch_jobs( + patch_jobs.ListPatchJobsRequest(), + parent="parent_value", + ) + + +def test_list_patch_jobs_rest_pager(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
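+        # The pager test fakes four pages (tokens "abc", "def", "ghi", then
+        # an empty token) and queues the sequence twice: once for iterating
+        # individual items and once for iterating whole pages.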
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobsResponse( + patch_jobs=[ + patch_jobs.PatchJob(), + patch_jobs.PatchJob(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(patch_jobs.ListPatchJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_patch_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, patch_jobs.PatchJob) for i in results) + + pages = list(client.list_patch_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + patch_jobs.ListPatchJobInstanceDetailsRequest, + dict, + ], +) +def test_list_patch_job_instance_details_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/patchJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_jobs.ListPatchJobInstanceDetailsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_jobs.ListPatchJobInstanceDetailsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_patch_job_instance_details(request) + + # Establish that the response is the type that we expect. 
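+    # The method returns a pager rather than the raw response message; the
+    # pager proxies fields such as next_page_token from the underlying
+    # response.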
+ assert isinstance(response, pagers.ListPatchJobInstanceDetailsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_patch_job_instance_details_rest_required_fields( + request_type=patch_jobs.ListPatchJobInstanceDetailsRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_patch_job_instance_details._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_patch_job_instance_details._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_jobs.ListPatchJobInstanceDetailsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_jobs.ListPatchJobInstanceDetailsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_patch_job_instance_details(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_patch_job_instance_details_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_patch_job_instance_details._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_patch_job_instance_details_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, + "post_list_patch_job_instance_details", + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_list_patch_job_instance_details" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_jobs.ListPatchJobInstanceDetailsRequest.pb( + patch_jobs.ListPatchJobInstanceDetailsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + patch_jobs.ListPatchJobInstanceDetailsResponse.to_json( + patch_jobs.ListPatchJobInstanceDetailsResponse() + ) + ) + + request = patch_jobs.ListPatchJobInstanceDetailsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_jobs.ListPatchJobInstanceDetailsResponse() + + client.list_patch_job_instance_details( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_patch_job_instance_details_rest_bad_request( + transport: str = "rest", request_type=patch_jobs.ListPatchJobInstanceDetailsRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/patchJobs/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_patch_job_instance_details(request) + + +def test_list_patch_job_instance_details_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_jobs.ListPatchJobInstanceDetailsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/patchJobs/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_jobs.ListPatchJobInstanceDetailsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_patch_job_instance_details(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/patchJobs/*}/instanceDetails" + % client.transport._host, + args[1], + ) + + +def test_list_patch_job_instance_details_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_patch_job_instance_details( + patch_jobs.ListPatchJobInstanceDetailsRequest(), + parent="parent_value", + ) + + +def test_list_patch_job_instance_details_rest_pager(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="abc", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[], + next_page_token="def", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + ], + next_page_token="ghi", + ), + patch_jobs.ListPatchJobInstanceDetailsResponse( + patch_job_instance_details=[ + patch_jobs.PatchJobInstanceDetails(), + patch_jobs.PatchJobInstanceDetails(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + patch_jobs.ListPatchJobInstanceDetailsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/patchJobs/sample2"} + + pager = client.list_patch_job_instance_details(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, patch_jobs.PatchJobInstanceDetails) for i in results) + + pages = list( + client.list_patch_job_instance_details(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.CreatePatchDeploymentRequest, + dict, + ], +) +def test_create_patch_deployment_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["patch_deployment"] = { + "name": "name_value", + "description": "description_value", + "instance_filter": { + "all_": True, + "group_labels": [{"labels": {}}], + "zones": ["zones_value1", "zones_value2"], + "instances": ["instances_value1", "instances_value2"], + "instance_name_prefixes": [ + "instance_name_prefixes_value1", + "instance_name_prefixes_value2", + ], + }, + "patch_config": { + "reboot_config": 1, + "apt": { + "type_": 1, + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_packages": [ + "exclusive_packages_value1", + "exclusive_packages_value2", + ], + }, + "yum": { + "security": True, + "minimal": True, + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_packages": [ + "exclusive_packages_value1", + "exclusive_packages_value2", + ], + }, + "goo": {}, + "zypper": { + "with_optional": True, + "with_update": True, + "categories": ["categories_value1", "categories_value2"], + "severities": ["severities_value1", "severities_value2"], + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_patches": [ + "exclusive_patches_value1", + "exclusive_patches_value2", + ], + }, + "windows_update": { + "classifications": [1], + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_patches": [ + "exclusive_patches_value1", + "exclusive_patches_value2", + ], + }, + "pre_step": { + "linux_exec_step_config": { + "local_path": "local_path_value", 
+ "gcs_object": { + "bucket": "bucket_value", + "object_": "object__value", + "generation_number": 1812, + }, + "allowed_success_codes": [2222, 2223], + "interpreter": 1, + }, + "windows_exec_step_config": {}, + }, + "post_step": {}, + "mig_instances_allowed": True, + }, + "duration": {"seconds": 751, "nanos": 543}, + "one_time_schedule": {"execute_time": {"seconds": 751, "nanos": 543}}, + "recurring_schedule": { + "time_zone": {"id": "id_value", "version": "version_value"}, + "start_time": {}, + "end_time": {}, + "time_of_day": {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543}, + "frequency": 1, + "weekly": {"day_of_week": 1}, + "monthly": { + "week_day_of_month": { + "week_ordinal": 1268, + "day_of_week": 1, + "day_offset": 1060, + }, + "month_day": 963, + }, + "last_execute_time": {}, + "next_execute_time": {}, + }, + "create_time": {}, + "update_time": {}, + "last_execute_time": {}, + "rollout": {"mode": 1, "disruption_budget": {"fixed": 528, "percent": 753}}, + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_patch_deployment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_create_patch_deployment_rest_required_fields( + request_type=patch_deployments.CreatePatchDeploymentRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["patch_deployment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "patchDeploymentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "patchDeploymentId" in jsonified_request + assert jsonified_request["patchDeploymentId"] == request_init["patch_deployment_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["patchDeploymentId"] = "patch_deployment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_patch_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+    assert not set(unset_fields) - set(("patch_deployment_id",))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == "parent_value"
+    assert "patchDeploymentId" in jsonified_request
+    assert jsonified_request["patchDeploymentId"] == "patch_deployment_id_value"
+
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = patch_deployments.PatchDeployment()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = patch_deployments.PatchDeployment.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_patch_deployment(request)
+
+            expected_params = [
+                (
+                    "patchDeploymentId",
+                    "",
+                ),
+                ("$alt", "json;enum-encoding=int"),
+            ]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_patch_deployment_rest_unset_required_fields():
+    transport = transports.OsConfigServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_patch_deployment._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(("patchDeploymentId",))
+        & set(
+            (
+                "parent",
+                "patchDeploymentId",
+                "patchDeployment",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_patch_deployment_rest_interceptors(null_interceptor):
+    transport = transports.OsConfigServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.OsConfigServiceRestInterceptor(),
+    )
+    client = OsConfigServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.OsConfigServiceRestInterceptor, "post_create_patch_deployment"
+    ) as post, mock.patch.object(
+        transports.OsConfigServiceRestInterceptor, "pre_create_patch_deployment"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = patch_deployments.CreatePatchDeploymentRequest.pb(
+            patch_deployments.CreatePatchDeploymentRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = patch_deployments.PatchDeployment.to_json(
+            patch_deployments.PatchDeployment()
+        )
+
+        request = patch_deployments.CreatePatchDeploymentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = patch_deployments.PatchDeployment()
+
+        client.create_patch_deployment(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_patch_deployment_rest_bad_request(
+    transport: str = "rest", request_type=patch_deployments.CreatePatchDeploymentRequest
+):
+    client = OsConfigServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1"}
+    request_init["patch_deployment"] = {
+        "name": "name_value",
+        "description": "description_value",
+        "instance_filter": {
+            "all_": True,
+            "group_labels": [{"labels": {}}],
+            "zones": ["zones_value1", "zones_value2"],
+            "instances": ["instances_value1", "instances_value2"],
+            "instance_name_prefixes": [
+                "instance_name_prefixes_value1",
+                "instance_name_prefixes_value2",
+            ],
+        },
+        "patch_config": {
+            "reboot_config": 1,
+            "apt": {
+                "type_": 1,
+                "excludes": ["excludes_value1", "excludes_value2"],
+                "exclusive_packages": [
+                    "exclusive_packages_value1",
+                    "exclusive_packages_value2",
+                ],
+            },
+            "yum": {
+                "security": True,
+                "minimal": True,
+                "excludes": ["excludes_value1", "excludes_value2"],
+                "exclusive_packages": [
+                    "exclusive_packages_value1",
+                    "exclusive_packages_value2",
+                ],
+            },
+            "goo": {},
+            "zypper": {
+                "with_optional": True,
+                "with_update": True,
+                "categories": ["categories_value1", "categories_value2"],
+                "severities": ["severities_value1", "severities_value2"],
+                "excludes": ["excludes_value1", "excludes_value2"],
+                "exclusive_patches": [
+                    "exclusive_patches_value1",
+                    "exclusive_patches_value2",
+                ],
+            },
+            "windows_update": {
+                "classifications": [1],
+                "excludes": ["excludes_value1", "excludes_value2"],
+                "exclusive_patches": [
+                    "exclusive_patches_value1",
+                    "exclusive_patches_value2",
+                ],
+            },
+            "pre_step": {
+                "linux_exec_step_config": {
+                    "local_path": "local_path_value",
+                    "gcs_object": {
+                        "bucket": "bucket_value",
+                        "object_": "object__value",
+                        "generation_number": 1812,
+                    },
+                    "allowed_success_codes": [2222, 2223],
+                    "interpreter": 1,
+                },
+                "windows_exec_step_config": {},
+            },
+            "post_step": {},
+            "mig_instances_allowed": True,
+        },
+        "duration": {"seconds": 751, "nanos": 543},
+        "one_time_schedule": {"execute_time": {"seconds": 751, "nanos": 543}},
+        "recurring_schedule": {
+            "time_zone": {"id": "id_value", "version": "version_value"},
+            "start_time": {},
+            "end_time": {},
+            "time_of_day": {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543},
+            "frequency": 1,
+            "weekly": {"day_of_week": 1},
+            "monthly": {
+                "week_day_of_month": {
+                    "week_ordinal": 1268,
+                    "day_of_week": 1,
+                    "day_offset": 1060,
+                },
+                "month_day": 963,
+            },
+            "last_execute_time": {},
+            "next_execute_time": {},
+        },
+        "create_time": {},
+        "update_time": {},
+        "last_execute_time": {},
+        "rollout": {"mode": 1, "disruption_budget": {"fixed": 528, "percent": 753}},
+        "state": 1,
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
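+    # The same fully populated patch_deployment body is reused here so that
+    # transcoding still succeeds; only the mocked 400 status differs from the
+    # happy-path test above.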
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_patch_deployment(request) + + +def test_create_patch_deployment_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + patch_deployment=patch_deployments.PatchDeployment(name="name_value"), + patch_deployment_id="patch_deployment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_patch_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/patchDeployments" % client.transport._host, + args[1], + ) + + +def test_create_patch_deployment_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_patch_deployment( + patch_deployments.CreatePatchDeploymentRequest(), + parent="parent_value", + patch_deployment=patch_deployments.PatchDeployment(name="name_value"), + patch_deployment_id="patch_deployment_id_value", + ) + + +def test_create_patch_deployment_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.GetPatchDeploymentRequest, + dict, + ], +) +def test_get_patch_deployment_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchDeployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_patch_deployment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_get_patch_deployment_rest_required_fields( + request_type=patch_deployments.GetPatchDeploymentRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_patch_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_patch_deployment_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_patch_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_patch_deployment_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "post_get_patch_deployment" + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_get_patch_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_deployments.GetPatchDeploymentRequest.pb( + patch_deployments.GetPatchDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = patch_deployments.PatchDeployment.to_json( + patch_deployments.PatchDeployment() + ) + + request = patch_deployments.GetPatchDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_deployments.PatchDeployment() + + client.get_patch_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_patch_deployment_rest_bad_request( + transport: str = "rest", request_type=patch_deployments.GetPatchDeploymentRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchDeployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_patch_deployment(request) + + +def test_get_patch_deployment_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/patchDeployments/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_patch_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/patchDeployments/*}" % client.transport._host, + args[1], + ) + + +def test_get_patch_deployment_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_patch_deployment( + patch_deployments.GetPatchDeploymentRequest(), + name="name_value", + ) + + +def test_get_patch_deployment_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.ListPatchDeploymentsRequest, + dict, + ], +) +def test_list_patch_deployments_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.ListPatchDeploymentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.ListPatchDeploymentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_patch_deployments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPatchDeploymentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_patch_deployments_rest_required_fields( + request_type=patch_deployments.ListPatchDeploymentsRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_patch_deployments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_patch_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_deployments.ListPatchDeploymentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
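+            # The dict assigned to transcode.return_value mirrors the shape
+            # produced by google.api_core.path_template.transcode
+            # (uri, method, query_params).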
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_deployments.ListPatchDeploymentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_patch_deployments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_patch_deployments_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_patch_deployments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_patch_deployments_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "post_list_patch_deployments" + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_list_patch_deployments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_deployments.ListPatchDeploymentsRequest.pb( + patch_deployments.ListPatchDeploymentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + patch_deployments.ListPatchDeploymentsResponse.to_json( + patch_deployments.ListPatchDeploymentsResponse() + ) + ) + + request = patch_deployments.ListPatchDeploymentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_deployments.ListPatchDeploymentsResponse() + + client.list_patch_deployments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_patch_deployments_rest_bad_request( + transport: str = "rest", request_type=patch_deployments.ListPatchDeploymentsRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_patch_deployments(request) + + +def test_list_patch_deployments_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.ListPatchDeploymentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.ListPatchDeploymentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_patch_deployments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/patchDeployments" % client.transport._host, + args[1], + ) + + +def test_list_patch_deployments_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_patch_deployments( + patch_deployments.ListPatchDeploymentsRequest(), + parent="parent_value", + ) + + +def test_list_patch_deployments_rest_pager(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + patch_deployments.ListPatchDeploymentsResponse( + patch_deployments=[ + patch_deployments.PatchDeployment(), + patch_deployments.PatchDeployment(), + patch_deployments.PatchDeployment(), + ], + next_page_token="abc", + ), + patch_deployments.ListPatchDeploymentsResponse( + patch_deployments=[], + next_page_token="def", + ), + patch_deployments.ListPatchDeploymentsResponse( + patch_deployments=[ + patch_deployments.PatchDeployment(), + ], + next_page_token="ghi", + ), + patch_deployments.ListPatchDeploymentsResponse( + patch_deployments=[ + patch_deployments.PatchDeployment(), + patch_deployments.PatchDeployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + patch_deployments.ListPatchDeploymentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_patch_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, patch_deployments.PatchDeployment) for i in results) + + pages = list(client.list_patch_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.DeletePatchDeploymentRequest, + dict, + ], +) +def test_delete_patch_deployment_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchDeployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_patch_deployment(request) + + # Establish that the response is the type that we expect. 
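+    # DeletePatchDeployment returns google.protobuf.Empty, which the client
+    # surfaces as None, so an empty JSON body is the expected payload.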
+ assert response is None + + +def test_delete_patch_deployment_rest_required_fields( + request_type=patch_deployments.DeletePatchDeploymentRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_patch_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_patch_deployment_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_patch_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_patch_deployment_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_delete_patch_deployment" + ) as pre: + pre.assert_not_called() + pb_message = patch_deployments.DeletePatchDeploymentRequest.pb( + patch_deployments.DeletePatchDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = patch_deployments.DeletePatchDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_patch_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_patch_deployment_rest_bad_request( + transport: str = "rest", request_type=patch_deployments.DeletePatchDeploymentRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchDeployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_patch_deployment(request) + + +def test_delete_patch_deployment_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/patchDeployments/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_patch_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/patchDeployments/*}" % client.transport._host, + args[1], + ) + + +def test_delete_patch_deployment_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_patch_deployment( + patch_deployments.DeletePatchDeploymentRequest(), + name="name_value", + ) + + +def test_delete_patch_deployment_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.UpdatePatchDeploymentRequest, + dict, + ], +) +def test_update_patch_deployment_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "patch_deployment": {"name": "projects/sample1/patchDeployments/sample2"} + } + request_init["patch_deployment"] = { + "name": "projects/sample1/patchDeployments/sample2", + "description": "description_value", + "instance_filter": { + "all_": True, + "group_labels": [{"labels": {}}], + "zones": ["zones_value1", "zones_value2"], + "instances": ["instances_value1", "instances_value2"], + "instance_name_prefixes": [ + "instance_name_prefixes_value1", + "instance_name_prefixes_value2", + ], + }, + "patch_config": { + "reboot_config": 1, + "apt": { + "type_": 1, + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_packages": [ + "exclusive_packages_value1", + "exclusive_packages_value2", + ], + }, + "yum": { + "security": True, + "minimal": True, + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_packages": [ + "exclusive_packages_value1", + "exclusive_packages_value2", + ], + }, + "goo": {}, + "zypper": { + "with_optional": True, + "with_update": True, + "categories": ["categories_value1", "categories_value2"], + "severities": ["severities_value1", "severities_value2"], + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_patches": [ + "exclusive_patches_value1", + "exclusive_patches_value2", + ], + }, + "windows_update": { + "classifications": [1], + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_patches": [ + "exclusive_patches_value1", + "exclusive_patches_value2", + ], + }, + "pre_step": { + "linux_exec_step_config": { + "local_path": "local_path_value", + "gcs_object": { + "bucket": "bucket_value", + "object_": "object__value", + "generation_number": 1812, + }, + "allowed_success_codes": [2222, 2223], + "interpreter": 1, + }, + 
"windows_exec_step_config": {}, + }, + "post_step": {}, + "mig_instances_allowed": True, + }, + "duration": {"seconds": 751, "nanos": 543}, + "one_time_schedule": {"execute_time": {"seconds": 751, "nanos": 543}}, + "recurring_schedule": { + "time_zone": {"id": "id_value", "version": "version_value"}, + "start_time": {}, + "end_time": {}, + "time_of_day": {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543}, + "frequency": 1, + "weekly": {"day_of_week": 1}, + "monthly": { + "week_day_of_month": { + "week_ordinal": 1268, + "day_of_week": 1, + "day_offset": 1060, + }, + "month_day": 963, + }, + "last_execute_time": {}, + "next_execute_time": {}, + }, + "create_time": {}, + "update_time": {}, + "last_execute_time": {}, + "rollout": {"mode": 1, "disruption_budget": {"fixed": 528, "percent": 753}}, + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_patch_deployment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_update_patch_deployment_rest_required_fields( + request_type=patch_deployments.UpdatePatchDeploymentRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_patch_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_patch_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_patch_deployment_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_patch_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("patchDeployment",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_patch_deployment_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "post_update_patch_deployment" + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_update_patch_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_deployments.UpdatePatchDeploymentRequest.pb( + patch_deployments.UpdatePatchDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = patch_deployments.PatchDeployment.to_json( + patch_deployments.PatchDeployment() + ) + + request = patch_deployments.UpdatePatchDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_deployments.PatchDeployment() + + client.update_patch_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_patch_deployment_rest_bad_request( + transport: str = "rest", request_type=patch_deployments.UpdatePatchDeploymentRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "patch_deployment": {"name": 
"projects/sample1/patchDeployments/sample2"} + } + request_init["patch_deployment"] = { + "name": "projects/sample1/patchDeployments/sample2", + "description": "description_value", + "instance_filter": { + "all_": True, + "group_labels": [{"labels": {}}], + "zones": ["zones_value1", "zones_value2"], + "instances": ["instances_value1", "instances_value2"], + "instance_name_prefixes": [ + "instance_name_prefixes_value1", + "instance_name_prefixes_value2", + ], + }, + "patch_config": { + "reboot_config": 1, + "apt": { + "type_": 1, + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_packages": [ + "exclusive_packages_value1", + "exclusive_packages_value2", + ], + }, + "yum": { + "security": True, + "minimal": True, + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_packages": [ + "exclusive_packages_value1", + "exclusive_packages_value2", + ], + }, + "goo": {}, + "zypper": { + "with_optional": True, + "with_update": True, + "categories": ["categories_value1", "categories_value2"], + "severities": ["severities_value1", "severities_value2"], + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_patches": [ + "exclusive_patches_value1", + "exclusive_patches_value2", + ], + }, + "windows_update": { + "classifications": [1], + "excludes": ["excludes_value1", "excludes_value2"], + "exclusive_patches": [ + "exclusive_patches_value1", + "exclusive_patches_value2", + ], + }, + "pre_step": { + "linux_exec_step_config": { + "local_path": "local_path_value", + "gcs_object": { + "bucket": "bucket_value", + "object_": "object__value", + "generation_number": 1812, + }, + "allowed_success_codes": [2222, 2223], + "interpreter": 1, + }, + "windows_exec_step_config": {}, + }, + "post_step": {}, + "mig_instances_allowed": True, + }, + "duration": {"seconds": 751, "nanos": 543}, + "one_time_schedule": {"execute_time": {"seconds": 751, "nanos": 543}}, + "recurring_schedule": { + "time_zone": {"id": "id_value", "version": "version_value"}, + "start_time": {}, + "end_time": {}, + "time_of_day": {"hours": 561, "minutes": 773, "seconds": 751, "nanos": 543}, + "frequency": 1, + "weekly": {"day_of_week": 1}, + "monthly": { + "week_day_of_month": { + "week_ordinal": 1268, + "day_of_week": 1, + "day_offset": 1060, + }, + "month_day": 963, + }, + "last_execute_time": {}, + "next_execute_time": {}, + }, + "create_time": {}, + "update_time": {}, + "last_execute_time": {}, + "rollout": {"mode": 1, "disruption_budget": {"fixed": 528, "percent": 753}}, + "state": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_patch_deployment(request) + + +def test_update_patch_deployment_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = patch_deployments.PatchDeployment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "patch_deployment": {"name": "projects/sample1/patchDeployments/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + patch_deployment=patch_deployments.PatchDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_patch_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{patch_deployment.name=projects/*/patchDeployments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_patch_deployment_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_patch_deployment( + patch_deployments.UpdatePatchDeploymentRequest(), + patch_deployment=patch_deployments.PatchDeployment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_patch_deployment_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.PausePatchDeploymentRequest, + dict, + ], +) +def test_pause_patch_deployment_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchDeployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pause_patch_deployment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_pause_patch_deployment_rest_required_fields( + request_type=patch_deployments.PausePatchDeploymentRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.pause_patch_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pause_patch_deployment_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pause_patch_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pause_patch_deployment_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "post_pause_patch_deployment" + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_pause_patch_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_deployments.PausePatchDeploymentRequest.pb( + patch_deployments.PausePatchDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = patch_deployments.PatchDeployment.to_json( + patch_deployments.PatchDeployment() + ) + + request = patch_deployments.PausePatchDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_deployments.PatchDeployment() + + client.pause_patch_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_pause_patch_deployment_rest_bad_request( + transport: str = "rest", request_type=patch_deployments.PausePatchDeploymentRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchDeployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.pause_patch_deployment(request) + + +def test_pause_patch_deployment_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/patchDeployments/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.pause_patch_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/patchDeployments/*}:pause" % client.transport._host, + args[1], + ) + + +def test_pause_patch_deployment_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_patch_deployment( + patch_deployments.PausePatchDeploymentRequest(), + name="name_value", + ) + + +def test_pause_patch_deployment_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + patch_deployments.ResumePatchDeploymentRequest, + dict, + ], +) +def test_resume_patch_deployment_rest(request_type): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchDeployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = patch_deployments.PatchDeployment( + name="name_value", + description="description_value", + state=patch_deployments.PatchDeployment.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resume_patch_deployment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, patch_deployments.PatchDeployment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == patch_deployments.PatchDeployment.State.ACTIVE + + +def test_resume_patch_deployment_rest_required_fields( + request_type=patch_deployments.ResumePatchDeploymentRequest, +): + transport_class = transports.OsConfigServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_patch_deployment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resume_patch_deployment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resume_patch_deployment_rest_unset_required_fields(): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.resume_patch_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_patch_deployment_rest_interceptors(null_interceptor): + transport = transports.OsConfigServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigServiceRestInterceptor(), + ) + client = OsConfigServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "post_resume_patch_deployment" + ) as post, mock.patch.object( + transports.OsConfigServiceRestInterceptor, "pre_resume_patch_deployment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = patch_deployments.ResumePatchDeploymentRequest.pb( + patch_deployments.ResumePatchDeploymentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = patch_deployments.PatchDeployment.to_json( + patch_deployments.PatchDeployment() + ) + + request = patch_deployments.ResumePatchDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = patch_deployments.PatchDeployment() + + client.resume_patch_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_patch_deployment_rest_bad_request( + transport: str = "rest", request_type=patch_deployments.ResumePatchDeploymentRequest +): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/patchDeployments/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume_patch_deployment(request) + + +def test_resume_patch_deployment_rest_flattened(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = patch_deployments.PatchDeployment() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/patchDeployments/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = patch_deployments.PatchDeployment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resume_patch_deployment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/patchDeployments/*}:resume" + % client.transport._host, + args[1], + ) + + +def test_resume_patch_deployment_rest_flattened_error(transport: str = "rest"): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_patch_deployment( + patch_deployments.ResumePatchDeploymentRequest(), + name="name_value", + ) + + +def test_resume_patch_deployment_rest_error(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.OsConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.OsConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.OsConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OsConfigServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
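+    # A mock.Mock() stands in for ClientOptions; its api_key attribute is what
+    # triggers the conflict check against the explicit credentials.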
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OsConfigServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.OsConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.OsConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = OsConfigServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.OsConfigServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.OsConfigServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigServiceGrpcTransport, + transports.OsConfigServiceGrpcAsyncIOTransport, + transports.OsConfigServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = OsConfigServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.OsConfigServiceGrpcTransport, + ) + + +def test_os_config_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.OsConfigServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_os_config_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.osconfig_v1.services.os_config_service.transports.OsConfigServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.OsConfigServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
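+    # The base transport only defines the interface; the concrete gRPC and
+    # REST transports override these stubs with real implementations.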
+ methods = ( + "execute_patch_job", + "get_patch_job", + "cancel_patch_job", + "list_patch_jobs", + "list_patch_job_instance_details", + "create_patch_deployment", + "get_patch_deployment", + "list_patch_deployments", + "delete_patch_deployment", + "update_patch_deployment", + "pause_patch_deployment", + "resume_patch_deployment", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_os_config_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.osconfig_v1.services.os_config_service.transports.OsConfigServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OsConfigServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_os_config_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.osconfig_v1.services.os_config_service.transports.OsConfigServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OsConfigServiceTransport() + adc.assert_called_once() + + +def test_os_config_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + OsConfigServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigServiceGrpcTransport, + transports.OsConfigServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
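+    # Patching google.auth.default lets the test control the ADC lookup and
+    # assert on the scopes and quota project forwarded by the transport.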
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigServiceGrpcTransport, + transports.OsConfigServiceGrpcAsyncIOTransport, + transports.OsConfigServiceRestTransport, + ], +) +def test_os_config_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.OsConfigServiceGrpcTransport, grpc_helpers), + (transports.OsConfigServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_os_config_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "osconfig.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="osconfig.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigServiceGrpcTransport, + transports.OsConfigServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
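+    # Hedged note: a client cert source is any zero-argument callable that
+    # returns (cert_bytes, key_bytes) in PEM form; the module-level
+    # client_cert_source_callback used below satisfies that contract. An
+    # illustrative file-backed variant (paths are hypothetical):
+    #
+    #     def file_cert_source():
+    #         with open("client.pem", "rb") as c, open("client.key", "rb") as k:
+    #             return c.read(), k.read()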
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_os_config_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.OsConfigServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_os_config_service_host_no_port(transport_name): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="osconfig.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "osconfig.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://osconfig.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_os_config_service_host_with_port(transport_name): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="osconfig.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "osconfig.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://osconfig.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_os_config_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = OsConfigServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = OsConfigServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.execute_patch_job._session + session2 = client2.transport.execute_patch_job._session + assert session1 != session2 + session1 = client1.transport.get_patch_job._session + session2 = client2.transport.get_patch_job._session + assert session1 != session2 + session1 = client1.transport.cancel_patch_job._session + session2 = client2.transport.cancel_patch_job._session + assert session1 != session2 + session1 = client1.transport.list_patch_jobs._session + session2 = client2.transport.list_patch_jobs._session + assert session1 != session2 + session1 = client1.transport.list_patch_job_instance_details._session + session2 = client2.transport.list_patch_job_instance_details._session + assert session1 != session2 + session1 = client1.transport.create_patch_deployment._session + session2 = client2.transport.create_patch_deployment._session + assert session1 != session2 + session1 = client1.transport.get_patch_deployment._session + session2 = client2.transport.get_patch_deployment._session + assert session1 != session2 + session1 = client1.transport.list_patch_deployments._session + session2 = 
client2.transport.list_patch_deployments._session + assert session1 != session2 + session1 = client1.transport.delete_patch_deployment._session + session2 = client2.transport.delete_patch_deployment._session + assert session1 != session2 + session1 = client1.transport.update_patch_deployment._session + session2 = client2.transport.update_patch_deployment._session + assert session1 != session2 + session1 = client1.transport.pause_patch_deployment._session + session2 = client2.transport.pause_patch_deployment._session + assert session1 != session2 + session1 = client1.transport.resume_patch_deployment._session + session2 = client2.transport.resume_patch_deployment._session + assert session1 != session2 + + +def test_os_config_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.OsConfigServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_os_config_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.OsConfigServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigServiceGrpcTransport, + transports.OsConfigServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
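+# Hedged background: the deprecated pair was superseded by passing
+# ssl_channel_credentials or client_cert_source_for_mtls directly, as the
+# earlier tests in this file exercise, e.g.:
+#
+#     transport = transports.OsConfigServiceGrpcTransport(
+#         credentials=ga_credentials.AnonymousCredentials(),
+#         client_cert_source_for_mtls=client_cert_source_callback,
+#     )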
+@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigServiceGrpcTransport, + transports.OsConfigServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_instance_path(): + project = "squid" + zone = "clam" + instance = "whelk" + expected = "projects/{project}/zones/{zone}/instances/{instance}".format( + project=project, + zone=zone, + instance=instance, + ) + actual = OsConfigServiceClient.instance_path(project, zone, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "octopus", + "zone": "oyster", + "instance": "nudibranch", + } + path = OsConfigServiceClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigServiceClient.parse_instance_path(path) + assert expected == actual + + +def test_patch_deployment_path(): + project = "cuttlefish" + patch_deployment = "mussel" + expected = "projects/{project}/patchDeployments/{patch_deployment}".format( + project=project, + patch_deployment=patch_deployment, + ) + actual = OsConfigServiceClient.patch_deployment_path(project, patch_deployment) + assert expected == actual + + +def test_parse_patch_deployment_path(): + expected = { + "project": "winkle", + "patch_deployment": "nautilus", + } + path = OsConfigServiceClient.patch_deployment_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigServiceClient.parse_patch_deployment_path(path) + assert expected == actual + + +def test_patch_job_path(): + project = "scallop" + patch_job = "abalone" + expected = "projects/{project}/patchJobs/{patch_job}".format( + project=project, + patch_job=patch_job, + ) + actual = OsConfigServiceClient.patch_job_path(project, patch_job) + assert expected == actual + + +def test_parse_patch_job_path(): + expected = { + "project": "squid", + "patch_job": "clam", + } + path = OsConfigServiceClient.patch_job_path(**expected) + + # Check that the path construction is reversible. 
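+    # Hedged aside: these helpers are thin wrappers over string templates, so
+    # round-tripping any well-formed resource name is lossless, e.g.:
+    #
+    #     name = OsConfigServiceClient.patch_job_path("p", "j")
+    #     # -> "projects/p/patchJobs/j"
+    #     assert OsConfigServiceClient.parse_patch_job_path(name) == {
+    #         "project": "p",
+    #         "patch_job": "j",
+    #     }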
+ actual = OsConfigServiceClient.parse_patch_job_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = OsConfigServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = OsConfigServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = OsConfigServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = OsConfigServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = OsConfigServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = OsConfigServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = OsConfigServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = OsConfigServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = OsConfigServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = OsConfigServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
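+    # Hedged note: the common_*_path helpers are emitted for every GAPIC
+    # client, not just this one; a quick illustrative check:
+    #
+    #     assert OsConfigServiceClient.common_location_path("p", "us-east1") \
+    #         == "projects/p/locations/us-east1"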
+ actual = OsConfigServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.OsConfigServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.OsConfigServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = OsConfigServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = OsConfigServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = OsConfigServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
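+        # Hedged usage sketch: application code can rely on the same
+        # context-manager protocol to release the channel or HTTP session
+        # deterministically (the parent value below is hypothetical):
+        #
+        #     with OsConfigServiceClient(credentials=creds) as c:
+        #         c.list_patch_jobs(parent="projects/my-project")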
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (OsConfigServiceClient, transports.OsConfigServiceGrpcTransport), + (OsConfigServiceAsyncClient, transports.OsConfigServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py new file mode 100644 index 000000000000..8a068793b89d --- /dev/null +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1/test_os_config_zonal_service.py @@ -0,0 +1,10138 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.osconfig_v1.services.os_config_zonal_service import ( + OsConfigZonalServiceAsyncClient, + OsConfigZonalServiceClient, + pagers, + transports, +) +from google.cloud.osconfig_v1.types import ( + inventory, + os_policy, + os_policy_assignment_reports, + os_policy_assignments, + osconfig_common, + vulnerability, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
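+# Hedged note: for googleapis.com hosts the mTLS endpoint is derived by
+# rewriting "<service>.googleapis.com" to "<service>.mtls.googleapis.com"
+# (e.g. "osconfig.googleapis.com" -> "osconfig.mtls.googleapis.com"), which
+# is exactly what test__get_default_mtls_endpoint below exercises.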
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert OsConfigZonalServiceClient._get_default_mtls_endpoint(None) is None + assert ( + OsConfigZonalServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + OsConfigZonalServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + OsConfigZonalServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OsConfigZonalServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + OsConfigZonalServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OsConfigZonalServiceClient, "grpc"), + (OsConfigZonalServiceAsyncClient, "grpc_asyncio"), + (OsConfigZonalServiceClient, "rest"), + ], +) +def test_os_config_zonal_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "osconfig.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://osconfig.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OsConfigZonalServiceGrpcTransport, "grpc"), + (transports.OsConfigZonalServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.OsConfigZonalServiceRestTransport, "rest"), + ], +) +def test_os_config_zonal_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OsConfigZonalServiceClient, "grpc"), + (OsConfigZonalServiceAsyncClient, "grpc_asyncio"), + (OsConfigZonalServiceClient, "rest"), + ], +) +def test_os_config_zonal_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert 
isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "osconfig.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://osconfig.googleapis.com" + ) + + +def test_os_config_zonal_service_client_get_transport_class(): + transport = OsConfigZonalServiceClient.get_transport_class() + available_transports = [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceRestTransport, + ] + assert transport in available_transports + + transport = OsConfigZonalServiceClient.get_transport_class("grpc") + assert transport == transports.OsConfigZonalServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + OsConfigZonalServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigZonalServiceClient), +) +@mock.patch.object( + OsConfigZonalServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigZonalServiceAsyncClient), +) +def test_os_config_zonal_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(OsConfigZonalServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(OsConfigZonalServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
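+    # Hedged summary of GOOGLE_API_USE_MTLS_ENDPOINT: "never" pins the plain
+    # endpoint, "always" pins the mTLS endpoint, and "auto" (the default)
+    # selects mTLS only when a client certificate is configured. Outside of
+    # tests it is ordinary process environment, e.g.:
+    #
+    #     os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"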
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + "true", + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + "false", + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceRestTransport, + "rest", + "true", + ), + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + OsConfigZonalServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigZonalServiceClient), +) +@mock.patch.object( + OsConfigZonalServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigZonalServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_os_config_zonal_service_client_mtls_env_auto( + client_class, transport_class, transport_name, 
use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
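+    # Hedged aside: "ADC client cert" refers to a workstation certificate
+    # discovered via google.auth.transport.mtls, which the patches above
+    # emulate; the real discovery looks roughly like:
+    #
+    #     from google.auth.transport import mtls
+    #     if mtls.has_default_client_cert_source():
+    #         cert_source = mtls.default_client_cert_source()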
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [OsConfigZonalServiceClient, OsConfigZonalServiceAsyncClient] +) +@mock.patch.object( + OsConfigZonalServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigZonalServiceClient), +) +@mock.patch.object( + OsConfigZonalServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigZonalServiceAsyncClient), +) +def test_os_config_zonal_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceRestTransport, + "rest", + ), + ], +) +def test_os_config_zonal_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceRestTransport, + "rest", + None, + ), + ], +) +def test_os_config_zonal_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
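+    # Hedged usage sketch: application code can point at a service-account
+    # key the same way (the path below is hypothetical):
+    #
+    #     client = OsConfigZonalServiceClient(
+    #         client_options={"credentials_file": "/path/to/key.json"}
+    #     )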
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_os_config_zonal_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.osconfig_v1.services.os_config_zonal_service.transports.OsConfigZonalServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = OsConfigZonalServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_os_config_zonal_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
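+    # Hedged note: google.auth.load_credentials_from_file returns a
+    # (credentials, project_id) tuple, mirroring google.auth.default(); the
+    # mock below mimics that shape, e.g.:
+    #
+    #     creds, project = google.auth.load_credentials_from_file("key.json")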
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "osconfig.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="osconfig.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.CreateOSPolicyAssignmentRequest, + dict, + ], +) +def test_create_os_policy_assignment(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.CreateOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_os_policy_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + client.create_os_policy_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.CreateOSPolicyAssignmentRequest() + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.CreateOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
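+        # Hedged note: async stubs must yield an awaitable, so the Operation
+        # is wrapped in grpc_helpers_async.FakeUnaryUnaryCall instead of
+        # being assigned directly as in the synchronous variant above.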
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.CreateOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_async_from_dict(): + await test_create_os_policy_assignment_async(request_type=dict) + + +def test_create_os_policy_assignment_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.CreateOSPolicyAssignmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.CreateOSPolicyAssignmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_os_policy_assignment_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
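+        # Hedged note: "flattened" arguments are a GAPIC convenience; the
+        # keyword form below is equivalent to constructing
+        # CreateOSPolicyAssignmentRequest(parent=..., os_policy_assignment=...,
+        # os_policy_assignment_id=...) and passing it as the request.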
+ client.create_os_policy_assignment( + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].os_policy_assignment + mock_val = os_policy_assignments.OSPolicyAssignment(name="name_value") + assert arg == mock_val + arg = args[0].os_policy_assignment_id + mock_val = "os_policy_assignment_id_value" + assert arg == mock_val + + +def test_create_os_policy_assignment_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_os_policy_assignment( + os_policy_assignments.CreateOSPolicyAssignmentRequest(), + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_os_policy_assignment( + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].os_policy_assignment + mock_val = os_policy_assignments.OSPolicyAssignment(name="name_value") + assert arg == mock_val + arg = args[0].os_policy_assignment_id + mock_val = "os_policy_assignment_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_os_policy_assignment( + os_policy_assignments.CreateOSPolicyAssignmentRequest(), + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.UpdateOSPolicyAssignmentRequest, + dict, + ], +) +def test_update_os_policy_assignment(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_os_policy_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + client.update_os_policy_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + +@pytest.mark.asyncio +async def test_update_os_policy_assignment_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.UpdateOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_os_policy_assignment_async_from_dict(): + await test_update_os_policy_assignment_async(request_type=dict) + + +def test_update_os_policy_assignment_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + request.os_policy_assignment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "os_policy_assignment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_os_policy_assignment_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + request.os_policy_assignment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "os_policy_assignment.name=name_value", + ) in kw["metadata"] + + +def test_update_os_policy_assignment_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_os_policy_assignment( + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].os_policy_assignment
+        mock_val = os_policy_assignments.OSPolicyAssignment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_os_policy_assignment_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_os_policy_assignment(
+            os_policy_assignments.UpdateOSPolicyAssignmentRequest(),
+            os_policy_assignment=os_policy_assignments.OSPolicyAssignment(
+                name="name_value"
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_os_policy_assignment_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_os_policy_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_os_policy_assignment(
+            os_policy_assignment=os_policy_assignments.OSPolicyAssignment(
+                name="name_value"
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].os_policy_assignment
+        mock_val = os_policy_assignments.OSPolicyAssignment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_os_policy_assignment_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_os_policy_assignment(
+            os_policy_assignments.UpdateOSPolicyAssignmentRequest(),
+            os_policy_assignment=os_policy_assignments.OSPolicyAssignment(
+                name="name_value"
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        os_policy_assignments.GetOSPolicyAssignmentRequest,
+        dict,
+    ],
+)
+def test_get_os_policy_assignment(request_type, transport: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
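+        # Every scalar field is populated so the assertions below can check
+        # that the proto round-trips through the client surface unchanged.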
+ call.return_value = os_policy_assignments.OSPolicyAssignment( + name="name_value", + description="description_value", + revision_id="revision_id_value", + etag="etag_value", + rollout_state=os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS, + baseline=True, + deleted=True, + reconciling=True, + uid="uid_value", + ) + response = client.get_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.GetOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, os_policy_assignments.OSPolicyAssignment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.etag == "etag_value" + assert ( + response.rollout_state + == os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS + ) + assert response.baseline is True + assert response.deleted is True + assert response.reconciling is True + assert response.uid == "uid_value" + + +def test_get_os_policy_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + client.get_os_policy_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.GetOSPolicyAssignmentRequest() + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.GetOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.OSPolicyAssignment( + name="name_value", + description="description_value", + revision_id="revision_id_value", + etag="etag_value", + rollout_state=os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS, + baseline=True, + deleted=True, + reconciling=True, + uid="uid_value", + ) + ) + response = await client.get_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.GetOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. 
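+    # (The field-by-field comparison mirrors the values set on the fake
+    # response above.)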
+ assert isinstance(response, os_policy_assignments.OSPolicyAssignment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.etag == "etag_value" + assert ( + response.rollout_state + == os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS + ) + assert response.baseline is True + assert response.deleted is True + assert response.reconciling is True + assert response.uid == "uid_value" + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_async_from_dict(): + await test_get_os_policy_assignment_async(request_type=dict) + + +def test_get_os_policy_assignment_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.GetOSPolicyAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + call.return_value = os_policy_assignments.OSPolicyAssignment() + client.get_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.GetOSPolicyAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.OSPolicyAssignment() + ) + await client.get_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_os_policy_assignment_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = os_policy_assignments.OSPolicyAssignment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_os_policy_assignment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_os_policy_assignment_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_os_policy_assignment(
+            os_policy_assignments.GetOSPolicyAssignmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_os_policy_assignment_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignments.OSPolicyAssignment()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_os_policy_assignment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_os_policy_assignment_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_os_policy_assignment(
+            os_policy_assignments.GetOSPolicyAssignmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        os_policy_assignments.ListOSPolicyAssignmentsRequest,
+        dict,
+    ],
+)
+def test_list_os_policy_assignments(request_type, transport: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_os_policy_assignments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == os_policy_assignments.ListOSPolicyAssignmentsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListOSPolicyAssignmentsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_os_policy_assignments_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
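+    # (The client is expected to fall back to a default, empty request proto
+    # in this case.)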
+ client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + client.list_os_policy_assignments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentsRequest() + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.ListOSPolicyAssignmentsRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.ListOSPolicyAssignmentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_os_policy_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_async_from_dict(): + await test_list_os_policy_assignments_async(request_type=dict) + + +def test_list_os_policy_assignments_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.ListOSPolicyAssignmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + call.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + client.list_os_policy_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.ListOSPolicyAssignmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignments), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignments.ListOSPolicyAssignmentsResponse()
+        )
+        await client.list_os_policy_assignments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_os_policy_assignments_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_os_policy_assignments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_os_policy_assignments_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_os_policy_assignments(
+            os_policy_assignments.ListOSPolicyAssignmentsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignments_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignments.ListOSPolicyAssignmentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_os_policy_assignments(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignments_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
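+    # (Mixing the two calling conventions is ambiguous, so the client raises
+    # ValueError before any RPC is attempted.)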
+    with pytest.raises(ValueError):
+        await client.list_os_policy_assignments(
+            os_policy_assignments.ListOSPolicyAssignmentsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_os_policy_assignments_pager(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_os_policy_assignments(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in results
+        )
+
+
+def test_list_os_policy_assignments_pages(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignments), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_os_policy_assignments(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignments_async_pager():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
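+        # Each entry in side_effect is consumed by one mocked RPC; the
+        # trailing RuntimeError would surface if the pager ever requested a
+        # page beyond the final (token-less) response.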
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_os_policy_assignments(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignments_async_pages():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignments),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_os_policy_assignments(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest,
+        dict,
+    ],
+)
+def test_list_os_policy_assignment_revisions(request_type, transport: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.list_os_policy_assignment_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignment_revisions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + client.list_os_policy_assignment_revisions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_revisions_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_os_policy_assignment_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentRevisionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_revisions_async_from_dict(): + await test_list_os_policy_assignment_revisions_async(request_type=dict) + + +def test_list_os_policy_assignment_revisions_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
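+    # (The routing header is derived from the populated request field, e.g.
+    # ("x-goog-request-params", "name=name_value"), as asserted below.)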
+ with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + call.return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + ) + client.list_os_policy_assignment_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_revisions_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + ) + await client.list_os_policy_assignment_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_list_os_policy_assignment_revisions_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_os_policy_assignment_revisions( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_list_os_policy_assignment_revisions_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_policy_assignment_revisions( + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_revisions_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
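+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the response so the
+        # mocked stub is awaitable, mirroring a real grpc.aio unary call.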
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_os_policy_assignment_revisions(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignment_revisions_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_os_policy_assignment_revisions(
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(),
+            name="name_value",
+        )
+
+
+def test_list_os_policy_assignment_revisions_pager(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("name", ""),)),
+        )
+        pager = client.list_os_policy_assignment_revisions(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in results
+        )
+
+
+def test_list_os_policy_assignment_revisions_pages(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_os_policy_assignment_revisions(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignment_revisions_async_pager():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_os_policy_assignment_revisions(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignment_revisions_async_pages():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_os_policy_assignment_revisions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.DeleteOSPolicyAssignmentRequest, + dict, + ], +) +def test_delete_os_policy_assignment(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_os_policy_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + client.delete_os_policy_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + +@pytest.mark.asyncio +async def test_delete_os_policy_assignment_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.DeleteOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_os_policy_assignment_async_from_dict(): + await test_delete_os_policy_assignment_async(request_type=dict) + + +def test_delete_os_policy_assignment_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_os_policy_assignment_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_os_policy_assignment_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
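+        # `name` is the only flattened field for this RPC, so a single
+        # keyword argument exercises the whole flattened code path.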
+        client.delete_os_policy_assignment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_os_policy_assignment_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_os_policy_assignment(
+            os_policy_assignments.DeleteOSPolicyAssignmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_os_policy_assignment_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_os_policy_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_os_policy_assignment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_os_policy_assignment_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_os_policy_assignment(
+            os_policy_assignments.DeleteOSPolicyAssignmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest,
+        dict,
+    ],
+)
+def test_get_os_policy_assignment_report(request_type, transport: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment_report), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = os_policy_assignment_reports.OSPolicyAssignmentReport(
+            name="name_value",
+            instance="instance_value",
+            os_policy_assignment="os_policy_assignment_value",
+            last_run_id="last_run_id_value",
+        )
+        response = client.get_os_policy_assignment_report(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert (
+            args[0] == os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest()
+        )
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, os_policy_assignment_reports.OSPolicyAssignmentReport) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.os_policy_assignment == "os_policy_assignment_value" + assert response.last_run_id == "last_run_id_value" + + +def test_get_os_policy_assignment_report_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment_report), "__call__" + ) as call: + client.get_os_policy_assignment_report() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + ) + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_report_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignment_reports.OSPolicyAssignmentReport( + name="name_value", + instance="instance_value", + os_policy_assignment="os_policy_assignment_value", + last_run_id="last_run_id_value", + ) + ) + response = await client.get_os_policy_assignment_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] == os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, os_policy_assignment_reports.OSPolicyAssignmentReport) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.os_policy_assignment == "os_policy_assignment_value" + assert response.last_run_id == "last_run_id_value" + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_report_async_from_dict(): + await test_get_os_policy_assignment_report_async(request_type=dict) + + +def test_get_os_policy_assignment_report_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment_report), "__call__" + ) as call: + call.return_value = os_policy_assignment_reports.OSPolicyAssignmentReport() + client.get_os_policy_assignment_report(request) + + # Establish that the underlying gRPC stub method was called. 
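+        # The request proto must reach the stub unchanged; the routing header
+        # derived from it is verified separately below.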
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_os_policy_assignment_report_field_headers_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment_report), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignment_reports.OSPolicyAssignmentReport()
+        )
+        await client.get_os_policy_assignment_report(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_os_policy_assignment_report_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment_report), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = os_policy_assignment_reports.OSPolicyAssignmentReport()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_os_policy_assignment_report(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_os_policy_assignment_report_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_os_policy_assignment_report(
+            os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_os_policy_assignment_report_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment_report), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignment_reports.OSPolicyAssignmentReport()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.get_os_policy_assignment_report( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_report_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_os_policy_assignment_report( + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + dict, + ], +) +def test_list_os_policy_assignment_reports(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.list_os_policy_assignment_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentReportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignment_reports_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + client.list_os_policy_assignment_reports() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
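+    # The transport exposes each RPC as a callable attribute; patching
+    # `__call__` on its type intercepts the invocation without ever touching
+    # the underlying gRPC channel.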
+ with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_os_policy_assignment_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentReportsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_async_from_dict(): + await test_list_os_policy_assignment_reports_async(request_type=dict) + + +def test_list_os_policy_assignment_reports_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + call.return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + client.list_os_policy_assignment_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + await client.list_os_policy_assignment_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_os_policy_assignment_reports_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_reports), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_os_policy_assignment_reports(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_os_policy_assignment_reports_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_os_policy_assignment_reports(
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignment_reports_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_reports), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_os_policy_assignment_reports(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignment_reports_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_os_policy_assignment_reports(
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_os_policy_assignment_reports_pager(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_reports), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
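+        # Each successive stub call returns the next element of `side_effect`,
+        # so the pager consumes four pages; the trailing RuntimeError would
+        # only surface if the pager kept fetching past the final page, whose
+        # empty `next_page_token` should stop the iteration.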
+ call.side_effect = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_os_policy_assignment_reports(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, os_policy_assignment_reports.OSPolicyAssignmentReport) + for i in results + ) + + +def test_list_os_policy_assignment_reports_pages(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + RuntimeError, + ) + pages = list(client.list_os_policy_assignment_reports(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_async_pager(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
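+        # `new_callable=mock.AsyncMock` makes each mocked stub invocation
+        # return an awaitable, so the async pager can await the canned pages
+        # just as it would a real grpc.aio call.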
+ call.side_effect = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_os_policy_assignment_reports( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, os_policy_assignment_reports.OSPolicyAssignmentReport) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_async_pages(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_os_policy_assignment_reports(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + inventory.GetInventoryRequest, + dict, + ], +) +def test_get_inventory(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = inventory.Inventory( + name="name_value", + ) + response = client.get_inventory(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.GetInventoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, inventory.Inventory) + assert response.name == "name_value" + + +def test_get_inventory_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + client.get_inventory() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.GetInventoryRequest() + + +@pytest.mark.asyncio +async def test_get_inventory_async( + transport: str = "grpc_asyncio", request_type=inventory.GetInventoryRequest +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + inventory.Inventory( + name="name_value", + ) + ) + response = await client.get_inventory(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.GetInventoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, inventory.Inventory) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_inventory_async_from_dict(): + await test_get_inventory_async(request_type=dict) + + +def test_get_inventory_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = inventory.GetInventoryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + call.return_value = inventory.Inventory() + client.get_inventory(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
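+    # The expected tuple mirrors what the client builds internally, e.g.:
+    #   gapic_v1.routing_header.to_grpc_metadata((("name", "name_value"),))
+    # which yields ("x-goog-request-params", "name=name_value").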
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_inventory_field_headers_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = inventory.GetInventoryRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_inventory), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(inventory.Inventory())
+        await client.get_inventory(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_inventory_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_inventory), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = inventory.Inventory()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_inventory(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_inventory_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_inventory(
+            inventory.GetInventoryRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_inventory_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_inventory), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(inventory.Inventory())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_inventory(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_inventory_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
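+    # Mixing the two styles is ambiguous (which value should win?), so the
+    # generated client rejects the combination up front.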
+ with pytest.raises(ValueError): + await client.get_inventory( + inventory.GetInventoryRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + inventory.ListInventoriesRequest, + dict, + ], +) +def test_list_inventories(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = inventory.ListInventoriesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_inventories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.ListInventoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInventoriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_inventories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + client.list_inventories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.ListInventoriesRequest() + + +@pytest.mark.asyncio +async def test_list_inventories_async( + transport: str = "grpc_asyncio", request_type=inventory.ListInventoriesRequest +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + inventory.ListInventoriesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_inventories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.ListInventoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInventoriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_inventories_async_from_dict(): + await test_list_inventories_async(request_type=dict) + + +def test_list_inventories_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = inventory.ListInventoriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + call.return_value = inventory.ListInventoriesResponse() + client.list_inventories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_inventories_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = inventory.ListInventoriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + inventory.ListInventoriesResponse() + ) + await client.list_inventories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_inventories_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = inventory.ListInventoriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_inventories( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_inventories_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inventories( + inventory.ListInventoriesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_inventories_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            inventory.ListInventoriesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_inventories(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_inventories_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_inventories(
+            inventory.ListInventoriesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_inventories_pager(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_inventories), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            inventory.ListInventoriesResponse(
+                inventories=[
+                    inventory.Inventory(),
+                    inventory.Inventory(),
+                    inventory.Inventory(),
+                ],
+                next_page_token="abc",
+            ),
+            inventory.ListInventoriesResponse(
+                inventories=[],
+                next_page_token="def",
+            ),
+            inventory.ListInventoriesResponse(
+                inventories=[
+                    inventory.Inventory(),
+                ],
+                next_page_token="ghi",
+            ),
+            inventory.ListInventoriesResponse(
+                inventories=[
+                    inventory.Inventory(),
+                    inventory.Inventory(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_inventories(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, inventory.Inventory) for i in results)
+
+
+def test_list_inventories_pages(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_inventories), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            inventory.ListInventoriesResponse(
+                inventories=[
+                    inventory.Inventory(),
+                    inventory.Inventory(),
+                    inventory.Inventory(),
+                ],
+                next_page_token="abc",
+            ),
+            inventory.ListInventoriesResponse(
+                inventories=[],
+                next_page_token="def",
+            ),
+            inventory.ListInventoriesResponse(
+                inventories=[
+                    inventory.Inventory(),
+                ],
+                next_page_token="ghi",
+            ),
+            inventory.ListInventoriesResponse(
+                inventories=[
+                    inventory.Inventory(),
+                    inventory.Inventory(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_inventories(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_inventories_async_pager():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_inventories), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + inventory.Inventory(), + ], + next_page_token="abc", + ), + inventory.ListInventoriesResponse( + inventories=[], + next_page_token="def", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + ], + next_page_token="ghi", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_inventories( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, inventory.Inventory) for i in responses) + + +@pytest.mark.asyncio +async def test_list_inventories_async_pages(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inventories), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + inventory.Inventory(), + ], + next_page_token="abc", + ), + inventory.ListInventoriesResponse( + inventories=[], + next_page_token="def", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + ], + next_page_token="ghi", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_inventories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vulnerability.GetVulnerabilityReportRequest, + dict, + ], +) +def test_get_vulnerability_report(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.VulnerabilityReport( + name="name_value", + ) + response = client.get_vulnerability_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.GetVulnerabilityReportRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vulnerability.VulnerabilityReport) + assert response.name == "name_value" + + +def test_get_vulnerability_report_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + client.get_vulnerability_report() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.GetVulnerabilityReportRequest() + + +@pytest.mark.asyncio +async def test_get_vulnerability_report_async( + transport: str = "grpc_asyncio", + request_type=vulnerability.GetVulnerabilityReportRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.VulnerabilityReport( + name="name_value", + ) + ) + response = await client.get_vulnerability_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.GetVulnerabilityReportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vulnerability.VulnerabilityReport) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_vulnerability_report_async_from_dict(): + await test_get_vulnerability_report_async(request_type=dict) + + +def test_get_vulnerability_report_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vulnerability.GetVulnerabilityReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + call.return_value = vulnerability.VulnerabilityReport() + client.get_vulnerability_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_vulnerability_report_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = vulnerability.GetVulnerabilityReportRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_vulnerability_report), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            vulnerability.VulnerabilityReport()
+        )
+        await client.get_vulnerability_report(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_vulnerability_report_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_vulnerability_report), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = vulnerability.VulnerabilityReport()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_vulnerability_report(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_vulnerability_report_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_vulnerability_report(
+            vulnerability.GetVulnerabilityReportRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_vulnerability_report_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_vulnerability_report), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            vulnerability.VulnerabilityReport()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_vulnerability_report(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_vulnerability_report_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_vulnerability_report( + vulnerability.GetVulnerabilityReportRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vulnerability.ListVulnerabilityReportsRequest, + dict, + ], +) +def test_list_vulnerability_reports(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.ListVulnerabilityReportsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_vulnerability_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.ListVulnerabilityReportsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVulnerabilityReportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_vulnerability_reports_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + client.list_vulnerability_reports() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.ListVulnerabilityReportsRequest() + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_async( + transport: str = "grpc_asyncio", + request_type=vulnerability.ListVulnerabilityReportsRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.ListVulnerabilityReportsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_vulnerability_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.ListVulnerabilityReportsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListVulnerabilityReportsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_async_from_dict(): + await test_list_vulnerability_reports_async(request_type=dict) + + +def test_list_vulnerability_reports_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vulnerability.ListVulnerabilityReportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + call.return_value = vulnerability.ListVulnerabilityReportsResponse() + client.list_vulnerability_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vulnerability.ListVulnerabilityReportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.ListVulnerabilityReportsResponse() + ) + await client.list_vulnerability_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_vulnerability_reports_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.ListVulnerabilityReportsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_vulnerability_reports( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_vulnerability_reports_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_vulnerability_reports(
+            vulnerability.ListVulnerabilityReportsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_vulnerability_reports_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_vulnerability_reports), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            vulnerability.ListVulnerabilityReportsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_vulnerability_reports(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_vulnerability_reports_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_vulnerability_reports(
+            vulnerability.ListVulnerabilityReportsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_vulnerability_reports_pager(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_vulnerability_reports), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            vulnerability.ListVulnerabilityReportsResponse(
+                vulnerability_reports=[
+                    vulnerability.VulnerabilityReport(),
+                    vulnerability.VulnerabilityReport(),
+                    vulnerability.VulnerabilityReport(),
+                ],
+                next_page_token="abc",
+            ),
+            vulnerability.ListVulnerabilityReportsResponse(
+                vulnerability_reports=[],
+                next_page_token="def",
+            ),
+            vulnerability.ListVulnerabilityReportsResponse(
+                vulnerability_reports=[
+                    vulnerability.VulnerabilityReport(),
+                ],
+                next_page_token="ghi",
+            ),
+            vulnerability.ListVulnerabilityReportsResponse(
+                vulnerability_reports=[
+                    vulnerability.VulnerabilityReport(),
+                    vulnerability.VulnerabilityReport(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_vulnerability_reports(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, vulnerability.VulnerabilityReport) for i in results)
+
+
+def test_list_vulnerability_reports_pages(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials,
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_vulnerability_reports), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + RuntimeError, + ) + pages = list(client.list_vulnerability_reports(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_async_pager(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_vulnerability_reports( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, vulnerability.VulnerabilityReport) for i in responses) + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_async_pages(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_vulnerability_reports(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.CreateOSPolicyAssignmentRequest, + dict, + ], +) +def test_create_os_policy_assignment_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["os_policy_assignment"] = { + "name": "name_value", + "description": "description_value", + "os_policies": [ + { + "id": "id_value", + "description": "description_value", + "mode": 1, + "resource_groups": [ + { + "inventory_filters": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + "resources": [ + { + "id": "id_value", + "pkg": { + "desired_state": 1, + "apt": {"name": "name_value"}, + "deb": { + "source": { + "remote": { + "uri": "uri_value", + "sha256_checksum": "sha256_checksum_value", + }, + "gcs": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "local_path": "local_path_value", + "allow_insecure": True, + }, + "pull_deps": True, + }, + "yum": {"name": "name_value"}, + "zypper": {"name": "name_value"}, + "rpm": {"source": {}, "pull_deps": True}, + "googet": {"name": "name_value"}, + "msi": { + "source": {}, + "properties": [ + "properties_value1", + "properties_value2", + ], + }, + }, + "repository": { + "apt": { + "archive_type": 1, + "uri": "uri_value", + "distribution": "distribution_value", + "components": [ + "components_value1", + "components_value2", + ], + "gpg_key": "gpg_key_value", + }, + "yum": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "zypper": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "goo": {"name": "name_value", "url": "url_value"}, + }, + "exec_": { + "validate": { + "file": {}, + "script": "script_value", + "args": ["args_value1", "args_value2"], + "interpreter": 1, + "output_file_path": "output_file_path_value", + }, + "enforce": {}, + }, + "file": { + "file": {}, + "content": "content_value", + "path": "path_value", + "state": 1, + "permissions": "permissions_value", + }, + } + ], + } + ], + 
"allow_no_resource_group_match": True, + } + ], + "instance_filter": { + "all_": True, + "inclusion_labels": [{"labels": {}}], + "exclusion_labels": {}, + "inventories": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + }, + "rollout": { + "disruption_budget": {"fixed": 528, "percent": 753}, + "min_wait_duration": {"seconds": 751, "nanos": 543}, + }, + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "etag": "etag_value", + "rollout_state": 1, + "baseline": True, + "deleted": True, + "reconciling": True, + "uid": "uid_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_os_policy_assignment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_os_policy_assignment_rest_required_fields( + request_type=os_policy_assignments.CreateOSPolicyAssignmentRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["os_policy_assignment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "osPolicyAssignmentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "osPolicyAssignmentId" in jsonified_request + assert ( + jsonified_request["osPolicyAssignmentId"] + == request_init["os_policy_assignment_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["osPolicyAssignmentId"] = "os_policy_assignment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_os_policy_assignment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("os_policy_assignment_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "osPolicyAssignmentId" in jsonified_request + assert jsonified_request["osPolicyAssignmentId"] == "os_policy_assignment_id_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_os_policy_assignment(request) + + expected_params = [ + ( + "osPolicyAssignmentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_os_policy_assignment_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_os_policy_assignment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("osPolicyAssignmentId",)) + & set( + ( + "parent", + "osPolicyAssignment", + "osPolicyAssignmentId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_os_policy_assignment_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_create_os_policy_assignment", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_create_os_policy_assignment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.CreateOSPolicyAssignmentRequest.pb( + os_policy_assignments.CreateOSPolicyAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = os_policy_assignments.CreateOSPolicyAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_os_policy_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_os_policy_assignment_rest_bad_request( + 
transport: str = "rest", + request_type=os_policy_assignments.CreateOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["os_policy_assignment"] = { + "name": "name_value", + "description": "description_value", + "os_policies": [ + { + "id": "id_value", + "description": "description_value", + "mode": 1, + "resource_groups": [ + { + "inventory_filters": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + "resources": [ + { + "id": "id_value", + "pkg": { + "desired_state": 1, + "apt": {"name": "name_value"}, + "deb": { + "source": { + "remote": { + "uri": "uri_value", + "sha256_checksum": "sha256_checksum_value", + }, + "gcs": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "local_path": "local_path_value", + "allow_insecure": True, + }, + "pull_deps": True, + }, + "yum": {"name": "name_value"}, + "zypper": {"name": "name_value"}, + "rpm": {"source": {}, "pull_deps": True}, + "googet": {"name": "name_value"}, + "msi": { + "source": {}, + "properties": [ + "properties_value1", + "properties_value2", + ], + }, + }, + "repository": { + "apt": { + "archive_type": 1, + "uri": "uri_value", + "distribution": "distribution_value", + "components": [ + "components_value1", + "components_value2", + ], + "gpg_key": "gpg_key_value", + }, + "yum": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "zypper": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "goo": {"name": "name_value", "url": "url_value"}, + }, + "exec_": { + "validate": { + "file": {}, + "script": "script_value", + "args": ["args_value1", "args_value2"], + "interpreter": 1, + "output_file_path": "output_file_path_value", + }, + "enforce": {}, + }, + "file": { + "file": {}, + "content": "content_value", + "path": "path_value", + "state": 1, + "permissions": "permissions_value", + }, + } + ], + } + ], + "allow_no_resource_group_match": True, + } + ], + "instance_filter": { + "all_": True, + "inclusion_labels": [{"labels": {}}], + "exclusion_labels": {}, + "inventories": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + }, + "rollout": { + "disruption_budget": {"fixed": 528, "percent": 753}, + "min_wait_duration": {"seconds": 751, "nanos": 543}, + }, + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "etag": "etag_value", + "rollout_state": 1, + "baseline": True, + "deleted": True, + "reconciling": True, + "uid": "uid_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_os_policy_assignment(request) + + +def test_create_os_policy_assignment_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_os_policy_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/osPolicyAssignments" + % client.transport._host, + args[1], + ) + + +def test_create_os_policy_assignment_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
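+ # Supplying both a request object and flattened keyword arguments is
+ # ambiguous, so the client raises ValueError before any HTTP call.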
+ with pytest.raises(ValueError): + client.create_os_policy_assignment( + os_policy_assignments.CreateOSPolicyAssignmentRequest(), + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + +def test_create_os_policy_assignment_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.UpdateOSPolicyAssignmentRequest, + dict, + ], +) +def test_update_os_policy_assignment_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "os_policy_assignment": { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + } + request_init["os_policy_assignment"] = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3", + "description": "description_value", + "os_policies": [ + { + "id": "id_value", + "description": "description_value", + "mode": 1, + "resource_groups": [ + { + "inventory_filters": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + "resources": [ + { + "id": "id_value", + "pkg": { + "desired_state": 1, + "apt": {"name": "name_value"}, + "deb": { + "source": { + "remote": { + "uri": "uri_value", + "sha256_checksum": "sha256_checksum_value", + }, + "gcs": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "local_path": "local_path_value", + "allow_insecure": True, + }, + "pull_deps": True, + }, + "yum": {"name": "name_value"}, + "zypper": {"name": "name_value"}, + "rpm": {"source": {}, "pull_deps": True}, + "googet": {"name": "name_value"}, + "msi": { + "source": {}, + "properties": [ + "properties_value1", + "properties_value2", + ], + }, + }, + "repository": { + "apt": { + "archive_type": 1, + "uri": "uri_value", + "distribution": "distribution_value", + "components": [ + "components_value1", + "components_value2", + ], + "gpg_key": "gpg_key_value", + }, + "yum": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "zypper": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "goo": {"name": "name_value", "url": "url_value"}, + }, + "exec_": { + "validate": { + "file": {}, + "script": "script_value", + "args": ["args_value1", "args_value2"], + "interpreter": 1, + "output_file_path": "output_file_path_value", + }, + "enforce": {}, + }, + "file": { + "file": {}, + "content": "content_value", + "path": "path_value", + "state": 1, + "permissions": "permissions_value", + }, + } + ], + } + ], + "allow_no_resource_group_match": True, + } + ], + "instance_filter": { + "all_": True, + "inclusion_labels": [{"labels": {}}], + "exclusion_labels": {}, + "inventories": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + }, + "rollout": { + "disruption_budget": {"fixed": 528, "percent": 753}, + "min_wait_duration": {"seconds": 751, "nanos": 543}, + }, + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "etag": "etag_value", + "rollout_state": 1, + "baseline": 
True, + "deleted": True, + "reconciling": True, + "uid": "uid_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_os_policy_assignment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_os_policy_assignment_rest_required_fields( + request_type=os_policy_assignments.UpdateOSPolicyAssignmentRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_os_policy_assignment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
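+ # path_template.transcode is patched to a fixed uri/method/query_params
+ # mapping so the expected_params assertion below can inspect exactly
+ # what the transport sends as query parameters.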
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_os_policy_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_os_policy_assignment_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_os_policy_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("osPolicyAssignment",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_os_policy_assignment_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_update_os_policy_assignment", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_update_os_policy_assignment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.UpdateOSPolicyAssignmentRequest.pb( + os_policy_assignments.UpdateOSPolicyAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_os_policy_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_os_policy_assignment_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.UpdateOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "os_policy_assignment": { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + } + request_init["os_policy_assignment"] = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3", + "description": "description_value", + "os_policies": [ + { + "id": "id_value", + "description": "description_value", + "mode": 1, + "resource_groups": 
[ + { + "inventory_filters": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + "resources": [ + { + "id": "id_value", + "pkg": { + "desired_state": 1, + "apt": {"name": "name_value"}, + "deb": { + "source": { + "remote": { + "uri": "uri_value", + "sha256_checksum": "sha256_checksum_value", + }, + "gcs": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "local_path": "local_path_value", + "allow_insecure": True, + }, + "pull_deps": True, + }, + "yum": {"name": "name_value"}, + "zypper": {"name": "name_value"}, + "rpm": {"source": {}, "pull_deps": True}, + "googet": {"name": "name_value"}, + "msi": { + "source": {}, + "properties": [ + "properties_value1", + "properties_value2", + ], + }, + }, + "repository": { + "apt": { + "archive_type": 1, + "uri": "uri_value", + "distribution": "distribution_value", + "components": [ + "components_value1", + "components_value2", + ], + "gpg_key": "gpg_key_value", + }, + "yum": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "zypper": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "goo": {"name": "name_value", "url": "url_value"}, + }, + "exec_": { + "validate": { + "file": {}, + "script": "script_value", + "args": ["args_value1", "args_value2"], + "interpreter": 1, + "output_file_path": "output_file_path_value", + }, + "enforce": {}, + }, + "file": { + "file": {}, + "content": "content_value", + "path": "path_value", + "state": 1, + "permissions": "permissions_value", + }, + } + ], + } + ], + "allow_no_resource_group_match": True, + } + ], + "instance_filter": { + "all_": True, + "inclusion_labels": [{"labels": {}}], + "exclusion_labels": {}, + "inventories": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + }, + "rollout": { + "disruption_budget": {"fixed": 528, "percent": 753}, + "min_wait_duration": {"seconds": 751, "nanos": 543}, + }, + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "etag": "etag_value", + "rollout_state": 1, + "baseline": True, + "deleted": True, + "reconciling": True, + "uid": "uid_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_os_policy_assignment(request) + + +def test_update_os_policy_assignment_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
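+ # The assertion at the end of this test runs path_template.validate
+ # against the http rule template, confirming the flattened arguments
+ # were expanded into the PATCH URI.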
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "os_policy_assignment": { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_os_policy_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{os_policy_assignment.name=projects/*/locations/*/osPolicyAssignments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_os_policy_assignment_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_os_policy_assignment( + os_policy_assignments.UpdateOSPolicyAssignmentRequest(), + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_os_policy_assignment_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.GetOSPolicyAssignmentRequest, + dict, + ], +) +def test_get_os_policy_assignment_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.OSPolicyAssignment( + name="name_value", + description="description_value", + revision_id="revision_id_value", + etag="etag_value", + rollout_state=os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS, + baseline=True, + deleted=True, + reconciling=True, + uid="uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignments.OSPolicyAssignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_os_policy_assignment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, os_policy_assignments.OSPolicyAssignment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.etag == "etag_value" + assert ( + response.rollout_state + == os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS + ) + assert response.baseline is True + assert response.deleted is True + assert response.reconciling is True + assert response.uid == "uid_value" + + +def test_get_os_policy_assignment_rest_required_fields( + request_type=os_policy_assignments.GetOSPolicyAssignmentRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.OSPolicyAssignment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
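+ # This GET carries no body, so the only expected query parameter is the
+ # $alt system parameter selecting JSON with integer enum encoding.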
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = os_policy_assignments.OSPolicyAssignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_os_policy_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_os_policy_assignment_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_os_policy_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_os_policy_assignment_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "post_get_os_policy_assignment" + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_get_os_policy_assignment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.GetOSPolicyAssignmentRequest.pb( + os_policy_assignments.GetOSPolicyAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = os_policy_assignments.OSPolicyAssignment.to_json( + os_policy_assignments.OSPolicyAssignment() + ) + + request = os_policy_assignments.GetOSPolicyAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = os_policy_assignments.OSPolicyAssignment() + + client.get_os_policy_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_os_policy_assignment_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.GetOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_os_policy_assignment(request) + + +def test_get_os_policy_assignment_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.OSPolicyAssignment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignments.OSPolicyAssignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_os_policy_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/osPolicyAssignments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_os_policy_assignment_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_os_policy_assignment( + os_policy_assignments.GetOSPolicyAssignmentRequest(), + name="name_value", + ) + + +def test_get_os_policy_assignment_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.ListOSPolicyAssignmentsRequest, + dict, + ], +) +def test_list_os_policy_assignments_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
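+ # The canned response carries only next_page_token; the test then
+ # checks the pager type and that the token survives deserialization.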
+ return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_os_policy_assignments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignments_rest_required_fields( + request_type=os_policy_assignments.ListOSPolicyAssignmentsRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
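+ # For List methods, pageSize and pageToken travel as optional query
+ # parameters while the required parent is bound into the URI path.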
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_os_policy_assignments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_os_policy_assignments_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_os_policy_assignments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_os_policy_assignments_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_list_os_policy_assignments", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_list_os_policy_assignments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.ListOSPolicyAssignmentsRequest.pb( + os_policy_assignments.ListOSPolicyAssignmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + os_policy_assignments.ListOSPolicyAssignmentsResponse.to_json( + os_policy_assignments.ListOSPolicyAssignmentsResponse() + ) + ) + + request = os_policy_assignments.ListOSPolicyAssignmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + + client.list_os_policy_assignments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_os_policy_assignments_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.ListOSPolicyAssignmentsRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_os_policy_assignments(request) + + +def test_list_os_policy_assignments_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_os_policy_assignments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/osPolicyAssignments" + % client.transport._host, + args[1], + ) + + +def test_list_os_policy_assignments_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_policy_assignments( + os_policy_assignments.ListOSPolicyAssignmentsRequest(), + parent="parent_value", + ) + + +def test_list_os_policy_assignments_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + os_policy_assignments.ListOSPolicyAssignmentsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_os_policy_assignments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in results + ) + + pages = list(client.list_os_policy_assignments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + dict, + ], +) +def test_list_os_policy_assignment_revisions_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_os_policy_assignment_revisions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListOSPolicyAssignmentRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignment_revisions_rest_required_fields( + request_type=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignment_revisions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignment_revisions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
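+ # ListOSPolicyAssignmentRevisions keys off name rather than parent,
+ # since revisions belong to one specific assignment resource (the
+ # :listRevisions custom verb asserted in the flattened test below).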
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = (
+                os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.pb(
+                    return_value
+                )
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_os_policy_assignment_revisions(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_os_policy_assignment_revisions_rest_unset_required_fields():
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = (
+        transport.list_os_policy_assignment_revisions._get_unset_required_fields({})
+    )
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("name",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_os_policy_assignment_revisions_rest_interceptors(null_interceptor):
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.OsConfigZonalServiceRestInterceptor(),
+    )
+    client = OsConfigZonalServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor,
+        "post_list_os_policy_assignment_revisions",
+    ) as post, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor,
+        "pre_list_os_policy_assignment_revisions",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.pb(
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.to_json(
+                os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse()
+            )
+        )
+
+        request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse()
+        )
+
+        client.list_os_policy_assignment_revisions(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_os_policy_assignment_revisions_rest_bad_request(
+    transport: str = "rest",
+    request_type=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest,
+):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_os_policy_assignment_revisions(request)
+
+
+def test_list_os_policy_assignment_revisions_rest_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.pb(
+                return_value
+            )
+        )
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.list_os_policy_assignment_revisions(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{name=projects/*/locations/*/osPolicyAssignments/*}:listRevisions"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_os_policy_assignment_revisions_rest_flattened_error(
+    transport: str = "rest",
+):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_os_policy_assignment_revisions(
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(),
+            name="name_value",
+        )
+
+
+def test_list_os_policy_assignment_revisions_rest_pager(transport: str = "rest"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+        # with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.to_json(x)
+            for x in response
+        )
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode("UTF-8")
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3"
+        }
+
+        pager = client.list_os_policy_assignment_revisions(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in results
+        )
+
+        pages = list(
+            client.list_os_policy_assignment_revisions(request=sample_request).pages
+        )
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        os_policy_assignments.DeleteOSPolicyAssignmentRequest,
+        dict,
+    ],
+)
+def test_delete_os_policy_assignment_rest(request_type):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.delete_os_policy_assignment(request)
+
+    # Establish that the response is the type that we expect.
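+    # delete is a long-running method, so the surfaced result wraps the raw
+    # operations_pb2.Operation; illustratively (using the mocked value from
+    # above, not a real resource):
+    #   Operation(name="operations/spam") -> response.operation.name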
+    assert response.operation.name == "operations/spam"
+
+
+def test_delete_os_policy_assignment_rest_required_fields(
+    request_type=os_policy_assignments.DeleteOSPolicyAssignmentRequest,
+):
+    transport_class = transports.OsConfigZonalServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).delete_os_policy_assignment._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).delete_os_policy_assignment._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name="operations/spam")
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
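+            # Illustrative sketch only: an unmocked transcode() would resolve
+            # the real http rule into something shaped like
+            #   {"uri": "/v1/projects/p/locations/l/osPolicyAssignments/a",
+            #    "method": "delete", "query_params": <remaining fields>}
+            # (hypothetical values); the "v1/sample_method" placeholder below
+            # sidesteps that routing entirely.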
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_os_policy_assignment(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_os_policy_assignment_rest_unset_required_fields():
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_os_policy_assignment._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_os_policy_assignment_rest_interceptors(null_interceptor):
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.OsConfigZonalServiceRestInterceptor(),
+    )
+    client = OsConfigZonalServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor,
+        "post_delete_os_policy_assignment",
+    ) as post, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor,
+        "pre_delete_os_policy_assignment",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = os_policy_assignments.DeleteOSPolicyAssignmentRequest.pb(
+            os_policy_assignments.DeleteOSPolicyAssignmentRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = os_policy_assignments.DeleteOSPolicyAssignmentRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.delete_os_policy_assignment(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_os_policy_assignment_rest_bad_request(
+    transport: str = "rest",
+    request_type=os_policy_assignments.DeleteOSPolicyAssignmentRequest,
+):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
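+    # A minimal failure stub is enough here: the REST transport maps any
+    # mocked 400 response onto core_exceptions.BadRequest, which
+    # pytest.raises then catches.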
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_os_policy_assignment(request)
+
+
+def test_delete_os_policy_assignment_rest_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.delete_os_policy_assignment(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{name=projects/*/locations/*/osPolicyAssignments/*}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_delete_os_policy_assignment_rest_flattened_error(transport: str = "rest"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_os_policy_assignment(
+            os_policy_assignments.DeleteOSPolicyAssignmentRequest(),
+            name="name_value",
+        )
+
+
+def test_delete_os_policy_assignment_rest_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest,
+        dict,
+    ],
+)
+def test_get_os_policy_assignment_report_rest(request_type):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4/report"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
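+        # Only the scalar fields asserted on below are populated; the nested
+        # per-policy compliance data is left at its default values.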
+        return_value = os_policy_assignment_reports.OSPolicyAssignmentReport(
+            name="name_value",
+            instance="instance_value",
+            os_policy_assignment="os_policy_assignment_value",
+            last_run_id="last_run_id_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = os_policy_assignment_reports.OSPolicyAssignmentReport.pb(
+            return_value
+        )
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_os_policy_assignment_report(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, os_policy_assignment_reports.OSPolicyAssignmentReport)
+    assert response.name == "name_value"
+    assert response.instance == "instance_value"
+    assert response.os_policy_assignment == "os_policy_assignment_value"
+    assert response.last_run_id == "last_run_id_value"
+
+
+def test_get_os_policy_assignment_report_rest_required_fields(
+    request_type=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest,
+):
+    transport_class = transports.OsConfigZonalServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_os_policy_assignment_report._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_os_policy_assignment_report._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = os_policy_assignment_reports.OSPolicyAssignmentReport()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
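+            # Note: the request built from request_init still carries only
+            # default (empty) field values, so serializing its query params
+            # should yield nothing, leaving just the "$alt" system parameter
+            # asserted further down.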
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = os_policy_assignment_reports.OSPolicyAssignmentReport.pb(
+                return_value
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_os_policy_assignment_report(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_os_policy_assignment_report_rest_unset_required_fields():
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_os_policy_assignment_report._get_unset_required_fields(
+        {}
+    )
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_os_policy_assignment_report_rest_interceptors(null_interceptor):
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.OsConfigZonalServiceRestInterceptor(),
+    )
+    client = OsConfigZonalServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor,
+        "post_get_os_policy_assignment_report",
+    ) as post, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor,
+        "pre_get_os_policy_assignment_report",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.pb(
+            os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = (
+            os_policy_assignment_reports.OSPolicyAssignmentReport.to_json(
+                os_policy_assignment_reports.OSPolicyAssignmentReport()
+            )
+        )
+
+        request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = os_policy_assignment_reports.OSPolicyAssignmentReport()
+
+        client.get_os_policy_assignment_report(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_os_policy_assignment_report_rest_bad_request(
+    transport: str = "rest",
+    request_type=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest,
+):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4/report"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_os_policy_assignment_report(request)
+
+
+def test_get_os_policy_assignment_report_rest_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = os_policy_assignment_reports.OSPolicyAssignmentReport()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4/report"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = os_policy_assignment_reports.OSPolicyAssignmentReport.pb(
+            return_value
+        )
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.get_os_policy_assignment_report(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{name=projects/*/locations/*/instances/*/osPolicyAssignments/*/report}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_get_os_policy_assignment_report_rest_flattened_error(transport: str = "rest"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_os_policy_assignment_report(
+            os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(),
+            name="name_value",
+        )
+
+
+def test_get_os_policy_assignment_report_rest_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest,
+        dict,
+    ],
+)
+def test_list_os_policy_assignment_reports_rest(request_type):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "parent": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
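+        # A single page carrying a next_page_token is enough for this smoke
+        # test; multi-page iteration (keep fetching while the token is
+        # non-empty) is exercised by the *_rest_pager test below.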
+        return_value = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.pb(
+                return_value
+            )
+        )
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_os_policy_assignment_reports(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListOSPolicyAssignmentReportsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_os_policy_assignment_reports_rest_required_fields(
+    request_type=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest,
+):
+    transport_class = transports.OsConfigZonalServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_os_policy_assignment_reports._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = "parent_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_os_policy_assignment_reports._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(
+        (
+            "filter",
+            "page_size",
+            "page_token",
+        )
+    )
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == "parent_value"
+
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = (
+                os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.pb(
+                    return_value
+                )
+            )
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_os_policy_assignment_reports(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_os_policy_assignment_reports_rest_unset_required_fields():
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = (
+        transport.list_os_policy_assignment_reports._get_unset_required_fields({})
+    )
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_os_policy_assignment_reports_rest_interceptors(null_interceptor):
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.OsConfigZonalServiceRestInterceptor(),
+    )
+    client = OsConfigZonalServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor,
+        "post_list_os_policy_assignment_reports",
+    ) as post, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor,
+        "pre_list_os_policy_assignment_reports",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest.pb(
+                os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest()
+            )
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.to_json(
+                os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse()
+            )
+        )
+
+        request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse()
+        )
+
+        client.list_os_policy_assignment_reports(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_os_policy_assignment_reports_rest_bad_request(
+    transport: str = "rest",
+    request_type=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest,
+):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "parent": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_os_policy_assignment_reports(request)
+
+
+def test_list_os_policy_assignment_reports_rest_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse()
+        )
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "parent": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.pb(
+                return_value
+            )
+        )
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.list_os_policy_assignment_reports(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{parent=projects/*/locations/*/instances/*/osPolicyAssignments/*}/reports"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_os_policy_assignment_reports_rest_flattened_error(
+    transport: str = "rest",
+):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_os_policy_assignment_reports(
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_os_policy_assignment_reports_rest_pager(transport: str = "rest"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+        # with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse(
+                os_policy_assignment_reports=[
+                    os_policy_assignment_reports.OSPolicyAssignmentReport(),
+                    os_policy_assignment_reports.OSPolicyAssignmentReport(),
+                    os_policy_assignment_reports.OSPolicyAssignmentReport(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse(
+                os_policy_assignment_reports=[],
+                next_page_token="def",
+            ),
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse(
+                os_policy_assignment_reports=[
+                    os_policy_assignment_reports.OSPolicyAssignmentReport(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse(
+                os_policy_assignment_reports=[
+                    os_policy_assignment_reports.OSPolicyAssignmentReport(),
+                    os_policy_assignment_reports.OSPolicyAssignmentReport(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(
+            os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.to_json(
+                x
+            )
+            for x in response
+        )
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode("UTF-8")
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {
+            "parent": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4"
+        }
+
+        pager = client.list_os_policy_assignment_reports(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, os_policy_assignment_reports.OSPolicyAssignmentReport)
+            for i in results
+        )
+
+        pages = list(
+            client.list_os_policy_assignment_reports(request=sample_request).pages
+        )
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        inventory.GetInventoryRequest,
+        dict,
+    ],
+)
+def test_get_inventory_rest(request_type):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/instances/sample3/inventory"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = inventory.Inventory(
+            name="name_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = inventory.Inventory.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_inventory(request)
+
+    # Establish that the response is the type that we expect.
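+    # The transport deserializes the mocked JSON body back into the proto,
+    # so both the returned type and the populated field can be checked.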
+    assert isinstance(response, inventory.Inventory)
+    assert response.name == "name_value"
+
+
+def test_get_inventory_rest_required_fields(
+    request_type=inventory.GetInventoryRequest,
+):
+    transport_class = transports.OsConfigZonalServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_inventory._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_inventory._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("view",))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = inventory.Inventory()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = inventory.Inventory.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_inventory(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_inventory_rest_unset_required_fields():
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_inventory._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("view",)) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_inventory_rest_interceptors(null_interceptor):
+    transport = transports.OsConfigZonalServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.OsConfigZonalServiceRestInterceptor(),
+    )
+    client = OsConfigZonalServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor, "post_get_inventory"
+    ) as post, mock.patch.object(
+        transports.OsConfigZonalServiceRestInterceptor, "pre_get_inventory"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = inventory.GetInventoryRequest.pb(inventory.GetInventoryRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = inventory.Inventory.to_json(inventory.Inventory())
+
+        request = inventory.GetInventoryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = inventory.Inventory()
+
+        client.get_inventory(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_inventory_rest_bad_request(
+    transport: str = "rest", request_type=inventory.GetInventoryRequest
+):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "name": "projects/sample1/locations/sample2/instances/sample3/inventory"
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_inventory(request)
+
+
+def test_get_inventory_rest_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = inventory.Inventory()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {
+            "name": "projects/sample1/locations/sample2/instances/sample3/inventory"
+        }
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name="name_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = inventory.Inventory.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.get_inventory(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{name=projects/*/locations/*/instances/*/inventory}"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_get_inventory_rest_flattened_error(transport: str = "rest"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_inventory(
+            inventory.GetInventoryRequest(),
+            name="name_value",
+        )
+
+
+def test_get_inventory_rest_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        inventory.ListInventoriesRequest,
+        dict,
+    ],
+)
+def test_list_inventories_rest(request_type):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = inventory.ListInventoriesResponse(
+            next_page_token="next_page_token_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = inventory.ListInventoriesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_inventories(request)
+
+    # Establish that the response is the type that we expect.
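+    # List methods return a pager wrapper rather than the raw response;
+    # fields such as next_page_token remain reachable through the wrapper.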
+ assert isinstance(response, pagers.ListInventoriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_inventories_rest_required_fields( + request_type=inventory.ListInventoriesRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_inventories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_inventories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = inventory.ListInventoriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = inventory.ListInventoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_inventories(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_inventories_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_inventories._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_inventories_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "post_list_inventories" + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_list_inventories" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = inventory.ListInventoriesRequest.pb( + inventory.ListInventoriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = inventory.ListInventoriesResponse.to_json( + inventory.ListInventoriesResponse() + ) + + request = inventory.ListInventoriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = inventory.ListInventoriesResponse() + + client.list_inventories( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_inventories_rest_bad_request( + transport: str = "rest", request_type=inventory.ListInventoriesRequest +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_inventories(request) + + +def test_list_inventories_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = inventory.ListInventoriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = inventory.ListInventoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_inventories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*}/inventories" + % client.transport._host, + args[1], + ) + + +def test_list_inventories_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inventories( + inventory.ListInventoriesRequest(), + parent="parent_value", + ) + + +def test_list_inventories_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + inventory.Inventory(), + ], + next_page_token="abc", + ), + inventory.ListInventoriesResponse( + inventories=[], + next_page_token="def", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + ], + next_page_token="ghi", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(inventory.ListInventoriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + pager = client.list_inventories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, inventory.Inventory) for i in results) + + pages = list(client.list_inventories(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vulnerability.GetVulnerabilityReportRequest, + dict, + ], +) +def test_get_vulnerability_report_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/vulnerabilityReport" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vulnerability.VulnerabilityReport( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = vulnerability.VulnerabilityReport.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_vulnerability_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vulnerability.VulnerabilityReport) + assert response.name == "name_value" + + +def test_get_vulnerability_report_rest_required_fields( + request_type=vulnerability.GetVulnerabilityReportRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vulnerability_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vulnerability_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vulnerability.VulnerabilityReport() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = vulnerability.VulnerabilityReport.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_vulnerability_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_vulnerability_report_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_vulnerability_report._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_vulnerability_report_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "post_get_vulnerability_report" + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_get_vulnerability_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vulnerability.GetVulnerabilityReportRequest.pb( + vulnerability.GetVulnerabilityReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vulnerability.VulnerabilityReport.to_json( + vulnerability.VulnerabilityReport() + ) + + request = vulnerability.GetVulnerabilityReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vulnerability.VulnerabilityReport() + + client.get_vulnerability_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_vulnerability_report_rest_bad_request( + transport: str = "rest", request_type=vulnerability.GetVulnerabilityReportRequest +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/vulnerabilityReport" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
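+    # (Note: google.api_core translates an HTTP 400 response into
+    # core_exceptions.BadRequest, which pytest.raises expects below.)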
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_vulnerability_report(request) + + +def test_get_vulnerability_report_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vulnerability.VulnerabilityReport() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/vulnerabilityReport" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = vulnerability.VulnerabilityReport.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_vulnerability_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*/vulnerabilityReport}" + % client.transport._host, + args[1], + ) + + +def test_get_vulnerability_report_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_vulnerability_report( + vulnerability.GetVulnerabilityReportRequest(), + name="name_value", + ) + + +def test_get_vulnerability_report_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vulnerability.ListVulnerabilityReportsRequest, + dict, + ], +) +def test_list_vulnerability_reports_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vulnerability.ListVulnerabilityReportsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = vulnerability.ListVulnerabilityReportsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_vulnerability_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVulnerabilityReportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_vulnerability_reports_rest_required_fields( + request_type=vulnerability.ListVulnerabilityReportsRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_vulnerability_reports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_vulnerability_reports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vulnerability.ListVulnerabilityReportsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = vulnerability.ListVulnerabilityReportsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_vulnerability_reports(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_vulnerability_reports_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_vulnerability_reports._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_vulnerability_reports_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_list_vulnerability_reports", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_list_vulnerability_reports" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vulnerability.ListVulnerabilityReportsRequest.pb( + vulnerability.ListVulnerabilityReportsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + vulnerability.ListVulnerabilityReportsResponse.to_json( + vulnerability.ListVulnerabilityReportsResponse() + ) + ) + + request = vulnerability.ListVulnerabilityReportsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vulnerability.ListVulnerabilityReportsResponse() + + client.list_vulnerability_reports( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_vulnerability_reports_rest_bad_request( + transport: str = "rest", request_type=vulnerability.ListVulnerabilityReportsRequest +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_vulnerability_reports(request) + + +def test_list_vulnerability_reports_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vulnerability.ListVulnerabilityReportsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = vulnerability.ListVulnerabilityReportsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_vulnerability_reports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*}/vulnerabilityReports" + % client.transport._host, + args[1], + ) + + +def test_list_vulnerability_reports_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_vulnerability_reports( + vulnerability.ListVulnerabilityReportsRequest(), + parent="parent_value", + ) + + +def test_list_vulnerability_reports_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vulnerability.ListVulnerabilityReportsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + pager = client.list_vulnerability_reports(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vulnerability.VulnerabilityReport) for i in results) + + pages = list(client.list_vulnerability_reports(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = OsConfigZonalServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.OsConfigZonalServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + transports.OsConfigZonalServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = OsConfigZonalServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.OsConfigZonalServiceGrpcTransport, + ) + + +def test_os_config_zonal_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.OsConfigZonalServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_os_config_zonal_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.osconfig_v1.services.os_config_zonal_service.transports.OsConfigZonalServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.OsConfigZonalServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
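+    # (Note: the base transport is abstract; the concrete gRPC and REST
+    # transports override each of the methods listed below.)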
+ methods = ( + "create_os_policy_assignment", + "update_os_policy_assignment", + "get_os_policy_assignment", + "list_os_policy_assignments", + "list_os_policy_assignment_revisions", + "delete_os_policy_assignment", + "get_os_policy_assignment_report", + "list_os_policy_assignment_reports", + "get_inventory", + "list_inventories", + "get_vulnerability_report", + "list_vulnerability_reports", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_os_config_zonal_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.osconfig_v1.services.os_config_zonal_service.transports.OsConfigZonalServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OsConfigZonalServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_os_config_zonal_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.osconfig_v1.services.os_config_zonal_service.transports.OsConfigZonalServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OsConfigZonalServiceTransport() + adc.assert_called_once() + + +def test_os_config_zonal_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + OsConfigZonalServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_zonal_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
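+    # (Note: google.auth.default() is the Application Default Credentials entry
+    # point; it is mocked here so the test needs no real credentials.)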
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + transports.OsConfigZonalServiceRestTransport, + ], +) +def test_os_config_zonal_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.OsConfigZonalServiceGrpcTransport, grpc_helpers), + (transports.OsConfigZonalServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_os_config_zonal_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "osconfig.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="osconfig.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_zonal_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_os_config_zonal_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.OsConfigZonalServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_os_config_zonal_service_rest_lro_client():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_os_config_zonal_service_host_no_port(transport_name):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="osconfig.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "osconfig.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://osconfig.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_os_config_zonal_service_host_with_port(transport_name):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="osconfig.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "osconfig.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://osconfig.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_os_config_zonal_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = OsConfigZonalServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = OsConfigZonalServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_os_policy_assignment._session
+    session2 = client2.transport.create_os_policy_assignment._session
+    assert session1 != session2
+    session1 = client1.transport.update_os_policy_assignment._session
+    session2 = client2.transport.update_os_policy_assignment._session
+    assert session1 != session2
+    session1 = client1.transport.get_os_policy_assignment._session
+    session2 = client2.transport.get_os_policy_assignment._session
+    assert session1 != session2
+    session1 = client1.transport.list_os_policy_assignments._session
+    session2 = client2.transport.list_os_policy_assignments._session
+    assert session1 != session2
+    session1 = client1.transport.list_os_policy_assignment_revisions._session
+    session2 = client2.transport.list_os_policy_assignment_revisions._session
+    assert session1 != session2
+    session1 = client1.transport.delete_os_policy_assignment._session
+    session2 = client2.transport.delete_os_policy_assignment._session
+    assert session1 != session2
+    session1 = client1.transport.get_os_policy_assignment_report._session
+    session2 = client2.transport.get_os_policy_assignment_report._session
+    assert session1 != session2
+    session1 = client1.transport.list_os_policy_assignment_reports._session
+    session2 = client2.transport.list_os_policy_assignment_reports._session
+    assert session1 != session2
+    session1 = client1.transport.get_inventory._session
+    session2 = client2.transport.get_inventory._session
+    assert session1 != session2
+    session1 = client1.transport.list_inventories._session
+    session2 = client2.transport.list_inventories._session
+    assert session1 != session2
+    session1 = client1.transport.get_vulnerability_report._session
+    session2 = client2.transport.get_vulnerability_report._session
+    assert session1 != session2
+    session1 = client1.transport.list_vulnerability_reports._session
+    session2 = client2.transport.list_vulnerability_reports._session
+    assert session1 != session2
+
+
+def test_os_config_zonal_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.OsConfigZonalServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_os_config_zonal_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.OsConfigZonalServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.OsConfigZonalServiceGrpcTransport,
+        transports.OsConfigZonalServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_os_config_zonal_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.OsConfigZonalServiceGrpcTransport,
+        transports.OsConfigZonalServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_os_config_zonal_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_os_config_zonal_service_grpc_lro_client():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_os_config_zonal_service_grpc_lro_async_client():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_instance_path():
+    project = "squid"
+    zone = "clam"
+    instance = "whelk"
+    expected = "projects/{project}/zones/{zone}/instances/{instance}".format(
+        project=project,
+        zone=zone,
+        instance=instance,
+    )
+    actual = OsConfigZonalServiceClient.instance_path(project, zone, instance)
+    assert expected == actual
+
+
+def test_parse_instance_path():
+    expected = {
+        "project": "octopus",
+        "zone": "oyster",
+        "instance": "nudibranch",
+    }
+    path = OsConfigZonalServiceClient.instance_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = OsConfigZonalServiceClient.parse_instance_path(path)
+    assert expected == actual
+
+
+def test_instance_os_policy_assignment_path():
+    project = "cuttlefish"
+    location = "mussel"
+    instance = "winkle"
+    assignment = "nautilus"
+    expected = "projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}".format(
+        project=project,
+        location=location,
+        instance=instance,
+        assignment=assignment,
+    )
+    actual = OsConfigZonalServiceClient.instance_os_policy_assignment_path(
+        project, location, instance, assignment
+    )
+    assert expected == actual
+
+
+def test_parse_instance_os_policy_assignment_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+        "instance": "squid",
+        "assignment": "clam",
+    }
+    path = OsConfigZonalServiceClient.instance_os_policy_assignment_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = OsConfigZonalServiceClient.parse_instance_os_policy_assignment_path(path)
+    assert expected == actual
+
+
+def test_inventory_path():
+    project = "whelk"
+    location = "octopus"
+    instance = "oyster"
+    expected = (
+        "projects/{project}/locations/{location}/instances/{instance}/inventory".format(
+            project=project,
+            location=location,
+            instance=instance,
+        )
+    )
+    actual = OsConfigZonalServiceClient.inventory_path(project, location, instance)
+    assert expected == actual
+
+
+def test_parse_inventory_path():
+    expected = {
+        "project": "nudibranch",
+        "location": "cuttlefish",
+        "instance": "mussel",
+    }
+    path = OsConfigZonalServiceClient.inventory_path(**expected)
+
+    # Check that the path construction is reversible.
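+    # (For example, the path built above is
+    # "projects/nudibranch/locations/cuttlefish/instances/mussel/inventory".)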
+ actual = OsConfigZonalServiceClient.parse_inventory_path(path) + assert expected == actual + + +def test_os_policy_assignment_path(): + project = "winkle" + location = "nautilus" + os_policy_assignment = "scallop" + expected = "projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}".format( + project=project, + location=location, + os_policy_assignment=os_policy_assignment, + ) + actual = OsConfigZonalServiceClient.os_policy_assignment_path( + project, location, os_policy_assignment + ) + assert expected == actual + + +def test_parse_os_policy_assignment_path(): + expected = { + "project": "abalone", + "location": "squid", + "os_policy_assignment": "clam", + } + path = OsConfigZonalServiceClient.os_policy_assignment_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_os_policy_assignment_path(path) + assert expected == actual + + +def test_os_policy_assignment_report_path(): + project = "whelk" + location = "octopus" + instance = "oyster" + assignment = "nudibranch" + expected = "projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report".format( + project=project, + location=location, + instance=instance, + assignment=assignment, + ) + actual = OsConfigZonalServiceClient.os_policy_assignment_report_path( + project, location, instance, assignment + ) + assert expected == actual + + +def test_parse_os_policy_assignment_report_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "instance": "winkle", + "assignment": "nautilus", + } + path = OsConfigZonalServiceClient.os_policy_assignment_report_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_os_policy_assignment_report_path(path) + assert expected == actual + + +def test_vulnerability_report_path(): + project = "scallop" + location = "abalone" + instance = "squid" + expected = "projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport".format( + project=project, + location=location, + instance=instance, + ) + actual = OsConfigZonalServiceClient.vulnerability_report_path( + project, location, instance + ) + assert expected == actual + + +def test_parse_vulnerability_report_path(): + expected = { + "project": "clam", + "location": "whelk", + "instance": "octopus", + } + path = OsConfigZonalServiceClient.vulnerability_report_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_vulnerability_report_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = OsConfigZonalServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = OsConfigZonalServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OsConfigZonalServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = OsConfigZonalServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = OsConfigZonalServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = OsConfigZonalServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = OsConfigZonalServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = OsConfigZonalServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = OsConfigZonalServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = OsConfigZonalServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = OsConfigZonalServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OsConfigZonalServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.OsConfigZonalServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.OsConfigZonalServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = OsConfigZonalServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
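+        # (Note: the client supports the context-manager protocol; leaving the
+        # `with` block below is what triggers transport.close().)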
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (OsConfigZonalServiceClient, transports.OsConfigZonalServiceGrpcTransport), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/__init__.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py new file mode 100644 index 000000000000..f2249e1b398d --- /dev/null +++ b/packages/google-cloud-os-config/tests/unit/gapic/osconfig_v1alpha/test_os_config_zonal_service.py @@ -0,0 +1,11642 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.osconfig_v1alpha.services.os_config_zonal_service import ( + OsConfigZonalServiceAsyncClient, + OsConfigZonalServiceClient, + pagers, + transports, +) +from google.cloud.osconfig_v1alpha.types import ( + config_common, + instance_os_policies_compliance, + inventory, + os_policy, + os_policy_assignment_reports, + os_policy_assignments, + osconfig_common, + vulnerability, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
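+# (For example, a DEFAULT_ENDPOINT containing "localhost" is replaced by
+# "foo.googleapis.com"; any other endpoint is returned unchanged.)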
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert OsConfigZonalServiceClient._get_default_mtls_endpoint(None) is None
+    assert (
+        OsConfigZonalServiceClient._get_default_mtls_endpoint(api_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        OsConfigZonalServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        OsConfigZonalServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        OsConfigZonalServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        OsConfigZonalServiceClient._get_default_mtls_endpoint(non_googleapi)
+        == non_googleapi
+    )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (OsConfigZonalServiceClient, "grpc"),
+        (OsConfigZonalServiceAsyncClient, "grpc_asyncio"),
+        (OsConfigZonalServiceClient, "rest"),
+    ],
+)
+def test_os_config_zonal_service_client_from_service_account_info(
+    client_class, transport_name
+):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "osconfig.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://osconfig.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.OsConfigZonalServiceGrpcTransport, "grpc"),
+        (transports.OsConfigZonalServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+        (transports.OsConfigZonalServiceRestTransport, "rest"),
+    ],
+)
+def test_os_config_zonal_service_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (OsConfigZonalServiceClient, "grpc"),
+        (OsConfigZonalServiceAsyncClient, "grpc_asyncio"),
+        (OsConfigZonalServiceClient, "rest"),
+    ],
+)
+def test_os_config_zonal_service_client_from_service_account_file(
+    client_class, transport_name
+):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "osconfig.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://osconfig.googleapis.com"
+        )
+
+
+def test_os_config_zonal_service_client_get_transport_class():
+    transport = OsConfigZonalServiceClient.get_transport_class()
+    available_transports = [
+        transports.OsConfigZonalServiceGrpcTransport,
+        transports.OsConfigZonalServiceRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = OsConfigZonalServiceClient.get_transport_class("grpc")
+    assert transport == transports.OsConfigZonalServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (
+            OsConfigZonalServiceClient,
+            transports.OsConfigZonalServiceGrpcTransport,
+            "grpc",
+        ),
+        (
+            OsConfigZonalServiceAsyncClient,
+            transports.OsConfigZonalServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (
+            OsConfigZonalServiceClient,
+            transports.OsConfigZonalServiceRestTransport,
+            "rest",
+        ),
+    ],
+)
+@mock.patch.object(
+    OsConfigZonalServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(OsConfigZonalServiceClient),
+)
+@mock.patch.object(
+    OsConfigZonalServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(OsConfigZonalServiceAsyncClient),
+)
+def test_os_config_zonal_service_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(OsConfigZonalServiceClient, "get_transport_class") as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(OsConfigZonalServiceClient, "get_transport_class") as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            OsConfigZonalServiceClient,
+            transports.OsConfigZonalServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            OsConfigZonalServiceAsyncClient,
+            transports.OsConfigZonalServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            OsConfigZonalServiceClient,
+            transports.OsConfigZonalServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            OsConfigZonalServiceAsyncClient,
+            transports.OsConfigZonalServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            OsConfigZonalServiceClient,
+            transports.OsConfigZonalServiceRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            OsConfigZonalServiceClient,
+            transports.OsConfigZonalServiceRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    OsConfigZonalServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(OsConfigZonalServiceClient),
+)
+@mock.patch.object(
+    OsConfigZonalServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(OsConfigZonalServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_os_config_zonal_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name,
use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
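+    # As a rough sketch (illustrative only, not the client's actual
+    # implementation), the endpoint choice asserted across the three blocks of
+    # this test reduces to:
+    #
+    #     use_mtls = cert_available and use_client_cert_env == "true"
+    #     host = DEFAULT_MTLS_ENDPOINT if use_mtls else DEFAULT_ENDPOINT
+    #
+    # where cert_available means an explicit client_cert_source or an ADC
+    # default client certificate.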
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [OsConfigZonalServiceClient, OsConfigZonalServiceAsyncClient] +) +@mock.patch.object( + OsConfigZonalServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigZonalServiceClient), +) +@mock.patch.object( + OsConfigZonalServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OsConfigZonalServiceAsyncClient), +) +def test_os_config_zonal_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
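+    # In the "auto" cases the classmethod consults
+    # google.auth.transport.mtls.has_default_client_cert_source() and, only
+    # when it returns True, default_client_cert_source(); both helpers are
+    # patched below so each branch can be exercised deterministically.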
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceRestTransport, + "rest", + ), + ], +) +def test_os_config_zonal_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceRestTransport, + "rest", + None, + ), + ], +) +def test_os_config_zonal_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_os_config_zonal_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.osconfig_v1alpha.services.os_config_zonal_service.transports.OsConfigZonalServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = OsConfigZonalServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OsConfigZonalServiceClient, + transports.OsConfigZonalServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_os_config_zonal_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
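+    # Two credential sources are mocked below: google.auth.default (ADC)
+    # returns `creds`, while load_credentials_from_file returns `file_creds`.
+    # Asserting that create_channel receives `file_creds` shows that
+    # client_options.credentials_file takes precedence over ADC.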
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "osconfig.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="osconfig.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.CreateOSPolicyAssignmentRequest, + dict, + ], +) +def test_create_os_policy_assignment(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.CreateOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_os_policy_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + client.create_os_policy_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.CreateOSPolicyAssignmentRequest() + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.CreateOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
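+        # (FakeUnaryUnaryCall wraps the operation in an awaitable call object,
+        # so the mocked stub behaves like a real async unary-unary RPC and
+        # `await client.create_os_policy_assignment(...)` works unchanged.)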
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.CreateOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_async_from_dict(): + await test_create_os_policy_assignment_async(request_type=dict) + + +def test_create_os_policy_assignment_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.CreateOSPolicyAssignmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.CreateOSPolicyAssignmentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_os_policy_assignment_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_os_policy_assignment( + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].os_policy_assignment + mock_val = os_policy_assignments.OSPolicyAssignment(name="name_value") + assert arg == mock_val + arg = args[0].os_policy_assignment_id + mock_val = "os_policy_assignment_id_value" + assert arg == mock_val + + +def test_create_os_policy_assignment_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_os_policy_assignment( + os_policy_assignments.CreateOSPolicyAssignmentRequest(), + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_os_policy_assignment( + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].os_policy_assignment + mock_val = os_policy_assignments.OSPolicyAssignment(name="name_value") + assert arg == mock_val + arg = args[0].os_policy_assignment_id + mock_val = "os_policy_assignment_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_os_policy_assignment_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
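+    # (Flattened keyword arguments are mutually exclusive with an explicit
+    # request object; the ValueError is raised client-side before any RPC is
+    # attempted, which is why no transport mocking is needed here.)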
+ with pytest.raises(ValueError): + await client.create_os_policy_assignment( + os_policy_assignments.CreateOSPolicyAssignmentRequest(), + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.UpdateOSPolicyAssignmentRequest, + dict, + ], +) +def test_update_os_policy_assignment(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_os_policy_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + client.update_os_policy_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + +@pytest.mark.asyncio +async def test_update_os_policy_assignment_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.UpdateOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_os_policy_assignment_async_from_dict(): + await test_update_os_policy_assignment_async(request_type=dict) + + +def test_update_os_policy_assignment_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + request.os_policy_assignment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "os_policy_assignment.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_os_policy_assignment_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest() + + request.os_policy_assignment.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "os_policy_assignment.name=name_value", + ) in kw["metadata"] + + +def test_update_os_policy_assignment_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_os_policy_assignment( + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].os_policy_assignment + mock_val = os_policy_assignments.OSPolicyAssignment(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_os_policy_assignment_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_os_policy_assignment( + os_policy_assignments.UpdateOSPolicyAssignmentRequest(), + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_os_policy_assignment_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_os_policy_assignment( + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].os_policy_assignment + mock_val = os_policy_assignments.OSPolicyAssignment(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_os_policy_assignment_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_os_policy_assignment( + os_policy_assignments.UpdateOSPolicyAssignmentRequest(), + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.GetOSPolicyAssignmentRequest, + dict, + ], +) +def test_get_os_policy_assignment(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = os_policy_assignments.OSPolicyAssignment( + name="name_value", + description="description_value", + revision_id="revision_id_value", + etag="etag_value", + rollout_state=os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS, + baseline=True, + deleted=True, + reconciling=True, + uid="uid_value", + ) + response = client.get_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.GetOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, os_policy_assignments.OSPolicyAssignment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.etag == "etag_value" + assert ( + response.rollout_state + == os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS + ) + assert response.baseline is True + assert response.deleted is True + assert response.reconciling is True + assert response.uid == "uid_value" + + +def test_get_os_policy_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + client.get_os_policy_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.GetOSPolicyAssignmentRequest() + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.GetOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.OSPolicyAssignment( + name="name_value", + description="description_value", + revision_id="revision_id_value", + etag="etag_value", + rollout_state=os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS, + baseline=True, + deleted=True, + reconciling=True, + uid="uid_value", + ) + ) + response = await client.get_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.GetOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, os_policy_assignments.OSPolicyAssignment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.etag == "etag_value" + assert ( + response.rollout_state + == os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS + ) + assert response.baseline is True + assert response.deleted is True + assert response.reconciling is True + assert response.uid == "uid_value" + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_async_from_dict(): + await test_get_os_policy_assignment_async(request_type=dict) + + +def test_get_os_policy_assignment_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.GetOSPolicyAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + call.return_value = os_policy_assignments.OSPolicyAssignment() + client.get_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.GetOSPolicyAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.OSPolicyAssignment() + ) + await client.get_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_os_policy_assignment_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = os_policy_assignments.OSPolicyAssignment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_os_policy_assignment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_os_policy_assignment_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_os_policy_assignment( + os_policy_assignments.GetOSPolicyAssignmentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = os_policy_assignments.OSPolicyAssignment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.OSPolicyAssignment() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_os_policy_assignment( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_os_policy_assignment( + os_policy_assignments.GetOSPolicyAssignmentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.ListOSPolicyAssignmentsRequest, + dict, + ], +) +def test_list_os_policy_assignments(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_os_policy_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + client.list_os_policy_assignments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentsRequest() + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.ListOSPolicyAssignmentsRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.ListOSPolicyAssignmentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_os_policy_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_async_from_dict(): + await test_list_os_policy_assignments_async(request_type=dict) + + +def test_list_os_policy_assignments_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.ListOSPolicyAssignmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + call.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + client.list_os_policy_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.ListOSPolicyAssignmentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
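+    # (The x-goog-request-params pair asserted in these field-header tests is
+    # the explicit routing header: the client encodes routing-relevant request
+    # fields, e.g. "parent=parent_value", into the request metadata so the
+    # backend can route the call; presumably derived from the method's routing
+    # annotations.)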
+ with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.ListOSPolicyAssignmentsResponse() + ) + await client.list_os_policy_assignments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_os_policy_assignments_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_os_policy_assignments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_os_policy_assignments_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_policy_assignments( + os_policy_assignments.ListOSPolicyAssignmentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.ListOSPolicyAssignmentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_os_policy_assignments( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_os_policy_assignments( + os_policy_assignments.ListOSPolicyAssignmentsRequest(), + parent="parent_value", + ) + + +def test_list_os_policy_assignments_pager(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_os_policy_assignments(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in results + ) + + +def test_list_os_policy_assignments_pages(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + RuntimeError, + ) + pages = list(client.list_os_policy_assignments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_async_pager(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
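+        # (Pages chain via next_page_token: "abc" -> "def" -> "ghi" -> unset,
+        # so iteration stops after four pages / six assignments; the trailing
+        # RuntimeError would only surface if the pager over-fetched a fifth
+        # page.)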
+ call.side_effect = ( + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_os_policy_assignments( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignments_async_pages(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_os_policy_assignments(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + dict, + ], +) +def test_list_os_policy_assignment_revisions(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.list_os_policy_assignment_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignment_revisions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + client.list_os_policy_assignment_revisions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_revisions_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_os_policy_assignment_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentRevisionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_revisions_async_from_dict(): + await test_list_os_policy_assignment_revisions_async(request_type=dict) + + +def test_list_os_policy_assignment_revisions_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
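+    # Patching `__call__` on the transport's bound stub keeps the whole
+    # client-side path (request coercion, header injection) real while
+    # faking only the wire call.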
+ with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + call.return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + ) + client.list_os_policy_assignment_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_revisions_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + ) + await client.list_os_policy_assignment_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_list_os_policy_assignment_revisions_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_os_policy_assignment_revisions( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_list_os_policy_assignment_revisions_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_policy_assignment_revisions( + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_revisions_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
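+        # FakeUnaryUnaryCall wraps the response in an awaitable that mimics
+        # a real gRPC unary-unary call, so the async client can `await` the
+        # mocked method just as it would a live RPC.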
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_os_policy_assignment_revisions(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignment_revisions_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_os_policy_assignment_revisions(
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(),
+            name="name_value",
+        )
+
+
+def test_list_os_policy_assignment_revisions_pager(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", ""),)),
+        )
+        pager = client.list_os_policy_assignment_revisions(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in results
+        )
+
+
+def test_list_os_policy_assignment_revisions_pages(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_os_policy_assignment_revisions(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignment_revisions_async_pager():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="abc",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[],
+                next_page_token="def",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+                next_page_token="ghi",
+            ),
+            os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse(
+                os_policy_assignments=[
+                    os_policy_assignments.OSPolicyAssignment(),
+                    os_policy_assignments.OSPolicyAssignment(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_os_policy_assignment_revisions(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_os_policy_assignment_revisions_async_pages():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_os_policy_assignment_revisions),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_os_policy_assignment_revisions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.DeleteOSPolicyAssignmentRequest, + dict, + ], +) +def test_delete_os_policy_assignment(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_os_policy_assignment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + client.delete_os_policy_assignment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + +@pytest.mark.asyncio +async def test_delete_os_policy_assignment_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignments.DeleteOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
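+    # `request_type` may be the request proto class or `dict` (see the
+    # *_async_from_dict variant below); the client coerces a dict into the
+    # message type, so both spellings exercise the same code path.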
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_os_policy_assignment_async_from_dict(): + await test_delete_os_policy_assignment_async(request_type=dict) + + +def test_delete_os_policy_assignment_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_os_policy_assignment_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignments.DeleteOSPolicyAssignmentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_os_policy_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_os_policy_assignment_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_os_policy_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
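+        # The flattened keyword arguments are copied into a request object
+        # by the client; supplying them together with an explicit request
+        # is rejected, as the *_flattened_error tests below confirm.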
+        client.delete_os_policy_assignment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_os_policy_assignment_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_os_policy_assignment(
+            os_policy_assignments.DeleteOSPolicyAssignmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_os_policy_assignment_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_os_policy_assignment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_os_policy_assignment(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_os_policy_assignment_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_os_policy_assignment(
+            os_policy_assignments.DeleteOSPolicyAssignmentRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest,
+        dict,
+    ],
+)
+def test_get_instance_os_policies_compliance(request_type, transport: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_instance_os_policies_compliance), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = (
+            instance_os_policies_compliance.InstanceOSPoliciesCompliance(
+                name="name_value",
+                instance="instance_value",
+                state=config_common.OSPolicyComplianceState.COMPLIANT,
+                detailed_state="detailed_state_value",
+                detailed_state_reason="detailed_state_reason_value",
+                last_compliance_run_id="last_compliance_run_id_value",
+            )
+        )
+        response = client.get_instance_os_policies_compliance(request)
+
+        # Establish that the underlying gRPC stub method was called.
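+        # Each entry in `mock_calls` is a (name, args, kwargs) triple; the
+        # unpacking below pulls out the positional args to inspect the
+        # request that reached the stub.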
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance( + response, instance_os_policies_compliance.InstanceOSPoliciesCompliance + ) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.state == config_common.OSPolicyComplianceState.COMPLIANT + assert response.detailed_state == "detailed_state_value" + assert response.detailed_state_reason == "detailed_state_reason_value" + assert response.last_compliance_run_id == "last_compliance_run_id_value" + + +def test_get_instance_os_policies_compliance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_os_policies_compliance), "__call__" + ) as call: + client.get_instance_os_policies_compliance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest() + ) + + +@pytest.mark.asyncio +async def test_get_instance_os_policies_compliance_async( + transport: str = "grpc_asyncio", + request_type=instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_os_policies_compliance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance_os_policies_compliance.InstanceOSPoliciesCompliance( + name="name_value", + instance="instance_value", + state=config_common.OSPolicyComplianceState.COMPLIANT, + detailed_state="detailed_state_value", + detailed_state_reason="detailed_state_reason_value", + last_compliance_run_id="last_compliance_run_id_value", + ) + ) + response = await client.get_instance_os_policies_compliance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest() + ) + + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, instance_os_policies_compliance.InstanceOSPoliciesCompliance + ) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.state == config_common.OSPolicyComplianceState.COMPLIANT + assert response.detailed_state == "detailed_state_value" + assert response.detailed_state_reason == "detailed_state_reason_value" + assert response.last_compliance_run_id == "last_compliance_run_id_value" + + +@pytest.mark.asyncio +async def test_get_instance_os_policies_compliance_async_from_dict(): + await test_get_instance_os_policies_compliance_async(request_type=dict) + + +def test_get_instance_os_policies_compliance_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_os_policies_compliance), "__call__" + ) as call: + call.return_value = ( + instance_os_policies_compliance.InstanceOSPoliciesCompliance() + ) + client.get_instance_os_policies_compliance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instance_os_policies_compliance_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_os_policies_compliance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance_os_policies_compliance.InstanceOSPoliciesCompliance() + ) + await client.get_instance_os_policies_compliance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_instance_os_policies_compliance_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_os_policies_compliance), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + instance_os_policies_compliance.InstanceOSPoliciesCompliance() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.get_instance_os_policies_compliance(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_instance_os_policies_compliance_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_instance_os_policies_compliance(
+            instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_instance_os_policies_compliance_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_instance_os_policies_compliance), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            instance_os_policies_compliance.InstanceOSPoliciesCompliance()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_instance_os_policies_compliance(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_instance_os_policies_compliance_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_instance_os_policies_compliance(
+            instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest,
+        dict,
+    ],
+)
+def test_list_instance_os_policies_compliances(request_type, transport: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instance_os_policies_compliances), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = (
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = client.list_instance_os_policies_compliances(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstanceOSPoliciesCompliancesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_instance_os_policies_compliances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_os_policies_compliances), "__call__" + ) as call: + client.list_instance_os_policies_compliances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest() + ) + + +@pytest.mark.asyncio +async def test_list_instance_os_policies_compliances_async( + transport: str = "grpc_asyncio", + request_type=instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_os_policies_compliances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_instance_os_policies_compliances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstanceOSPoliciesCompliancesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_instance_os_policies_compliances_async_from_dict(): + await test_list_instance_os_policies_compliances_async(request_type=dict) + + +def test_list_instance_os_policies_compliances_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_os_policies_compliances), "__call__" + ) as call: + call.return_value = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse() + ) + client.list_instance_os_policies_compliances(request) + + # Establish that the underlying gRPC stub method was called. 
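+        # The stub records exactly one call; the routing metadata it
+        # carried is checked once the `with` block is exited below.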
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instance_os_policies_compliances_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_os_policies_compliances), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse() + ) + await client.list_instance_os_policies_compliances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_instance_os_policies_compliances_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_os_policies_compliances), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instance_os_policies_compliances( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_instance_os_policies_compliances_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instance_os_policies_compliances( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_instance_os_policies_compliances_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_os_policies_compliances), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instance_os_policies_compliances(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_instance_os_policies_compliances_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instance_os_policies_compliances(
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_instance_os_policies_compliances_pager(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instance_os_policies_compliances), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+                next_page_token="abc",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[],
+                next_page_token="def",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+                next_page_token="ghi",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_instance_os_policies_compliances(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, instance_os_policies_compliance.InstanceOSPoliciesCompliance)
+            for i in results
+        )
+
+
+def test_list_instance_os_policies_compliances_pages(transport_name: str = "grpc"):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instance_os_policies_compliances), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+                next_page_token="abc",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[],
+                next_page_token="def",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+                next_page_token="ghi",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_instance_os_policies_compliances(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_instance_os_policies_compliances_async_pager():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instance_os_policies_compliances),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+                next_page_token="abc",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[],
+                next_page_token="def",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+                next_page_token="ghi",
+            ),
+            instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse(
+                instance_os_policies_compliances=[
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                    instance_os_policies_compliance.InstanceOSPoliciesCompliance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_instance_os_policies_compliances(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, instance_os_policies_compliance.InstanceOSPoliciesCompliance)
+            for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_instance_os_policies_compliances_async_pages():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_instance_os_policies_compliances),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + instance_os_policies_compliances=[ + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + ], + next_page_token="abc", + ), + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + instance_os_policies_compliances=[], + next_page_token="def", + ), + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + instance_os_policies_compliances=[ + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + ], + next_page_token="ghi", + ), + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + instance_os_policies_compliances=[ + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_os_policies_compliances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, + dict, + ], +) +def test_get_os_policy_assignment_report(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = os_policy_assignment_reports.OSPolicyAssignmentReport( + name="name_value", + instance="instance_value", + os_policy_assignment="os_policy_assignment_value", + last_run_id="last_run_id_value", + ) + response = client.get_os_policy_assignment_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] == os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, os_policy_assignment_reports.OSPolicyAssignmentReport) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.os_policy_assignment == "os_policy_assignment_value" + assert response.last_run_id == "last_run_id_value" + + +def test_get_os_policy_assignment_report_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
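+    # Calling the method with no arguments at all should still synthesize
+    # a default request message, which the assertion on `args[0]` verifies.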
+ with mock.patch.object( + type(client.transport.get_os_policy_assignment_report), "__call__" + ) as call: + client.get_os_policy_assignment_report() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] == os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + ) + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_report_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignment_reports.OSPolicyAssignmentReport( + name="name_value", + instance="instance_value", + os_policy_assignment="os_policy_assignment_value", + last_run_id="last_run_id_value", + ) + ) + response = await client.get_os_policy_assignment_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] == os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, os_policy_assignment_reports.OSPolicyAssignmentReport) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.os_policy_assignment == "os_policy_assignment_value" + assert response.last_run_id == "last_run_id_value" + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_report_async_from_dict(): + await test_get_os_policy_assignment_report_async(request_type=dict) + + +def test_get_os_policy_assignment_report_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_os_policy_assignment_report), "__call__" + ) as call: + call.return_value = os_policy_assignment_reports.OSPolicyAssignmentReport() + client.get_os_policy_assignment_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_os_policy_assignment_report_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
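+    # gRPC has no URL path to carry resource names, so values that would
+    # sit in an HTTP URI are forwarded as request metadata instead.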
+    request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment_report), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignment_reports.OSPolicyAssignmentReport()
+        )
+        await client.get_os_policy_assignment_report(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_os_policy_assignment_report_flattened():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment_report), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = os_policy_assignment_reports.OSPolicyAssignmentReport()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_os_policy_assignment_report(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_os_policy_assignment_report_flattened_error():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_os_policy_assignment_report(
+            os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_os_policy_assignment_report_flattened_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_os_policy_assignment_report), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            os_policy_assignment_reports.OSPolicyAssignmentReport()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_os_policy_assignment_report(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_os_policy_assignment_report_flattened_error_async():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
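+    # The ValueError is raised client-side before any transport call is
+    # attempted, so no mock is needed for this negative case.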
+ with pytest.raises(ValueError): + await client.get_os_policy_assignment_report( + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + dict, + ], +) +def test_list_os_policy_assignment_reports(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.list_os_policy_assignment_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentReportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignment_reports_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + client.list_os_policy_assignment_reports() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_async( + transport: str = "grpc_asyncio", + request_type=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_os_policy_assignment_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert ( + args[0] + == os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + ) + + # Establish that the response is the type that we expect. 
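+ # The raw RPC returns a ListOSPolicyAssignmentReportsResponse; the client
+ # wraps it in an async pager so that callers can iterate items across pages
+ # (e.g. `async for report in response: ...`), fetching follow-up pages via
+ # `next_page_token` on demand.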
+ assert isinstance(response, pagers.ListOSPolicyAssignmentReportsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_async_from_dict(): + await test_list_os_policy_assignment_reports_async(request_type=dict) + + +def test_list_os_policy_assignment_reports_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + call.return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + client.list_os_policy_assignment_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + await client.list_os_policy_assignment_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_os_policy_assignment_reports_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_os_policy_assignment_reports( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
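+ # The flattened `parent` keyword is folded into a request object before the
+ # transport is invoked, so `args[0]` below is a
+ # ListOSPolicyAssignmentReportsRequest carrying the value passed above.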
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_os_policy_assignment_reports_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_policy_assignment_reports( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_os_policy_assignment_reports( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_os_policy_assignment_reports( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(), + parent="parent_value", + ) + + +def test_list_os_policy_assignment_reports_pager(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Set the response to a series of pages. 
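+ # Giving `side_effect` an iterable makes the mock return one element per
+ # call; the trailing RuntimeError is raised only if the pager asks for a
+ # page beyond the last response, which would indicate a pagination bug.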
+ call.side_effect = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_os_policy_assignment_reports(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, os_policy_assignment_reports.OSPolicyAssignmentReport) + for i in results + ) + + +def test_list_os_policy_assignment_reports_pages(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + RuntimeError, + ) + pages = list(client.list_os_policy_assignment_reports(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_async_pager(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
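+ # `new_callable=mock.AsyncMock` makes the patched `__call__` awaitable, as
+ # the async transport requires; the paged responses themselves can remain
+ # plain protobuf messages because AsyncMock delivers each one as the result
+ # of the awaited call.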
+ call.side_effect = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_os_policy_assignment_reports( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, os_policy_assignment_reports.OSPolicyAssignmentReport) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_os_policy_assignment_reports_async_pages(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_os_policy_assignment_reports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_os_policy_assignment_reports(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + inventory.GetInventoryRequest, + dict, + ], +) +def test_get_inventory(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
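+ # `request_type` is parametrized above as both the proto class and `dict`:
+ # GAPIC methods accept either a request message or a plain dict that is
+ # coerced into one, and proto3 semantics allow every field to be omitted.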
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = inventory.Inventory( + name="name_value", + ) + response = client.get_inventory(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.GetInventoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, inventory.Inventory) + assert response.name == "name_value" + + +def test_get_inventory_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + client.get_inventory() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.GetInventoryRequest() + + +@pytest.mark.asyncio +async def test_get_inventory_async( + transport: str = "grpc_asyncio", request_type=inventory.GetInventoryRequest +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + inventory.Inventory( + name="name_value", + ) + ) + response = await client.get_inventory(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.GetInventoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, inventory.Inventory) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_inventory_async_from_dict(): + await test_get_inventory_async(request_type=dict) + + +def test_get_inventory_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = inventory.GetInventoryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + call.return_value = inventory.Inventory() + client.get_inventory(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
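+ # Each entry in `mock_calls` is a (name, args, kwargs) triple; the pattern
+ # below discards the name and positional args and inspects the keyword
+ # arguments, which is where the gRPC metadata travels:
+ #     name, args, kwargs = call.mock_calls[0]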
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_inventory_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = inventory.GetInventoryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(inventory.Inventory()) + await client.get_inventory(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_inventory_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = inventory.Inventory() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_inventory( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_inventory_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_inventory( + inventory.GetInventoryRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_inventory_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_inventory), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = inventory.Inventory() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(inventory.Inventory()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_inventory( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_inventory_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_inventory( + inventory.GetInventoryRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + inventory.ListInventoriesRequest, + dict, + ], +) +def test_list_inventories(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = inventory.ListInventoriesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_inventories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.ListInventoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInventoriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_inventories_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + client.list_inventories() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.ListInventoriesRequest() + + +@pytest.mark.asyncio +async def test_list_inventories_async( + transport: str = "grpc_asyncio", request_type=inventory.ListInventoriesRequest +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + inventory.ListInventoriesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_inventories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == inventory.ListInventoriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInventoriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_inventories_async_from_dict(): + await test_list_inventories_async(request_type=dict) + + +def test_list_inventories_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = inventory.ListInventoriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + call.return_value = inventory.ListInventoriesResponse() + client.list_inventories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_inventories_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = inventory.ListInventoriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + inventory.ListInventoriesResponse() + ) + await client.list_inventories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_inventories_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = inventory.ListInventoriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_inventories( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_inventories_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inventories( + inventory.ListInventoriesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_inventories_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Designate an appropriate return value for the call. 
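+ # Note that the first assignment below is immediately superseded by the
+ # second; only the FakeUnaryUnaryCall-wrapped value takes effect, since the
+ # async transport awaits the call and needs an awaitable result.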
+ call.return_value = inventory.ListInventoriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + inventory.ListInventoriesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_inventories( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_inventories_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_inventories( + inventory.ListInventoriesRequest(), + parent="parent_value", + ) + + +def test_list_inventories_pager(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + inventory.Inventory(), + ], + next_page_token="abc", + ), + inventory.ListInventoriesResponse( + inventories=[], + next_page_token="def", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + ], + next_page_token="ghi", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_inventories(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, inventory.Inventory) for i in results) + + +def test_list_inventories_pages(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_inventories), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + inventory.Inventory(), + ], + next_page_token="abc", + ), + inventory.ListInventoriesResponse( + inventories=[], + next_page_token="def", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + ], + next_page_token="ghi", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + ], + ), + RuntimeError, + ) + pages = list(client.list_inventories(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_inventories_async_pager(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
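+ # The four fake pages configured below hold 3, 0, 1 and 2 inventories
+ # respectively (6 in total, matching the assertion further down); the empty
+ # `next_page_token` on the final page tells the pager to stop.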
+ with mock.patch.object( + type(client.transport.list_inventories), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + inventory.Inventory(), + ], + next_page_token="abc", + ), + inventory.ListInventoriesResponse( + inventories=[], + next_page_token="def", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + ], + next_page_token="ghi", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_inventories( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, inventory.Inventory) for i in responses) + + +@pytest.mark.asyncio +async def test_list_inventories_async_pages(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_inventories), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + inventory.Inventory(), + ], + next_page_token="abc", + ), + inventory.ListInventoriesResponse( + inventories=[], + next_page_token="def", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + ], + next_page_token="ghi", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_inventories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vulnerability.GetVulnerabilityReportRequest, + dict, + ], +) +def test_get_vulnerability_report(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.VulnerabilityReport( + name="name_value", + ) + response = client.get_vulnerability_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.GetVulnerabilityReportRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vulnerability.VulnerabilityReport) + assert response.name == "name_value" + + +def test_get_vulnerability_report_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + client.get_vulnerability_report() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.GetVulnerabilityReportRequest() + + +@pytest.mark.asyncio +async def test_get_vulnerability_report_async( + transport: str = "grpc_asyncio", + request_type=vulnerability.GetVulnerabilityReportRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.VulnerabilityReport( + name="name_value", + ) + ) + response = await client.get_vulnerability_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.GetVulnerabilityReportRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, vulnerability.VulnerabilityReport) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_vulnerability_report_async_from_dict(): + await test_get_vulnerability_report_async(request_type=dict) + + +def test_get_vulnerability_report_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vulnerability.GetVulnerabilityReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + call.return_value = vulnerability.VulnerabilityReport() + client.get_vulnerability_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_vulnerability_report_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vulnerability.GetVulnerabilityReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.VulnerabilityReport() + ) + await client.get_vulnerability_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_vulnerability_report_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.VulnerabilityReport() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_vulnerability_report( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_vulnerability_report_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_vulnerability_report( + vulnerability.GetVulnerabilityReportRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_vulnerability_report_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vulnerability_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.VulnerabilityReport() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.VulnerabilityReport() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_vulnerability_report( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_vulnerability_report_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_vulnerability_report( + vulnerability.GetVulnerabilityReportRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vulnerability.ListVulnerabilityReportsRequest, + dict, + ], +) +def test_list_vulnerability_reports(request_type, transport: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.ListVulnerabilityReportsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_vulnerability_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.ListVulnerabilityReportsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVulnerabilityReportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_vulnerability_reports_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + client.list_vulnerability_reports() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.ListVulnerabilityReportsRequest() + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_async( + transport: str = "grpc_asyncio", + request_type=vulnerability.ListVulnerabilityReportsRequest, +): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.ListVulnerabilityReportsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_vulnerability_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == vulnerability.ListVulnerabilityReportsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListVulnerabilityReportsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_async_from_dict(): + await test_list_vulnerability_reports_async(request_type=dict) + + +def test_list_vulnerability_reports_field_headers(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vulnerability.ListVulnerabilityReportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + call.return_value = vulnerability.ListVulnerabilityReportsResponse() + client.list_vulnerability_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_field_headers_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vulnerability.ListVulnerabilityReportsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.ListVulnerabilityReportsResponse() + ) + await client.list_vulnerability_reports(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_vulnerability_reports_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.ListVulnerabilityReportsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_vulnerability_reports( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_vulnerability_reports_flattened_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_vulnerability_reports( + vulnerability.ListVulnerabilityReportsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_flattened_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vulnerability.ListVulnerabilityReportsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vulnerability.ListVulnerabilityReportsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_vulnerability_reports( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_flattened_error_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_vulnerability_reports( + vulnerability.ListVulnerabilityReportsRequest(), + parent="parent_value", + ) + + +def test_list_vulnerability_reports_pager(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_vulnerability_reports(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vulnerability.VulnerabilityReport) for i in results) + + +def test_list_vulnerability_reports_pages(transport_name: str = "grpc"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), "__call__" + ) as call: + # Set the response to a series of pages. 
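+ # Unlike the item-level pager test above, this variant iterates page
+ # objects: `.pages` yields one entry per fake response, and each page's
+ # `raw_page.next_page_token` is compared against the expected sequence
+ # "abc", "def", "ghi", "" below.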
+ call.side_effect = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + RuntimeError, + ) + pages = list(client.list_vulnerability_reports(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_async_pager(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_vulnerability_reports( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, vulnerability.VulnerabilityReport) for i in responses) + + +@pytest.mark.asyncio +async def test_list_vulnerability_reports_async_pages(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vulnerability_reports), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_vulnerability_reports(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.CreateOSPolicyAssignmentRequest, + dict, + ], +) +def test_create_os_policy_assignment_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["os_policy_assignment"] = { + "name": "name_value", + "description": "description_value", + "os_policies": [ + { + "id": "id_value", + "description": "description_value", + "mode": 1, + "resource_groups": [ + { + "os_filter": { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + }, + "inventory_filters": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + "resources": [ + { + "id": "id_value", + "pkg": { + "desired_state": 1, + "apt": {"name": "name_value"}, + "deb": { + "source": { + "remote": { + "uri": "uri_value", + "sha256_checksum": "sha256_checksum_value", + }, + "gcs": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "local_path": "local_path_value", + "allow_insecure": True, + }, + "pull_deps": True, + }, + "yum": {"name": "name_value"}, + "zypper": {"name": "name_value"}, + "rpm": {"source": {}, "pull_deps": True}, + "googet": {"name": "name_value"}, + "msi": { + "source": {}, + "properties": [ + "properties_value1", + "properties_value2", + ], + }, + }, + "repository": { + "apt": { + "archive_type": 1, + "uri": "uri_value", + "distribution": "distribution_value", + "components": [ + "components_value1", + "components_value2", + ], + "gpg_key": "gpg_key_value", + }, + "yum": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "zypper": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "goo": {"name": "name_value", "url": "url_value"}, + }, + "exec_": { + "validate": { + "file": {}, + "script": "script_value", + "args": ["args_value1", "args_value2"], + "interpreter": 1, + "output_file_path": "output_file_path_value", + }, + "enforce": {}, + }, + "file": { + "file": {}, + "content": "content_value", + 
"path": "path_value", + "state": 1, + "permissions": "permissions_value", + }, + } + ], + } + ], + "allow_no_resource_group_match": True, + } + ], + "instance_filter": { + "all_": True, + "os_short_names": ["os_short_names_value1", "os_short_names_value2"], + "inclusion_labels": [{"labels": {}}], + "exclusion_labels": {}, + "inventories": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + }, + "rollout": { + "disruption_budget": {"fixed": 528, "percent": 753}, + "min_wait_duration": {"seconds": 751, "nanos": 543}, + }, + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "etag": "etag_value", + "rollout_state": 1, + "baseline": True, + "deleted": True, + "reconciling": True, + "uid": "uid_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_os_policy_assignment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_os_policy_assignment_rest_required_fields( + request_type=os_policy_assignments.CreateOSPolicyAssignmentRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["os_policy_assignment_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "osPolicyAssignmentId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "osPolicyAssignmentId" in jsonified_request + assert ( + jsonified_request["osPolicyAssignmentId"] + == request_init["os_policy_assignment_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["osPolicyAssignmentId"] = "os_policy_assignment_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_os_policy_assignment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("os_policy_assignment_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "osPolicyAssignmentId" in jsonified_request + assert jsonified_request["osPolicyAssignmentId"] == "os_policy_assignment_id_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_os_policy_assignment(request) + + expected_params = [ + ( + "osPolicyAssignmentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_os_policy_assignment_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_os_policy_assignment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("osPolicyAssignmentId",)) + & set( + ( + "parent", + "osPolicyAssignment", + "osPolicyAssignmentId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_os_policy_assignment_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_create_os_policy_assignment", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_create_os_policy_assignment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.CreateOSPolicyAssignmentRequest.pb( + os_policy_assignments.CreateOSPolicyAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
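# Flesh out the stubbed HTTP response so it deserializes into a
+        # well-formed Operation before the (mocked) post hook is invoked.
+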
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = os_policy_assignments.CreateOSPolicyAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_os_policy_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_os_policy_assignment_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.CreateOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["os_policy_assignment"] = { + "name": "name_value", + "description": "description_value", + "os_policies": [ + { + "id": "id_value", + "description": "description_value", + "mode": 1, + "resource_groups": [ + { + "os_filter": { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + }, + "inventory_filters": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + "resources": [ + { + "id": "id_value", + "pkg": { + "desired_state": 1, + "apt": {"name": "name_value"}, + "deb": { + "source": { + "remote": { + "uri": "uri_value", + "sha256_checksum": "sha256_checksum_value", + }, + "gcs": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "local_path": "local_path_value", + "allow_insecure": True, + }, + "pull_deps": True, + }, + "yum": {"name": "name_value"}, + "zypper": {"name": "name_value"}, + "rpm": {"source": {}, "pull_deps": True}, + "googet": {"name": "name_value"}, + "msi": { + "source": {}, + "properties": [ + "properties_value1", + "properties_value2", + ], + }, + }, + "repository": { + "apt": { + "archive_type": 1, + "uri": "uri_value", + "distribution": "distribution_value", + "components": [ + "components_value1", + "components_value2", + ], + "gpg_key": "gpg_key_value", + }, + "yum": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "zypper": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "goo": {"name": "name_value", "url": "url_value"}, + }, + "exec_": { + "validate": { + "file": {}, + "script": "script_value", + "args": ["args_value1", "args_value2"], + "interpreter": 1, + "output_file_path": "output_file_path_value", + }, + "enforce": {}, + }, + "file": { + "file": {}, + "content": "content_value", + "path": "path_value", + "state": 1, + "permissions": "permissions_value", + }, + } + ], + } + ], + "allow_no_resource_group_match": True, + } + ], + "instance_filter": { + "all_": True, + "os_short_names": ["os_short_names_value1", "os_short_names_value2"], + "inclusion_labels": [{"labels": {}}], + "exclusion_labels": {}, + "inventories": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + }, + "rollout": { + "disruption_budget": {"fixed": 528, "percent": 753}, + "min_wait_duration": {"seconds": 751, "nanos": 543}, + }, + "revision_id": 
"revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "etag": "etag_value", + "rollout_state": 1, + "baseline": True, + "deleted": True, + "reconciling": True, + "uid": "uid_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_os_policy_assignment(request) + + +def test_create_os_policy_assignment_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_os_policy_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/osPolicyAssignments" + % client.transport._host, + args[1], + ) + + +def test_create_os_policy_assignment_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_os_policy_assignment( + os_policy_assignments.CreateOSPolicyAssignmentRequest(), + parent="parent_value", + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + os_policy_assignment_id="os_policy_assignment_id_value", + ) + + +def test_create_os_policy_assignment_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.UpdateOSPolicyAssignmentRequest, + dict, + ], +) +def test_update_os_policy_assignment_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "os_policy_assignment": { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + } + request_init["os_policy_assignment"] = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3", + "description": "description_value", + "os_policies": [ + { + "id": "id_value", + "description": "description_value", + "mode": 1, + "resource_groups": [ + { + "os_filter": { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + }, + "inventory_filters": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + "resources": [ + { + "id": "id_value", + "pkg": { + "desired_state": 1, + "apt": {"name": "name_value"}, + "deb": { + "source": { + "remote": { + "uri": "uri_value", + "sha256_checksum": "sha256_checksum_value", + }, + "gcs": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "local_path": "local_path_value", + "allow_insecure": True, + }, + "pull_deps": True, + }, + "yum": {"name": "name_value"}, + "zypper": {"name": "name_value"}, + "rpm": {"source": {}, "pull_deps": True}, + "googet": {"name": "name_value"}, + "msi": { + "source": {}, + "properties": [ + "properties_value1", + "properties_value2", + ], + }, + }, + "repository": { + "apt": { + "archive_type": 1, + "uri": "uri_value", + "distribution": "distribution_value", + "components": [ + "components_value1", + "components_value2", + ], + "gpg_key": "gpg_key_value", + }, + "yum": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "zypper": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "goo": {"name": "name_value", "url": "url_value"}, + }, + "exec_": { + "validate": { + "file": {}, + "script": "script_value", + "args": ["args_value1", "args_value2"], + "interpreter": 1, + "output_file_path": "output_file_path_value", + }, + "enforce": {}, + }, + "file": { + "file": {}, + "content": "content_value", + "path": "path_value", + "state": 1, + "permissions": "permissions_value", + }, + } + ], + } + ], + "allow_no_resource_group_match": True, + } + ], + "instance_filter": { + "all_": True, + "os_short_names": ["os_short_names_value1", "os_short_names_value2"], + "inclusion_labels": [{"labels": {}}], + "exclusion_labels": {}, + "inventories": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + }, + "rollout": { + "disruption_budget": {"fixed": 528, "percent": 753}, + "min_wait_duration": {"seconds": 751, 
"nanos": 543}, + }, + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "etag": "etag_value", + "rollout_state": 1, + "baseline": True, + "deleted": True, + "reconciling": True, + "uid": "uid_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_os_policy_assignment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_os_policy_assignment_rest_required_fields( + request_type=os_policy_assignments.UpdateOSPolicyAssignmentRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_os_policy_assignment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_os_policy_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_os_policy_assignment_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_os_policy_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("osPolicyAssignment",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_os_policy_assignment_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_update_os_policy_assignment", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_update_os_policy_assignment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.UpdateOSPolicyAssignmentRequest.pb( + os_policy_assignments.UpdateOSPolicyAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = os_policy_assignments.UpdateOSPolicyAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_os_policy_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_os_policy_assignment_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.UpdateOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "os_policy_assignment": { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + } + request_init["os_policy_assignment"] = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3", + "description": "description_value", + "os_policies": [ + { + "id": "id_value", + "description": "description_value", + "mode": 1, + "resource_groups": 
[ + { + "os_filter": { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + }, + "inventory_filters": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + "resources": [ + { + "id": "id_value", + "pkg": { + "desired_state": 1, + "apt": {"name": "name_value"}, + "deb": { + "source": { + "remote": { + "uri": "uri_value", + "sha256_checksum": "sha256_checksum_value", + }, + "gcs": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "local_path": "local_path_value", + "allow_insecure": True, + }, + "pull_deps": True, + }, + "yum": {"name": "name_value"}, + "zypper": {"name": "name_value"}, + "rpm": {"source": {}, "pull_deps": True}, + "googet": {"name": "name_value"}, + "msi": { + "source": {}, + "properties": [ + "properties_value1", + "properties_value2", + ], + }, + }, + "repository": { + "apt": { + "archive_type": 1, + "uri": "uri_value", + "distribution": "distribution_value", + "components": [ + "components_value1", + "components_value2", + ], + "gpg_key": "gpg_key_value", + }, + "yum": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "zypper": { + "id": "id_value", + "display_name": "display_name_value", + "base_url": "base_url_value", + "gpg_keys": [ + "gpg_keys_value1", + "gpg_keys_value2", + ], + }, + "goo": {"name": "name_value", "url": "url_value"}, + }, + "exec_": { + "validate": { + "file": {}, + "script": "script_value", + "args": ["args_value1", "args_value2"], + "interpreter": 1, + "output_file_path": "output_file_path_value", + }, + "enforce": {}, + }, + "file": { + "file": {}, + "content": "content_value", + "path": "path_value", + "state": 1, + "permissions": "permissions_value", + }, + } + ], + } + ], + "allow_no_resource_group_match": True, + } + ], + "instance_filter": { + "all_": True, + "os_short_names": ["os_short_names_value1", "os_short_names_value2"], + "inclusion_labels": [{"labels": {}}], + "exclusion_labels": {}, + "inventories": [ + { + "os_short_name": "os_short_name_value", + "os_version": "os_version_value", + } + ], + }, + "rollout": { + "disruption_budget": {"fixed": 528, "percent": 753}, + "min_wait_duration": {"seconds": 751, "nanos": 543}, + }, + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + "etag": "etag_value", + "rollout_state": 1, + "baseline": True, + "deleted": True, + "reconciling": True, + "uid": "uid_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_os_policy_assignment(request) + + +def test_update_os_policy_assignment_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
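+        # update_os_policy_assignment is a long-running operation, so the
+        # faked response is an Operation rather than the resource itself.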
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "os_policy_assignment": { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_os_policy_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{os_policy_assignment.name=projects/*/locations/*/osPolicyAssignments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_os_policy_assignment_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_os_policy_assignment( + os_policy_assignments.UpdateOSPolicyAssignmentRequest(), + os_policy_assignment=os_policy_assignments.OSPolicyAssignment( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_os_policy_assignment_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.GetOSPolicyAssignmentRequest, + dict, + ], +) +def test_get_os_policy_assignment_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.OSPolicyAssignment( + name="name_value", + description="description_value", + revision_id="revision_id_value", + etag="etag_value", + rollout_state=os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS, + baseline=True, + deleted=True, + reconciling=True, + uid="uid_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignments.OSPolicyAssignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_os_policy_assignment(request) + + # Establish that the response is the type that we expect. 
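+    # Every field faked above should round-trip through JSON
+    # (de)serialization into the typed response checked here.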
+ assert isinstance(response, os_policy_assignments.OSPolicyAssignment) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.revision_id == "revision_id_value" + assert response.etag == "etag_value" + assert ( + response.rollout_state + == os_policy_assignments.OSPolicyAssignment.RolloutState.IN_PROGRESS + ) + assert response.baseline is True + assert response.deleted is True + assert response.reconciling is True + assert response.uid == "uid_value" + + +def test_get_os_policy_assignment_rest_required_fields( + request_type=os_policy_assignments.GetOSPolicyAssignmentRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.OSPolicyAssignment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = os_policy_assignments.OSPolicyAssignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_os_policy_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_os_policy_assignment_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_os_policy_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_os_policy_assignment_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "post_get_os_policy_assignment" + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_get_os_policy_assignment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.GetOSPolicyAssignmentRequest.pb( + os_policy_assignments.GetOSPolicyAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = os_policy_assignments.OSPolicyAssignment.to_json( + os_policy_assignments.OSPolicyAssignment() + ) + + request = os_policy_assignments.GetOSPolicyAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = os_policy_assignments.OSPolicyAssignment() + + client.get_os_policy_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_os_policy_assignment_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.GetOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
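+    # google.api_core translates an HTTP 400 response into
+    # core_exceptions.BadRequest, which pytest.raises() checks for.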
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_os_policy_assignment(request) + + +def test_get_os_policy_assignment_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.OSPolicyAssignment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignments.OSPolicyAssignment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_os_policy_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/osPolicyAssignments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_os_policy_assignment_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_os_policy_assignment( + os_policy_assignments.GetOSPolicyAssignmentRequest(), + name="name_value", + ) + + +def test_get_os_policy_assignment_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.ListOSPolicyAssignmentsRequest, + dict, + ], +) +def test_list_os_policy_assignments_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_os_policy_assignments(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignments_rest_required_fields( + request_type=os_policy_assignments.ListOSPolicyAssignmentsRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_os_policy_assignments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_os_policy_assignments_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_os_policy_assignments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_os_policy_assignments_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_list_os_policy_assignments", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_list_os_policy_assignments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.ListOSPolicyAssignmentsRequest.pb( + os_policy_assignments.ListOSPolicyAssignmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + os_policy_assignments.ListOSPolicyAssignmentsResponse.to_json( + os_policy_assignments.ListOSPolicyAssignmentsResponse() + ) + ) + + request = os_policy_assignments.ListOSPolicyAssignmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + + client.list_os_policy_assignments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_os_policy_assignments_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.ListOSPolicyAssignmentsRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_os_policy_assignments(request) + + +def test_list_os_policy_assignments_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignments.ListOSPolicyAssignmentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_os_policy_assignments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/osPolicyAssignments" + % client.transport._host, + args[1], + ) + + +def test_list_os_policy_assignments_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_policy_assignments( + os_policy_assignments.ListOSPolicyAssignmentsRequest(), + parent="parent_value", + ) + + +def test_list_os_policy_assignments_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + os_policy_assignments.ListOSPolicyAssignmentsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_os_policy_assignments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in results + ) + + pages = list(client.list_os_policy_assignments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, + dict, + ], +) +def test_list_os_policy_assignment_revisions_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_os_policy_assignment_revisions(request) + + # Establish that the response is the type that we expect. 
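+    # The call returns a pager that fetches further pages lazily through the
+    # same mocked session; only the first page has been requested so far.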
+ assert isinstance(response, pagers.ListOSPolicyAssignmentRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignment_revisions_rest_required_fields( + request_type=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignment_revisions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignment_revisions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_os_policy_assignment_revisions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_os_policy_assignment_revisions_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_os_policy_assignment_revisions._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_os_policy_assignment_revisions_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_list_os_policy_assignment_revisions", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_list_os_policy_assignment_revisions", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest.pb( + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.to_json( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + ) + ) + + request = os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + ) + + client.list_os_policy_assignment_revisions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_os_policy_assignment_revisions_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock 
the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_os_policy_assignment_revisions(request) + + +def test_list_os_policy_assignment_revisions_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_os_policy_assignment_revisions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/osPolicyAssignments/*}:listRevisions" + % client.transport._host, + args[1], + ) + + +def test_list_os_policy_assignment_revisions_rest_flattened_error( + transport: str = "rest", +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_policy_assignment_revisions( + os_policy_assignments.ListOSPolicyAssignmentRevisionsRequest(), + name="name_value", + ) + + +def test_list_os_policy_assignment_revisions_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="abc", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[], + next_page_token="def", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + ], + next_page_token="ghi", + ), + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse( + os_policy_assignments=[ + os_policy_assignments.OSPolicyAssignment(), + os_policy_assignments.OSPolicyAssignment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + os_policy_assignments.ListOSPolicyAssignmentRevisionsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + + pager = client.list_os_policy_assignment_revisions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, os_policy_assignments.OSPolicyAssignment) for i in results + ) + + pages = list( + client.list_os_policy_assignment_revisions(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignments.DeleteOSPolicyAssignmentRequest, + dict, + ], +) +def test_delete_os_policy_assignment_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_os_policy_assignment(request) + + # Establish that the response is the type that we expect. 
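+    # delete_os_policy_assignment is a long-running operation: the client
+    # returns an Operation future, and its .operation attribute exposes the
+    # raw operations_pb2.Operation parsed from the JSON body above.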
+ assert response.operation.name == "operations/spam" + + +def test_delete_os_policy_assignment_rest_required_fields( + request_type=os_policy_assignments.DeleteOSPolicyAssignmentRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_os_policy_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
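+            # The stubbed transcode() result below supplies a fixed uri/method
+            # and routes the whole request through query_params; the only
+            # parameter the transport then adds itself is the $alt system
+            # parameter asserted at the end of this test.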
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_os_policy_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_os_policy_assignment_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_os_policy_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_os_policy_assignment_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_delete_os_policy_assignment", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_delete_os_policy_assignment", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignments.DeleteOSPolicyAssignmentRequest.pb( + os_policy_assignments.DeleteOSPolicyAssignmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = os_policy_assignments.DeleteOSPolicyAssignmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_os_policy_assignment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_os_policy_assignment_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignments.DeleteOSPolicyAssignmentRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
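+    # The REST transport maps any non-2xx status to the matching
+    # google.api_core exception, so the canned 400 below surfaces as
+    # core_exceptions.BadRequest.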
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_os_policy_assignment(request) + + +def test_delete_os_policy_assignment_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/osPolicyAssignments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_os_policy_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/osPolicyAssignments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_os_policy_assignment_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_os_policy_assignment( + os_policy_assignments.DeleteOSPolicyAssignmentRequest(), + name="name_value", + ) + + +def test_delete_os_policy_assignment_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, + dict, + ], +) +def test_get_instance_os_policies_compliance_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instanceOSPoliciesCompliances/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
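+        # Populate every scalar field, including the enum state, so the
+        # assertions after the call can verify a full JSON round-trip.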
+ return_value = instance_os_policies_compliance.InstanceOSPoliciesCompliance( + name="name_value", + instance="instance_value", + state=config_common.OSPolicyComplianceState.COMPLIANT, + detailed_state="detailed_state_value", + detailed_state_reason="detailed_state_reason_value", + last_compliance_run_id="last_compliance_run_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ( + instance_os_policies_compliance.InstanceOSPoliciesCompliance.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_instance_os_policies_compliance(request) + + # Establish that the response is the type that we expect. + assert isinstance( + response, instance_os_policies_compliance.InstanceOSPoliciesCompliance + ) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.state == config_common.OSPolicyComplianceState.COMPLIANT + assert response.detailed_state == "detailed_state_value" + assert response.detailed_state_reason == "detailed_state_reason_value" + assert response.last_compliance_run_id == "last_compliance_run_id_value" + + +def test_get_instance_os_policies_compliance_rest_required_fields( + request_type=instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance_os_policies_compliance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance_os_policies_compliance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = instance_os_policies_compliance.InstanceOSPoliciesCompliance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
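+            # This RPC transcodes to an HTTP GET with no body, so once
+            # transcode() is stubbed, every request field travels via
+            # query_params.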
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = ( + instance_os_policies_compliance.InstanceOSPoliciesCompliance.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_instance_os_policies_compliance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_instance_os_policies_compliance_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.get_instance_os_policies_compliance._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_os_policies_compliance_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_get_instance_os_policies_compliance", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_get_instance_os_policies_compliance", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest.pb( + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + instance_os_policies_compliance.InstanceOSPoliciesCompliance.to_json( + instance_os_policies_compliance.InstanceOSPoliciesCompliance() + ) + ) + + request = ( + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + instance_os_policies_compliance.InstanceOSPoliciesCompliance() + ) + + client.get_instance_os_policies_compliance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_os_policies_compliance_rest_bad_request( + transport: str = "rest", + request_type=instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instanceOSPoliciesCompliances/sample3" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance_os_policies_compliance(request) + + +def test_get_instance_os_policies_compliance_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = instance_os_policies_compliance.InstanceOSPoliciesCompliance() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instanceOSPoliciesCompliances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ( + instance_os_policies_compliance.InstanceOSPoliciesCompliance.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_instance_os_policies_compliance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/instanceOSPoliciesCompliances/*}" + % client.transport._host, + args[1], + ) + + +def test_get_instance_os_policies_compliance_rest_flattened_error( + transport: str = "rest", +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance_os_policies_compliance( + instance_os_policies_compliance.GetInstanceOSPoliciesComplianceRequest(), + name="name_value", + ) + + +def test_get_instance_os_policies_compliance_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, + dict, + ], +) +def test_list_instance_os_policies_compliances_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
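+        # Only next_page_token is populated; the test just checks the pager
+        # type and that the token is surfaced on it.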
+ return_value = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + next_page_token="next_page_token_value", + ) + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_instance_os_policies_compliances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstanceOSPoliciesCompliancesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_instance_os_policies_compliances_rest_required_fields( + request_type=instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instance_os_policies_compliances._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instance_os_policies_compliances._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_instance_os_policies_compliances(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_instance_os_policies_compliances_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_instance_os_policies_compliances._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instance_os_policies_compliances_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_list_instance_os_policies_compliances", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_list_instance_os_policies_compliances", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest.pb( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse.to_json( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse() + ) + + request = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest() + ) + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse() + ) + + client.list_instance_os_policies_compliances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instance_os_policies_compliances_rest_bad_request( + transport: str = "rest", + request_type=instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": 
"projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instance_os_policies_compliances(request) + + +def test_list_instance_os_policies_compliances_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_instance_os_policies_compliances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*}/instanceOSPoliciesCompliances" + % client.transport._host, + args[1], + ) + + +def test_list_instance_os_policies_compliances_rest_flattened_error( + transport: str = "rest", +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instance_os_policies_compliances( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesRequest(), + parent="parent_value", + ) + + +def test_list_instance_os_policies_compliances_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + instance_os_policies_compliances=[ + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + ], + next_page_token="abc", + ), + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + instance_os_policies_compliances=[], + next_page_token="def", + ), + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + instance_os_policies_compliances=[ + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + ], + next_page_token="ghi", + ), + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse( + instance_os_policies_compliances=[ + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + instance_os_policies_compliance.InstanceOSPoliciesCompliance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + instance_os_policies_compliance.ListInstanceOSPoliciesCompliancesResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_instance_os_policies_compliances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, instance_os_policies_compliance.InstanceOSPoliciesCompliance) + for i in results + ) + + pages = list( + client.list_instance_os_policies_compliances(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, + dict, + ], +) +def test_get_os_policy_assignment_report_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4/report" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
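+        # Set each scalar field on the canned report so the field-by-field
+        # assertions below confirm the JSON payload was parsed back intact.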
+ return_value = os_policy_assignment_reports.OSPolicyAssignmentReport( + name="name_value", + instance="instance_value", + os_policy_assignment="os_policy_assignment_value", + last_run_id="last_run_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignment_reports.OSPolicyAssignmentReport.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_os_policy_assignment_report(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, os_policy_assignment_reports.OSPolicyAssignmentReport) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.os_policy_assignment == "os_policy_assignment_value" + assert response.last_run_id == "last_run_id_value" + + +def test_get_os_policy_assignment_report_rest_required_fields( + request_type=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_os_policy_assignment_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_os_policy_assignment_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = os_policy_assignment_reports.OSPolicyAssignmentReport() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = os_policy_assignment_reports.OSPolicyAssignmentReport.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_os_policy_assignment_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_os_policy_assignment_report_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_os_policy_assignment_report._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_os_policy_assignment_report_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_get_os_policy_assignment_report", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_get_os_policy_assignment_report", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest.pb( + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + os_policy_assignment_reports.OSPolicyAssignmentReport.to_json( + os_policy_assignment_reports.OSPolicyAssignmentReport() + ) + ) + + request = os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = os_policy_assignment_reports.OSPolicyAssignmentReport() + + client.get_os_policy_assignment_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_os_policy_assignment_report_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4/report" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_os_policy_assignment_report(request) + + +def test_get_os_policy_assignment_report_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = os_policy_assignment_reports.OSPolicyAssignmentReport() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4/report" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = os_policy_assignment_reports.OSPolicyAssignmentReport.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_os_policy_assignment_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/instances/*/osPolicyAssignments/*/report}" + % client.transport._host, + args[1], + ) + + +def test_get_os_policy_assignment_report_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_os_policy_assignment_report( + os_policy_assignment_reports.GetOSPolicyAssignmentReportRequest(), + name="name_value", + ) + + +def test_get_os_policy_assignment_report_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, + dict, + ], +) +def test_list_os_policy_assignment_reports_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + next_page_token="next_page_token_value", + ) + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_os_policy_assignment_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOSPolicyAssignmentReportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_os_policy_assignment_reports_rest_required_fields( + request_type=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignment_reports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_os_policy_assignment_reports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_os_policy_assignment_reports(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_os_policy_assignment_reports_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_os_policy_assignment_reports._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_os_policy_assignment_reports_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_list_os_policy_assignment_reports", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "pre_list_os_policy_assignment_reports", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest.pb( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + ) + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.to_json( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + ) + + request = os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + + client.list_os_policy_assignment_reports( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_os_policy_assignment_reports_rest_bad_request( + transport: str = "rest", + request_type=os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest, +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": 
"projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_os_policy_assignment_reports(request) + + +def test_list_os_policy_assignment_reports_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_os_policy_assignment_reports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/instances/*/osPolicyAssignments/*}/reports" + % client.transport._host, + args[1], + ) + + +def test_list_os_policy_assignment_reports_rest_flattened_error( + transport: str = "rest", +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_os_policy_assignment_reports( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsRequest(), + parent="parent_value", + ) + + +def test_list_os_policy_assignment_reports_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="abc", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[], + next_page_token="def", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + next_page_token="ghi", + ), + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse( + os_policy_assignment_reports=[ + os_policy_assignment_reports.OSPolicyAssignmentReport(), + os_policy_assignment_reports.OSPolicyAssignmentReport(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + os_policy_assignment_reports.ListOSPolicyAssignmentReportsResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/osPolicyAssignments/sample4" + } + + pager = client.list_os_policy_assignment_reports(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, os_policy_assignment_reports.OSPolicyAssignmentReport) + for i in results + ) + + pages = list( + client.list_os_policy_assignment_reports(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + inventory.GetInventoryRequest, + dict, + ], +) +def test_get_inventory_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/inventory" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = inventory.Inventory( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = inventory.Inventory.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_inventory(request) + + # Establish that the response is the type that we expect. 
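+    # The transport parses the JSON body back into a proto-plus Inventory,
+    # so these assertions verify the serialize/deserialize round-trip.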
+ assert isinstance(response, inventory.Inventory) + assert response.name == "name_value" + + +def test_get_inventory_rest_required_fields(request_type=inventory.GetInventoryRequest): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_inventory._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_inventory._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = inventory.Inventory() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = inventory.Inventory.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_inventory(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_inventory_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_inventory._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_inventory_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "post_get_inventory" + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_get_inventory" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = inventory.GetInventoryRequest.pb(inventory.GetInventoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = inventory.Inventory.to_json(inventory.Inventory()) + + request = inventory.GetInventoryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = inventory.Inventory() + + client.get_inventory( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_inventory_rest_bad_request( + transport: str = "rest", request_type=inventory.GetInventoryRequest +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/inventory" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
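The 400 faked below surfaces as `core_exceptions.BadRequest` because the transport maps HTTP status codes onto the `google.api_core.exceptions` hierarchy. That mapping can be reproduced directly with api-core's own helper (a quick sketch, assuming `google-api-core` is installed):

```python
from google.api_core import exceptions

# from_http_status picks the exception subclass registered for the code.
err = exceptions.from_http_status(400, "fake bad request")
assert isinstance(err, exceptions.BadRequest)
assert isinstance(err, exceptions.ClientError)  # the 4xx family
assert err.code == 400
```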
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_inventory(request) + + +def test_get_inventory_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = inventory.Inventory() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/inventory" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = inventory.Inventory.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_inventory(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/instances/*/inventory}" + % client.transport._host, + args[1], + ) + + +def test_get_inventory_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_inventory( + inventory.GetInventoryRequest(), + name="name_value", + ) + + +def test_get_inventory_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + inventory.ListInventoriesRequest, + dict, + ], +) +def test_list_inventories_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = inventory.ListInventoriesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = inventory.ListInventoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_inventories(request) + + # Establish that the response is the type that we expect. 
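`path_template.validate`, used by the flattened-call tests above, checks a concrete resource name against a template in which `*` matches exactly one path segment. Two standalone checks (assuming `google-api-core`):

```python
from google.api_core import path_template

# A well-formed inventory name matches the template...
assert path_template.validate(
    "projects/*/locations/*/instances/*/inventory",
    "projects/p1/locations/us-central1-a/instances/i1/inventory",
)

# ...while a name with a missing segment does not.
assert not path_template.validate(
    "projects/*/locations/*/instances/*/inventory",
    "projects/p1/locations/us-central1-a/inventory",
)
```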
+ assert isinstance(response, pagers.ListInventoriesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_inventories_rest_required_fields( + request_type=inventory.ListInventoriesRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_inventories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_inventories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = inventory.ListInventoriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
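Note the two spellings across these paired tests: `page_size`/`page_token` here versus `pageSize`/`pageToken` in the `_unset_required_fields` test below. They are the proto field names and their JSON wire names respectively, and `json_format` can emit either. A small demo on `descriptor_pb2`, chosen only because it has a conveniently multi-word field:

```python
from google.protobuf import descriptor_pb2, json_format

field = descriptor_pb2.FieldDescriptorProto(name="page_size", json_name="pageSize")

# Default JSON output uses the lowerCamelCase wire names...
assert json_format.MessageToDict(field) == {"name": "page_size", "jsonName": "pageSize"}

# ...while preserving_proto_field_name keeps the proto's snake_case names.
assert json_format.MessageToDict(field, preserving_proto_field_name=True) == {
    "name": "page_size",
    "json_name": "pageSize",
}
```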
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = inventory.ListInventoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_inventories(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_inventories_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_inventories._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_inventories_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "post_list_inventories" + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_list_inventories" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = inventory.ListInventoriesRequest.pb( + inventory.ListInventoriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = inventory.ListInventoriesResponse.to_json( + inventory.ListInventoriesResponse() + ) + + request = inventory.ListInventoriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = inventory.ListInventoriesResponse() + + client.list_inventories( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_inventories_rest_bad_request( + transport: str = "rest", request_type=inventory.ListInventoriesRequest +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
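The interceptor test above patches the `pre_list_inventories`/`post_list_inventories` hooks that the REST transport invokes around each call. A user-supplied interceptor follows the same shape; here is a sketch inferred from the hook names and return values that test exercises (`pre_*` returns the possibly-modified `(request, metadata)` pair, `post_*` returns the response):

```python
from google.auth.credentials import AnonymousCredentials
from google.cloud.osconfig_v1alpha.services.os_config_zonal_service import (
    OsConfigZonalServiceClient,
    transports,
)


class LoggingInterceptor(transports.OsConfigZonalServiceRestInterceptor):
    """Log each ListInventories round trip and tag the outgoing metadata."""

    def pre_list_inventories(self, request, metadata):
        print(f"-> ListInventories parent={request.parent!r}")
        # Metadata is a sequence of (key, value) pairs, like the
        # ("cephalopod", "squid") pair the test sends.
        return request, list(metadata) + [("x-audit", "1")]

    def post_list_inventories(self, response):
        print(f"<- ListInventories {len(response.inventories)} item(s)")
        return response


# Wired up exactly as in the test: interceptor -> transport -> client.
transport = transports.OsConfigZonalServiceRestTransport(
    credentials=AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = OsConfigZonalServiceClient(transport=transport)
```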
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_inventories(request) + + +def test_list_inventories_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = inventory.ListInventoriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = inventory.ListInventoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_inventories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/instances/*}/inventories" + % client.transport._host, + args[1], + ) + + +def test_list_inventories_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_inventories( + inventory.ListInventoriesRequest(), + parent="parent_value", + ) + + +def test_list_inventories_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
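The pager driven below hides the token loop that this test fakes page by page. Its behavior is equivalent to the following generic sketch (the `list_page` callable is hypothetical, standing in for one HTTP round trip):

```python
from typing import Callable, Iterator, List, Tuple

# list_page(page_token) -> (items, next_page_token); "" means the last page.
PageFn = Callable[[str], Tuple[List[str], str]]


def iterate_all_items(list_page: PageFn) -> Iterator[str]:
    """Fetch page after page, yielding items, until the token comes back empty."""
    token = ""
    while True:
        items, token = list_page(token)
        yield from items
        if not token:
            return


# Four fake pages mirroring the "abc"/"def"/"ghi"/"" sequence used below.
pages = {
    "": (["i1", "i2", "i3"], "abc"),
    "abc": ([], "def"),
    "def": (["i4"], "ghi"),
    "ghi": (["i5", "i6"], ""),
}
assert list(iterate_all_items(lambda token: pages[token])) == [
    "i1", "i2", "i3", "i4", "i5", "i6",
]
```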
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + inventory.Inventory(), + ], + next_page_token="abc", + ), + inventory.ListInventoriesResponse( + inventories=[], + next_page_token="def", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + ], + next_page_token="ghi", + ), + inventory.ListInventoriesResponse( + inventories=[ + inventory.Inventory(), + inventory.Inventory(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(inventory.ListInventoriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + pager = client.list_inventories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, inventory.Inventory) for i in results) + + pages = list(client.list_inventories(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vulnerability.GetVulnerabilityReportRequest, + dict, + ], +) +def test_get_vulnerability_report_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/vulnerabilityReport" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vulnerability.VulnerabilityReport( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = vulnerability.VulnerabilityReport.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_vulnerability_report(request) + + # Establish that the response is the type that we expect. 
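The recurring `Type.pb(obj)` + `MessageToJson` two-step exists because these message classes are proto-plus wrappers, and `json_format` only understands the raw protobuf message underneath. The same machinery on a self-contained message (hypothetical `Report` type, assuming the `proto-plus` package):

```python
import json

import proto
from google.protobuf import json_format


class Report(proto.Message):
    """Hypothetical stand-in for e.g. vulnerability.VulnerabilityReport."""

    name = proto.Field(proto.STRING, number=1)


report = Report(name="name_value")

# .pb() unwraps the vanilla protobuf message that json_format understands...
json_str = json_format.MessageToJson(Report.pb(report))

# ...and proto-plus also offers the round trip directly.
assert json.loads(Report.to_json(report)) == json.loads(json_str)
assert Report.from_json(json_str) == report
```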
+ assert isinstance(response, vulnerability.VulnerabilityReport) + assert response.name == "name_value" + + +def test_get_vulnerability_report_rest_required_fields( + request_type=vulnerability.GetVulnerabilityReportRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vulnerability_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vulnerability_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vulnerability.VulnerabilityReport() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = vulnerability.VulnerabilityReport.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_vulnerability_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_vulnerability_report_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_vulnerability_report._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_vulnerability_report_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "post_get_vulnerability_report" + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_get_vulnerability_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vulnerability.GetVulnerabilityReportRequest.pb( + vulnerability.GetVulnerabilityReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = vulnerability.VulnerabilityReport.to_json( + vulnerability.VulnerabilityReport() + ) + + request = vulnerability.GetVulnerabilityReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vulnerability.VulnerabilityReport() + + client.get_vulnerability_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_vulnerability_report_rest_bad_request( + transport: str = "rest", request_type=vulnerability.GetVulnerabilityReportRequest +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/vulnerabilityReport" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_vulnerability_report(request) + + +def test_get_vulnerability_report_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vulnerability.VulnerabilityReport() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/vulnerabilityReport" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = vulnerability.VulnerabilityReport.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_vulnerability_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/instances/*/vulnerabilityReport}" + % client.transport._host, + args[1], + ) + + +def test_get_vulnerability_report_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_vulnerability_report( + vulnerability.GetVulnerabilityReportRequest(), + name="name_value", + ) + + +def test_get_vulnerability_report_rest_error(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vulnerability.ListVulnerabilityReportsRequest, + dict, + ], +) +def test_list_vulnerability_reports_rest(request_type): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vulnerability.ListVulnerabilityReportsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = vulnerability.ListVulnerabilityReportsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_vulnerability_reports(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVulnerabilityReportsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_vulnerability_reports_rest_required_fields( + request_type=vulnerability.ListVulnerabilityReportsRequest, +): + transport_class = transports.OsConfigZonalServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_vulnerability_reports._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_vulnerability_reports._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vulnerability.ListVulnerabilityReportsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = vulnerability.ListVulnerabilityReportsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_vulnerability_reports(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_vulnerability_reports_rest_unset_required_fields(): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_vulnerability_reports._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_vulnerability_reports_rest_interceptors(null_interceptor): + transport = transports.OsConfigZonalServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OsConfigZonalServiceRestInterceptor(), + ) + client = OsConfigZonalServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, + "post_list_vulnerability_reports", + ) as post, mock.patch.object( + transports.OsConfigZonalServiceRestInterceptor, "pre_list_vulnerability_reports" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = vulnerability.ListVulnerabilityReportsRequest.pb( + vulnerability.ListVulnerabilityReportsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + vulnerability.ListVulnerabilityReportsResponse.to_json( + vulnerability.ListVulnerabilityReportsResponse() + ) + ) + + request = vulnerability.ListVulnerabilityReportsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vulnerability.ListVulnerabilityReportsResponse() + + client.list_vulnerability_reports( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_vulnerability_reports_rest_bad_request( + transport: str = "rest", request_type=vulnerability.ListVulnerabilityReportsRequest +): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_vulnerability_reports(request) + + +def test_list_vulnerability_reports_rest_flattened(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vulnerability.ListVulnerabilityReportsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = vulnerability.ListVulnerabilityReportsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_vulnerability_reports(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/instances/*}/vulnerabilityReports" + % client.transport._host, + args[1], + ) + + +def test_list_vulnerability_reports_rest_flattened_error(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_vulnerability_reports( + vulnerability.ListVulnerabilityReportsRequest(), + parent="parent_value", + ) + + +def test_list_vulnerability_reports_rest_pager(transport: str = "rest"): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + next_page_token="abc", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[], + next_page_token="def", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + ], + next_page_token="ghi", + ), + vulnerability.ListVulnerabilityReportsResponse( + vulnerability_reports=[ + vulnerability.VulnerabilityReport(), + vulnerability.VulnerabilityReport(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vulnerability.ListVulnerabilityReportsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + pager = client.list_vulnerability_reports(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, vulnerability.VulnerabilityReport) for i in results) + + pages = list(client.list_vulnerability_reports(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OsConfigZonalServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
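The block above pins down a construction rule: a ready-made transport must be the sole source of configuration, so combining it with `credentials`, a credentials file, scopes, or an API key raises `ValueError`. Stated as usage (a sketch using the classes under test):

```python
from google.auth.credentials import AnonymousCredentials
from google.cloud.osconfig_v1alpha.services.os_config_zonal_service import (
    OsConfigZonalServiceClient,
    transports,
)

# Either let the client assemble its own transport from credentials...
client = OsConfigZonalServiceClient(credentials=AnonymousCredentials())

# ...or build and configure the transport yourself and hand it over whole.
transport = transports.OsConfigZonalServiceGrpcTransport(
    credentials=AnonymousCredentials(),
)
client = OsConfigZonalServiceClient(transport=transport)

# Mixing the two styles is rejected, as the tests above demonstrate:
# OsConfigZonalServiceClient(credentials=..., transport=transport)  # ValueError
```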
+ transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = OsConfigZonalServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.OsConfigZonalServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.OsConfigZonalServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + transports.OsConfigZonalServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = OsConfigZonalServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.OsConfigZonalServiceGrpcTransport, + ) + + +def test_os_config_zonal_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.OsConfigZonalServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_os_config_zonal_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.osconfig_v1alpha.services.os_config_zonal_service.transports.OsConfigZonalServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.OsConfigZonalServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_os_policy_assignment", + "update_os_policy_assignment", + "get_os_policy_assignment", + "list_os_policy_assignments", + "list_os_policy_assignment_revisions", + "delete_os_policy_assignment", + "get_instance_os_policies_compliance", + "list_instance_os_policies_compliances", + "get_os_policy_assignment_report", + "list_os_policy_assignment_reports", + "get_inventory", + "list_inventories", + "get_vulnerability_report", + "list_vulnerability_reports", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_os_config_zonal_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.osconfig_v1alpha.services.os_config_zonal_service.transports.OsConfigZonalServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OsConfigZonalServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_os_config_zonal_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.osconfig_v1alpha.services.os_config_zonal_service.transports.OsConfigZonalServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OsConfigZonalServiceTransport() + adc.assert_called_once() + + +def test_os_config_zonal_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + OsConfigZonalServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_zonal_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
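The "ADC" these tests keep patching is Application Default Credentials: `google.auth.default()` walks the standard lookup chain (the GOOGLE_APPLICATION_CREDENTIALS key file, gcloud user credentials, then the metadata server) and returns the `(credentials, project_id)` pair that the mocks return. Direct use looks like this (assuming `google-auth`; the call raises if no credentials are configured):

```python
import google.auth
from google.auth.exceptions import DefaultCredentialsError

try:
    credentials, project_id = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    print(f"project={project_id} credentials={type(credentials).__name__}")
except DefaultCredentialsError:
    print("No Application Default Credentials in this environment.")
```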
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + transports.OsConfigZonalServiceRestTransport, + ], +) +def test_os_config_zonal_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.OsConfigZonalServiceGrpcTransport, grpc_helpers), + (transports.OsConfigZonalServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_os_config_zonal_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "osconfig.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="osconfig.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigZonalServiceGrpcTransport, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_zonal_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
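The `client_cert_source_callback` used throughout these mTLS tests is just a zero-argument callable returning `(cert_bytes, key_bytes)`; the transport invokes it and passes the pair to `grpc.ssl_channel_credentials`. The same callback shape is what applications hand to `ClientOptions` (a sketch; the PEM file names are placeholders):

```python
from typing import Tuple

from google.api_core.client_options import ClientOptions


def client_cert_source() -> Tuple[bytes, bytes]:
    """Return the client certificate chain and private key as PEM bytes."""
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()


# The callback is held, not called, until an mTLS channel is actually built.
options = ClientOptions(client_cert_source=client_cert_source)
```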
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_os_config_zonal_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.OsConfigZonalServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_os_config_zonal_service_rest_lro_client():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_os_config_zonal_service_host_no_port(transport_name):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="osconfig.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "osconfig.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://osconfig.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_os_config_zonal_service_host_with_port(transport_name):
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="osconfig.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "osconfig.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://osconfig.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_os_config_zonal_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = OsConfigZonalServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = OsConfigZonalServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_os_policy_assignment._session
+    session2 = client2.transport.create_os_policy_assignment._session
+    assert session1 != session2
+    session1 = client1.transport.update_os_policy_assignment._session
+    session2 = client2.transport.update_os_policy_assignment._session
+    assert session1 != session2
+    session1 = client1.transport.get_os_policy_assignment._session
+    session2 = client2.transport.get_os_policy_assignment._session
+    assert session1 != session2
+    session1 = client1.transport.list_os_policy_assignments._session
+    session2 = client2.transport.list_os_policy_assignments._session
+    assert session1 != session2
+    session1 = client1.transport.list_os_policy_assignment_revisions._session
+    session2 = client2.transport.list_os_policy_assignment_revisions._session
+    assert session1 != session2
+    session1 = client1.transport.delete_os_policy_assignment._session
+    session2 = client2.transport.delete_os_policy_assignment._session
+    assert session1 != session2
+    session1 = client1.transport.get_instance_os_policies_compliance._session
+    session2 = client2.transport.get_instance_os_policies_compliance._session
+    assert session1 != session2
+    session1 = client1.transport.list_instance_os_policies_compliances._session
+    session2 = client2.transport.list_instance_os_policies_compliances._session
+    assert session1 != session2
+    session1 = client1.transport.get_os_policy_assignment_report._session
+    session2 = client2.transport.get_os_policy_assignment_report._session
+    assert session1 != session2
+    session1 = client1.transport.list_os_policy_assignment_reports._session
+    session2 = client2.transport.list_os_policy_assignment_reports._session
+    assert session1 != session2
+    session1 = client1.transport.get_inventory._session
+    session2 = client2.transport.get_inventory._session
+    assert session1 != session2
+    session1 = client1.transport.list_inventories._session
+    session2 = client2.transport.list_inventories._session
+    assert session1 != session2
+    session1 = client1.transport.get_vulnerability_report._session
+    session2 = client2.transport.get_vulnerability_report._session
+    assert session1 != session2
+    session1 = client1.transport.list_vulnerability_reports._session
+    session2 = client2.transport.list_vulnerability_reports._session
+    assert session1 != session2
+
+
+def test_os_config_zonal_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.OsConfigZonalServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_os_config_zonal_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.OsConfigZonalServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.OsConfigZonalServiceGrpcTransport,
+        transports.OsConfigZonalServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_os_config_zonal_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.OsConfigZonalServiceGrpcTransport,
+        transports.OsConfigZonalServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_os_config_zonal_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_os_config_zonal_service_grpc_lro_client():
+    client = OsConfigZonalServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
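The `is`-identity assertions on `transport.operations_client` (here and in the REST and async variants) verify property caching: the operations client is built on first access and then reused. The stdlib expresses the same pattern directly (a minimal sketch, not the transport's actual implementation):

```python
from functools import cached_property


class Transport:
    @cached_property
    def operations_client(self):
        """Built on first access, then stored on the instance."""
        print("building operations client once...")
        return object()


t = Transport()
assert t.operations_client is t.operations_client  # same object every time
```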
+    assert transport.operations_client is transport.operations_client
+
+
+def test_os_config_zonal_service_grpc_lro_async_client():
+    client = OsConfigZonalServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property return the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_instance_path():
+    project = "squid"
+    location = "clam"
+    instance = "whelk"
+    expected = "projects/{project}/locations/{location}/instances/{instance}".format(
+        project=project,
+        location=location,
+        instance=instance,
+    )
+    actual = OsConfigZonalServiceClient.instance_path(project, location, instance)
+    assert expected == actual
+
+
+def test_parse_instance_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "instance": "nudibranch",
+    }
+    path = OsConfigZonalServiceClient.instance_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = OsConfigZonalServiceClient.parse_instance_path(path)
+    assert expected == actual
+
+
+def test_instance_os_policies_compliance_path():
+    project = "cuttlefish"
+    location = "mussel"
+    instance = "winkle"
+    expected = "projects/{project}/locations/{location}/instanceOSPoliciesCompliances/{instance}".format(
+        project=project,
+        location=location,
+        instance=instance,
+    )
+    actual = OsConfigZonalServiceClient.instance_os_policies_compliance_path(
+        project, location, instance
+    )
+    assert expected == actual
+
+
+def test_parse_instance_os_policies_compliance_path():
+    expected = {
+        "project": "nautilus",
+        "location": "scallop",
+        "instance": "abalone",
+    }
+    path = OsConfigZonalServiceClient.instance_os_policies_compliance_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = OsConfigZonalServiceClient.parse_instance_os_policies_compliance_path(path)
+    assert expected == actual
+
+
+def test_instance_os_policy_assignment_path():
+    project = "squid"
+    location = "clam"
+    instance = "whelk"
+    assignment = "octopus"
+    expected = "projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}".format(
+        project=project,
+        location=location,
+        instance=instance,
+        assignment=assignment,
+    )
+    actual = OsConfigZonalServiceClient.instance_os_policy_assignment_path(
+        project, location, instance, assignment
+    )
+    assert expected == actual
+
+
+def test_parse_instance_os_policy_assignment_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "instance": "cuttlefish",
+        "assignment": "mussel",
+    }
+    path = OsConfigZonalServiceClient.instance_os_policy_assignment_path(**expected)
+
+    # Check that the path construction is reversible.
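+    # parse_* helpers are expected to recover the template fields from a
+    # formatted path, so round-tripping the keyword dict through the
+    # formatter and parser should be lossless.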
+ actual = OsConfigZonalServiceClient.parse_instance_os_policy_assignment_path(path) + assert expected == actual + + +def test_inventory_path(): + project = "winkle" + location = "nautilus" + instance = "scallop" + expected = ( + "projects/{project}/locations/{location}/instances/{instance}/inventory".format( + project=project, + location=location, + instance=instance, + ) + ) + actual = OsConfigZonalServiceClient.inventory_path(project, location, instance) + assert expected == actual + + +def test_parse_inventory_path(): + expected = { + "project": "abalone", + "location": "squid", + "instance": "clam", + } + path = OsConfigZonalServiceClient.inventory_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_inventory_path(path) + assert expected == actual + + +def test_os_policy_assignment_path(): + project = "whelk" + location = "octopus" + os_policy_assignment = "oyster" + expected = "projects/{project}/locations/{location}/osPolicyAssignments/{os_policy_assignment}".format( + project=project, + location=location, + os_policy_assignment=os_policy_assignment, + ) + actual = OsConfigZonalServiceClient.os_policy_assignment_path( + project, location, os_policy_assignment + ) + assert expected == actual + + +def test_parse_os_policy_assignment_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "os_policy_assignment": "mussel", + } + path = OsConfigZonalServiceClient.os_policy_assignment_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_os_policy_assignment_path(path) + assert expected == actual + + +def test_os_policy_assignment_report_path(): + project = "winkle" + location = "nautilus" + instance = "scallop" + assignment = "abalone" + expected = "projects/{project}/locations/{location}/instances/{instance}/osPolicyAssignments/{assignment}/report".format( + project=project, + location=location, + instance=instance, + assignment=assignment, + ) + actual = OsConfigZonalServiceClient.os_policy_assignment_report_path( + project, location, instance, assignment + ) + assert expected == actual + + +def test_parse_os_policy_assignment_report_path(): + expected = { + "project": "squid", + "location": "clam", + "instance": "whelk", + "assignment": "octopus", + } + path = OsConfigZonalServiceClient.os_policy_assignment_report_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_os_policy_assignment_report_path(path) + assert expected == actual + + +def test_vulnerability_report_path(): + project = "oyster" + location = "nudibranch" + instance = "cuttlefish" + expected = "projects/{project}/locations/{location}/instances/{instance}/vulnerabilityReport".format( + project=project, + location=location, + instance=instance, + ) + actual = OsConfigZonalServiceClient.vulnerability_report_path( + project, location, instance + ) + assert expected == actual + + +def test_parse_vulnerability_report_path(): + expected = { + "project": "mussel", + "location": "winkle", + "instance": "nautilus", + } + path = OsConfigZonalServiceClient.vulnerability_report_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OsConfigZonalServiceClient.parse_vulnerability_report_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = OsConfigZonalServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = OsConfigZonalServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = OsConfigZonalServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = OsConfigZonalServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = OsConfigZonalServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = OsConfigZonalServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = OsConfigZonalServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = OsConfigZonalServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = OsConfigZonalServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = OsConfigZonalServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = OsConfigZonalServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OsConfigZonalServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.OsConfigZonalServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.OsConfigZonalServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = OsConfigZonalServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = OsConfigZonalServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = OsConfigZonalServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (OsConfigZonalServiceClient, transports.OsConfigZonalServiceGrpcTransport), + ( + OsConfigZonalServiceAsyncClient, + transports.OsConfigZonalServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-scheduler/.OwlBot.yaml b/packages/google-cloud-scheduler/.OwlBot.yaml new file mode 100644 index 000000000000..7862831301ca --- /dev/null +++ b/packages/google-cloud-scheduler/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/scheduler/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-scheduler/$1 + +begin-after-commit-hash: dfccedc726d558444c665121fb5c1d08a5978e94 + diff --git a/packages/google-cloud-scheduler/.coveragerc b/packages/google-cloud-scheduler/.coveragerc new file mode 100644 index 000000000000..cb4a55fb56dc --- /dev/null +++ b/packages/google-cloud-scheduler/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/scheduler/__init__.py + google/cloud/scheduler/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-scheduler/.flake8 b/packages/google-cloud-scheduler/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-scheduler/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-scheduler/.gitignore b/packages/google-cloud-scheduler/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-scheduler/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-scheduler/.repo-metadata.json b/packages/google-cloud-scheduler/.repo-metadata.json new file mode 100644 index 000000000000..48ee081f2d6f --- /dev/null +++ b/packages/google-cloud-scheduler/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "cloudscheduler", + "name_pretty": "Cloud Scheduler", + "product_documentation": "https://cloud.google.com/scheduler/docs", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudscheduler/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5411429", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-scheduler", + "api_id": "cloudscheduler.googleapis.com", + "requires_billing": true, + "default_version": "v1", + "codeowner_team": "@googleapis/aap-dpes", + "api_shortname": "cloudscheduler", + "api_description": "lets you set up scheduled units of work to be executed at defined times or regular intervals. These work units are commonly known as cron jobs. Typical use cases might include sending out a report email on a daily basis, updating some cached data every 10 minutes, or updating some summary information once an hour." +} diff --git a/packages/google-cloud-scheduler/CHANGELOG.md b/packages/google-cloud-scheduler/CHANGELOG.md new file mode 100644 index 000000000000..871b73f2c207 --- /dev/null +++ b/packages/google-cloud-scheduler/CHANGELOG.md @@ -0,0 +1,396 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-scheduler/#history + +## [2.11.1](https://github.com/googleapis/python-scheduler/compare/v2.11.0...v2.11.1) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#340](https://github.com/googleapis/python-scheduler/issues/340)) ([7e65978](https://github.com/googleapis/python-scheduler/commit/7e65978db72b38d1fed273562df86dd058cfd271)) + +## [2.11.0](https://github.com/googleapis/python-scheduler/compare/v2.10.0...v2.11.0) (2023-03-23) + + +### Features + +* Location API methods ([#324](https://github.com/googleapis/python-scheduler/issues/324)) ([662e648](https://github.com/googleapis/python-scheduler/commit/662e6489710ea62b86b43d04f9bc69f9bc96e8b8)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#328](https://github.com/googleapis/python-scheduler/issues/328)) ([94e406e](https://github.com/googleapis/python-scheduler/commit/94e406eb242b027824536e597ced29b6f82cba97)) + +## [2.10.0](https://github.com/googleapis/python-scheduler/compare/v2.9.1...v2.10.0) (2023-02-28) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([3de2adb](https://github.com/googleapis/python-scheduler/commit/3de2adbc753902bdfa72085567da4e45a520416e)) + +## [2.9.1](https://github.com/googleapis/python-scheduler/compare/v2.9.0...v2.9.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([1ebe016](https://github.com/googleapis/python-scheduler/commit/1ebe016e8c755cd94465090079a98d304ca0e730)) + + +### Documentation + +* Add documentation for enums ([1ebe016](https://github.com/googleapis/python-scheduler/commit/1ebe016e8c755cd94465090079a98d304ca0e730)) + +## [2.9.0](https://github.com/googleapis/python-scheduler/compare/v2.8.0...v2.9.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 
([#307](https://github.com/googleapis/python-scheduler/issues/307)) ([455fa74](https://github.com/googleapis/python-scheduler/commit/455fa74db83f7ca93f822acec4002358cfd27f3e)) + +## [2.8.0](https://github.com/googleapis/python-scheduler/compare/v2.7.3...v2.8.0) (2022-12-14) + + +### Features + +* Add support for `google.cloud.scheduler.__version__` ([ab1a908](https://github.com/googleapis/python-scheduler/commit/ab1a9089bbb8f9dd0b4ea26afffa8ae7e7ad069a)) +* Add typing to proto.Message based class attributes ([ab1a908](https://github.com/googleapis/python-scheduler/commit/ab1a9089bbb8f9dd0b4ea26afffa8ae7e7ad069a)) +* Updated Client Libraries for Cloud Scheduler ([#304](https://github.com/googleapis/python-scheduler/issues/304)) ([a6cad2f](https://github.com/googleapis/python-scheduler/commit/a6cad2f03d2846f672f8403d38d2fcb9da69912b)) + + +### Bug Fixes + +* Add dict typing for client_options ([ab1a908](https://github.com/googleapis/python-scheduler/commit/ab1a9089bbb8f9dd0b4ea26afffa8ae7e7ad069a)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([75a677a](https://github.com/googleapis/python-scheduler/commit/75a677a30af56568ae365715056360d23f4b7f7c)) +* Drop usage of pkg_resources ([75a677a](https://github.com/googleapis/python-scheduler/commit/75a677a30af56568ae365715056360d23f4b7f7c)) +* Fix timeout default values ([75a677a](https://github.com/googleapis/python-scheduler/commit/75a677a30af56568ae365715056360d23f4b7f7c)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([ab1a908](https://github.com/googleapis/python-scheduler/commit/ab1a9089bbb8f9dd0b4ea26afffa8ae7e7ad069a)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([75a677a](https://github.com/googleapis/python-scheduler/commit/75a677a30af56568ae365715056360d23f4b7f7c)) + +## [2.7.3](https://github.com/googleapis/python-scheduler/compare/v2.7.2...v2.7.3) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#291](https://github.com/googleapis/python-scheduler/issues/291)) ([f0195d0](https://github.com/googleapis/python-scheduler/commit/f0195d0f5ef9e8b6342da965832a2a93fe795df2)) + +## [2.7.2](https://github.com/googleapis/python-scheduler/compare/v2.7.1...v2.7.2) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#288](https://github.com/googleapis/python-scheduler/issues/288)) ([c8357fe](https://github.com/googleapis/python-scheduler/commit/c8357fe07bd79d52b72f2733b7a7cf9557386b57)) + +## [2.7.1](https://github.com/googleapis/python-scheduler/compare/v2.7.0...v2.7.1) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#268](https://github.com/googleapis/python-scheduler/issues/268)) ([7081d77](https://github.com/googleapis/python-scheduler/commit/7081d777761e201a29b3d6d0542e30a1761350e0)) +* **deps:** require proto-plus >= 1.22.0 ([7081d77](https://github.com/googleapis/python-scheduler/commit/7081d777761e201a29b3d6d0542e30a1761350e0)) + +## [2.7.0](https://github.com/googleapis/python-scheduler/compare/v2.6.4...v2.7.0) (2022-07-16) + + +### Features + +* add audience parameter ([c8adf9c](https://github.com/googleapis/python-scheduler/commit/c8adf9c9877d4bbea2f5b282f95cb9f56011a94f)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 
([#258](https://github.com/googleapis/python-scheduler/issues/258)) ([a57f965](https://github.com/googleapis/python-scheduler/commit/a57f96505640623170ae6b86c604127d14481561)) +* require python 3.7+ ([#256](https://github.com/googleapis/python-scheduler/issues/256)) ([6b0faa0](https://github.com/googleapis/python-scheduler/commit/6b0faa00c155798fac1218a1f05cda54b3651f65)) + +## [2.6.4](https://github.com/googleapis/python-scheduler/compare/v2.6.3...v2.6.4) (2022-06-06) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#243](https://github.com/googleapis/python-scheduler/issues/243)) ([72b64ad](https://github.com/googleapis/python-scheduler/commit/72b64ad3ff1dbf92273c73e07c798f974f6afd5e)) + + +### Documentation + +* fix changelog header to consistent size ([#244](https://github.com/googleapis/python-scheduler/issues/244)) ([b6b6fd1](https://github.com/googleapis/python-scheduler/commit/b6b6fd1f607f7b67867e8dc31472c69c9ef20958)) + +## [2.6.3](https://github.com/googleapis/python-scheduler/compare/v2.6.2...v2.6.3) (2022-04-14) + + +### Bug Fixes + +* fix type in docstring for map fields ([#223](https://github.com/googleapis/python-scheduler/issues/223)) ([34d7478](https://github.com/googleapis/python-scheduler/commit/34d7478c4ac14489b36980099446b9520ff3eb4a)) + +## [2.6.2](https://github.com/googleapis/python-scheduler/compare/v2.6.1...v2.6.2) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#203](https://github.com/googleapis/python-scheduler/issues/203)) ([3e68808](https://github.com/googleapis/python-scheduler/commit/3e688088d09b0ff5af55571b4e47109638a47825)) +* **deps:** require proto-plus>=1.15.0 ([3e68808](https://github.com/googleapis/python-scheduler/commit/3e688088d09b0ff5af55571b4e47109638a47825)) + +## [2.6.1](https://github.com/googleapis/python-scheduler/compare/v2.6.0...v2.6.1) (2022-02-26) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([993ac1a](https://github.com/googleapis/python-scheduler/commit/993ac1a793fef60730546f1faae269624efb98f7)) + + +### Documentation + +* add generated snippets ([#189](https://github.com/googleapis/python-scheduler/issues/189)) ([eccf8c6](https://github.com/googleapis/python-scheduler/commit/eccf8c635d1c2a93d933ed1381da42a18d128fec)) + +## [2.6.0](https://github.com/googleapis/python-scheduler/compare/v2.5.1...v2.6.0) (2022-01-25) + + +### Features + +* add api key support ([#180](https://github.com/googleapis/python-scheduler/issues/180)) ([74eaf8b](https://github.com/googleapis/python-scheduler/commit/74eaf8b00c684c476d7a9f271880e83fc67dedac)) + +## [2.5.1](https://www.github.com/googleapis/python-scheduler/compare/v2.5.0...v2.5.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([240c125](https://www.github.com/googleapis/python-scheduler/commit/240c12575f8ac31c5262ad111a99a01b4dd4711a)) +* **deps:** require google-api-core >= 1.28.0 ([240c125](https://www.github.com/googleapis/python-scheduler/commit/240c12575f8ac31c5262ad111a99a01b4dd4711a)) + + +### Documentation + +* list oneofs in docstring ([240c125](https://www.github.com/googleapis/python-scheduler/commit/240c12575f8ac31c5262ad111a99a01b4dd4711a)) + +## [2.5.0](https://www.github.com/googleapis/python-scheduler/compare/v2.4.0...v2.5.0) (2021-10-25) + + +### Features + +* add support 
for python 3.10 ([#149](https://www.github.com/googleapis/python-scheduler/issues/149)) ([8c671d9](https://www.github.com/googleapis/python-scheduler/commit/8c671d928f9a39dc7c15cd1e700363e028eb61e7)) + +## [2.4.0](https://www.github.com/googleapis/python-scheduler/compare/v2.3.4...v2.4.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#144](https://www.github.com/googleapis/python-scheduler/issues/144)) ([4bb0fb6](https://www.github.com/googleapis/python-scheduler/commit/4bb0fb62f173edc09b640b0024d2be3bbd97b3b9)) + +## [2.3.4](https://www.github.com/googleapis/python-scheduler/compare/v2.3.3...v2.3.4) (2021-09-30) + + +### Bug Fixes + +* improper types in pagers generation ([a24ad41](https://www.github.com/googleapis/python-scheduler/commit/a24ad41ae62407bc542eb5362f2fd84a1370d3c2)) + +## [2.3.3](https://www.github.com/googleapis/python-scheduler/compare/v2.3.2...v2.3.3) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([b8b77d1](https://www.github.com/googleapis/python-scheduler/commit/b8b77d11a46136baf2739cfdf48060bd4bfc10fa)) + +## [2.3.2](https://www.github.com/googleapis/python-scheduler/compare/v2.3.1...v2.3.2) (2021-07-28) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#116](https://www.github.com/googleapis/python-scheduler/issues/116)) ([a18fe2a](https://www.github.com/googleapis/python-scheduler/commit/a18fe2a4fe3dff4550687c0853aee351b54596de)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#111](https://www.github.com/googleapis/python-scheduler/issues/111)) ([ed91668](https://www.github.com/googleapis/python-scheduler/commit/ed9166882974b4eadb63df4e5278e88aeb0f8d89)) + + +### Miscellaneous Chores + +* release as 2.3.2 ([#117](https://www.github.com/googleapis/python-scheduler/issues/117)) ([f06e90b](https://www.github.com/googleapis/python-scheduler/commit/f06e90b68a9021108a732ca9733a80f447489be8)) + +## [2.3.1](https://www.github.com/googleapis/python-scheduler/compare/v2.3.0...v2.3.1) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#110](https://www.github.com/googleapis/python-scheduler/issues/110)) ([2b68578](https://www.github.com/googleapis/python-scheduler/commit/2b6857876f22441960badebbcdfac19130b1af9a)) + +## [2.3.0](https://www.github.com/googleapis/python-scheduler/compare/v2.2.0...v2.3.0) (2021-07-01) + + +### Features + +* add always_use_jwt_access ([#102](https://www.github.com/googleapis/python-scheduler/issues/102)) ([bd5550b](https://www.github.com/googleapis/python-scheduler/commit/bd5550b4c7732ad20c5a16fa0ac2c9f86704b8fc)) + + +### Bug Fixes + +* **deps:** add packaging requirement ([#89](https://www.github.com/googleapis/python-scheduler/issues/89)) ([8966559](https://www.github.com/googleapis/python-scheduler/commit/8966559b7bf2e4409906ca4a5eb831a011ba3484)) +* disable always_use_jwt_access ([#106](https://www.github.com/googleapis/python-scheduler/issues/106)) ([c8dd497](https://www.github.com/googleapis/python-scheduler/commit/c8dd497c56f475c63c05c2ba5708067cc03c4173)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-scheduler/issues/1127)) ([#99](https://www.github.com/googleapis/python-scheduler/issues/99)) ([2dcbcdf](https://www.github.com/googleapis/python-scheduler/commit/2dcbcdf36c7678ee62d2b76ea31bee69f597d3b2)), closes [#1126](https://www.github.com/googleapis/python-scheduler/issues/1126) + +## 
[2.2.0](https://www.github.com/googleapis/python-scheduler/compare/v2.1.1...v2.2.0) (2021-03-31) + + +### Features + +* add `from_service_account_info` ([#67](https://www.github.com/googleapis/python-scheduler/issues/67)) ([bd21900](https://www.github.com/googleapis/python-scheduler/commit/bd2190046269eea1e08111b97f01e845f748b8e5)) + +## [2.1.1](https://www.github.com/googleapis/python-scheduler/compare/v2.1.0...v2.1.1) (2021-02-08) + + +### Bug Fixes + +* remove client recv msg limit and add enums to `types/__init__.py` ([#46](https://www.github.com/googleapis/python-scheduler/issues/46)) ([b6a9feb](https://www.github.com/googleapis/python-scheduler/commit/b6a9feb31aec9ee1aa4eb46ccd44dcc8e6cc27a7)) + +## [2.1.0](https://www.github.com/googleapis/python-scheduler/compare/v2.0.0...v2.1.0) (2020-12-08) + + +### Features + +* add common resource helpers; expose client transport ([#41](https://www.github.com/googleapis/python-scheduler/issues/41)) ([f9fc0f9](https://www.github.com/googleapis/python-scheduler/commit/f9fc0f9613302de642680c87286de0a02f09d086)) + +## [2.0.0](https://www.github.com/googleapis/python-scheduler/compare/v1.3.0...v2.0.0) (2020-08-27) + + +### ⚠ BREAKING CHANGES + +* migrate to microgenerator (#29) + +### Features + +* migrate to microgenerator ([#29](https://www.github.com/googleapis/python-scheduler/issues/29)) ([82f66ed](https://www.github.com/googleapis/python-scheduler/commit/82f66ed9c163b2f6597bf5661469ca9ca1bef741)) + + +### Bug Fixes + +* update retry configs ([#20](https://www.github.com/googleapis/python-scheduler/issues/20)) ([7f82c9f](https://www.github.com/googleapis/python-scheduler/commit/7f82c9ffc292d72907de66bf6d5fa39e38d26085)) + +## [1.3.0](https://www.github.com/googleapis/python-scheduler/compare/v1.2.1...v1.3.0) (2020-04-21) + + +### ⚠ BREAKING CHANGES + +* **scheduler:** remove `project_path` method, update docstrings (via synth) (#9522) + +### Bug Fixes + +* **scheduler:** remove `project_path` method, update docstrings (via synth) ([#9522](https://www.github.com/googleapis/python-scheduler/issues/9522)) ([36c611b](https://www.github.com/googleapis/python-scheduler/commit/36c611bdd1504918ecec39f7846c533b1e7b181c)) +* add python 2.7 deprecation warning (via synth) ([#9](https://www.github.com/googleapis/python-scheduler/issues/9)) ([d17f5ff](https://www.github.com/googleapis/python-scheduler/commit/d17f5ffd8d6030190e3529d6eed5c9899145dd96)) + +## 1.2.1 + +08-12-2019 13:53 PDT + +### Implementation Changes +- Remove send/recv msg size limit (via synth). ([#8966](https://github.com/googleapis/google-cloud-python/pull/8966)) + +### Documentation +- Fix links to googleapis.dev ([#8998](https://github.com/googleapis/google-cloud-python/pull/8998)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + +## 1.2.0 + +07-24-2019 17:27 PDT + + +### Implementation Changes +- Allow kwargs to be passed to create_channel (via synth). ([#8401](https://github.com/googleapis/google-cloud-python/pull/8401)) + +### New Features +- Add 'client_options' support, update list method docstrings (via synth). ([#8520](https://github.com/googleapis/google-cloud-python/pull/8520)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) +- Pin black version (via synth). 
([#8593](https://github.com/googleapis/google-cloud-python/pull/8593)) + +### Documentation +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) + +### Internal / Testing Changes +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Declare encoding as utf-8 in pb2 files (via synth). ([#8361](https://github.com/googleapis/google-cloud-python/pull/8361)) +- Add disclaimer to auto-generated template (via synth). ([#8325](https://github.com/googleapis/google-cloud-python/pull/8325)) +- Suppress checking 'cov-fail-under' in nox default session (via synth). ([#8250](https://github.com/googleapis/google-cloud-python/pull/8250)) +- Fix coverage in 'types.py' (via synth). ([#8162](https://github.com/googleapis/google-cloud-python/pull/8162)) +- Blacken noxfile.py, setup.py (via synth). ([#8130](https://github.com/googleapis/google-cloud-python/pull/8130)) +- Add empty lines (via synth). ([#8069](https://github.com/googleapis/google-cloud-python/pull/8069)) + +## 1.1.0 + +05-13-2019 13:15 PDT + +### New Features +- Add authorization headers and deadline for job attempts (via synth). ([#7938](https://github.com/googleapis/google-cloud-python/pull/7938)) + +### Internal / Testing Changes +- Add nox session `docs`, reorder methods (via synth). ([#7779](https://github.com/googleapis/google-cloud-python/pull/7779)) + +## 1.0.0 + +05-03-2019 10:04 PDT + +### Internal / Testing Changes +- Add smoke test for scheduler. ([#7854](https://github.com/googleapis/google-cloud-python/pull/7854)) + +## 0.3.0 + +04-15-2019 10:32 PDT + + +### New Features +- add auth and configurable timeouts to v1beta1 (via synth). ([#7665](https://github.com/googleapis/google-cloud-python/pull/7665)) + +## 0.2.0 + +04-01-2019 15:39 PDT + + +### Implementation Changes +- Add routing header to method metadata (via synth). ([#7599](https://github.com/googleapis/google-cloud-python/pull/7599)) +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) +- Remove unused message exports. ([#7273](https://github.com/googleapis/google-cloud-python/pull/7273)) +- Protoc-generated serialization update. ([#7093](https://github.com/googleapis/google-cloud-python/pull/7093)) +- Protoc-generated serialization update. ([#7055](https://github.com/googleapis/google-cloud-python/pull/7055)) +- Use moved iam.policy now at google.api_core.iam.policy. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) + +### New Features +- Add v1. ([#7608](https://github.com/googleapis/google-cloud-python/pull/7608)) +- Pick up fixes to GAPIC generator. ([#6505](https://github.com/googleapis/google-cloud-python/pull/6505)) + +### Documentation +- googlecloudplatform --> googleapis in READMEs. ([#7411](https://github.com/googleapis/google-cloud-python/pull/7411)) +- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) +- Update copyright year. ([#7155](https://github.com/googleapis/google-cloud-python/pull/7155)) +- Correct a link in a documentation string. 
([#7119](https://github.com/googleapis/google-cloud-python/pull/7119)) +- Pick up stub docstring fix in GAPIC generator. ([#6980](https://github.com/googleapis/google-cloud-python/pull/6980)) +- Document Python 2 deprecation. ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Update link for Scheduler Docs. ([#6925](https://github.com/googleapis/google-cloud-python/pull/6925)) + +### Internal / Testing Changes +- Copy lintified proto files (via synth). ([#7469](https://github.com/googleapis/google-cloud-python/pull/7469)) +- Add clarifying comment to blacken nox target. ([#7401](https://github.com/googleapis/google-cloud-python/pull/7401)) +- Add protos as an artifact to library. ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) +- Add baseline for synth.metadata. ([#6792](https://github.com/googleapis/google-cloud-python/pull/6865)) +- Update noxfile. ([#6814](https://github.com/googleapis/google-cloud-python/pull/6814)) +- Blacken all gen'd libs. ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- Omit local deps. ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py. ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Run Black on Generated libraries. ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) + +## 0.1.0 + +11-13-2018 11:03 PST + + +### New Features +- Initial release of Cloud Scheduler library. ([#6482](https://github.com/googleapis/google-cloud-python/pull/6482)) diff --git a/packages/google-cloud-scheduler/CODE_OF_CONDUCT.md b/packages/google-cloud-scheduler/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-scheduler/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-scheduler/CONTRIBUTING.rst b/packages/google-cloud-scheduler/CONTRIBUTING.rst new file mode 100644 index 000000000000..7d4738e24457 --- /dev/null +++ b/packages/google-cloud-scheduler/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+    $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+    $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+    $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+    export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+    export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+    # Run all system tests
+    $ nox -s system
+
+    # Run a single system test
+    $ nox -s system-3.11 -- -k <name of system test>
+
+
+  .. note::
+
+    System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
+    For expediency, we do not run them in older versions of Python 3.
+
+  This alone will not run the tests. You'll need to change some local
+  auth settings and change some configuration in your project to
+  run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+    # Run all tests in a folder
+    $ cd samples/snippets
+    $ nox -s py-3.8
+
+    # Run a single sample test
+    $ cd samples/snippets
+    $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-scheduler
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-scheduler/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-scheduler/LICENSE b/packages/google-cloud-scheduler/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-cloud-scheduler/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-scheduler/MANIFEST.in b/packages/google-cloud-scheduler/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-scheduler/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-scheduler/README.rst b/packages/google-cloud-scheduler/README.rst new file mode 100644 index 000000000000..a5a099c6e13f --- /dev/null +++ b/packages/google-cloud-scheduler/README.rst @@ -0,0 +1,108 @@ +Python Client for Cloud Scheduler +================================= + +|stable| |pypi| |versions| + +`Cloud Scheduler`_: lets you set up scheduled units of work to be executed at defined times or regular intervals. These work units are commonly known as cron jobs. Typical use cases might include sending out a report email on a daily basis, updating some cached data every 10 minutes, or updating some summary information once an hour. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-scheduler.svg + :target: https://pypi.org/project/google-cloud-scheduler/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-scheduler.svg + :target: https://pypi.org/project/google-cloud-scheduler/ +.. _Cloud Scheduler: https://cloud.google.com/scheduler/docs +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/cloudscheduler/latest +.. _Product Documentation: https://cloud.google.com/scheduler/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Cloud Scheduler.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud Scheduler.: https://cloud.google.com/scheduler/docs +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. 
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-scheduler
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-scheduler
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Cloud Scheduler
+  to see other available methods on the client.
+- Read the `Cloud Scheduler Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Cloud Scheduler Product documentation: https://cloud.google.com/scheduler/docs
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-scheduler/SECURITY.md b/packages/google-cloud-scheduler/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-scheduler/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
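A minimal usage sketch for the package added above, assuming ``pip install google-cloud-scheduler`` has completed and Application Default Credentials are configured; ``PROJECT_ID`` and ``LOCATION_ID`` are placeholders to replace with your own values, and the call mirrors the generated ``list_jobs`` sample that appears later in this diff:

.. code-block:: python

    from google.cloud import scheduler_v1


    def list_scheduler_jobs() -> None:
        # The client picks up credentials from the environment
        # (GOOGLE_APPLICATION_CREDENTIALS or gcloud application-default login).
        client = scheduler_v1.CloudSchedulerClient()

        # PROJECT_ID and LOCATION_ID below are placeholders, not real values.
        request = scheduler_v1.ListJobsRequest(
            parent="projects/PROJECT_ID/locations/LOCATION_ID",
        )

        # list_jobs returns a pager; iterating fetches further pages lazily.
        for job in client.list_jobs(request=request):
            print(job.name)


    if __name__ == "__main__":
        list_scheduler_jobs()

Because the client is thread-safe (see ``docs/multiprocessing.rst`` below), a single instance can be shared across worker threads; in multiprocessing code, create client instances only after ``os.fork``.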
diff --git a/packages/google-cloud-scheduler/docs/CHANGELOG.md b/packages/google-cloud-scheduler/docs/CHANGELOG.md
new file mode 120000
index 000000000000..04c99a55caae
--- /dev/null
+++ b/packages/google-cloud-scheduler/docs/CHANGELOG.md
@@ -0,0 +1 @@
+../CHANGELOG.md
\ No newline at end of file
diff --git a/packages/google-cloud-scheduler/docs/README.rst b/packages/google-cloud-scheduler/docs/README.rst
new file mode 120000
index 000000000000..89a0106941ff
--- /dev/null
+++ b/packages/google-cloud-scheduler/docs/README.rst
@@ -0,0 +1 @@
+../README.rst
\ No newline at end of file
diff --git a/packages/google-cloud-scheduler/docs/_static/custom.css b/packages/google-cloud-scheduler/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/packages/google-cloud-scheduler/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/packages/google-cloud-scheduler/docs/_templates/layout.html b/packages/google-cloud-scheduler/docs/_templates/layout.html
new file mode 100644
index 000000000000..6316a537f72b
--- /dev/null
+++ b/packages/google-cloud-scheduler/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &#160;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &#160;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-scheduler/docs/conf.py b/packages/google-cloud-scheduler/docs/conf.py new file mode 100644 index 000000000000..6c1278df8ab1 --- /dev/null +++ b/packages/google-cloud-scheduler/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-scheduler documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-scheduler" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = [
+    "_build",
+    "**/.nox/**/*",
+    "samples/AUTHORING_GUIDE.md",
+    "samples/CONTRIBUTING.md",
+    "samples/snippets/README.rst",
+]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for google-cloud-scheduler",
+    "github_user": "googleapis",
+    "github_repo": "google-cloud-python",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-scheduler-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+    # Temporarily suppress this to avoid "more than one target found for
+    # cross-reference" warnings, which are intractable for us to avoid while in
+    # a mono-repo.
+    # See https://github.com/sphinx-doc/sphinx/blob
+    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+    "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    #'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
+    # Latex figure (float) alignment
+    #'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (
+        root_doc,
+        "google-cloud-scheduler.tex",
+        "google-cloud-scheduler Documentation",
+        author,
+        "manual",
+    )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-scheduler", + "google-cloud-scheduler Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-scheduler", + "google-cloud-scheduler Documentation", + author, + "google-cloud-scheduler", + "google-cloud-scheduler Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-scheduler/docs/index.rst b/packages/google-cloud-scheduler/docs/index.rst new file mode 100644 index 000000000000..1b8d4ee5153b --- /dev/null +++ b/packages/google-cloud-scheduler/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of Cloud Scheduler. +By default, you will get version ``scheduler_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + scheduler_v1/services + scheduler_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + scheduler_v1beta1/services + scheduler_v1beta1/types + + +Changelog +--------- + +For a list of all ``google-cloud-scheduler`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-scheduler/docs/multiprocessing.rst b/packages/google-cloud-scheduler/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-scheduler/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-scheduler/docs/scheduler_v1/cloud_scheduler.rst b/packages/google-cloud-scheduler/docs/scheduler_v1/cloud_scheduler.rst new file mode 100644 index 000000000000..b4984390d22c --- /dev/null +++ b/packages/google-cloud-scheduler/docs/scheduler_v1/cloud_scheduler.rst @@ -0,0 +1,10 @@ +CloudScheduler +-------------------------------- + +.. automodule:: google.cloud.scheduler_v1.services.cloud_scheduler + :members: + :inherited-members: + +.. automodule:: google.cloud.scheduler_v1.services.cloud_scheduler.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-scheduler/docs/scheduler_v1/services.rst b/packages/google-cloud-scheduler/docs/scheduler_v1/services.rst new file mode 100644 index 000000000000..a320925c73ee --- /dev/null +++ b/packages/google-cloud-scheduler/docs/scheduler_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Scheduler v1 API +========================================== +.. toctree:: + :maxdepth: 2 + + cloud_scheduler diff --git a/packages/google-cloud-scheduler/docs/scheduler_v1/types.rst b/packages/google-cloud-scheduler/docs/scheduler_v1/types.rst new file mode 100644 index 000000000000..f9aa5666066b --- /dev/null +++ b/packages/google-cloud-scheduler/docs/scheduler_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Scheduler v1 API +======================================= + +.. automodule:: google.cloud.scheduler_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-scheduler/docs/scheduler_v1beta1/cloud_scheduler.rst b/packages/google-cloud-scheduler/docs/scheduler_v1beta1/cloud_scheduler.rst new file mode 100644 index 000000000000..17730e94d61f --- /dev/null +++ b/packages/google-cloud-scheduler/docs/scheduler_v1beta1/cloud_scheduler.rst @@ -0,0 +1,10 @@ +CloudScheduler +-------------------------------- + +.. automodule:: google.cloud.scheduler_v1beta1.services.cloud_scheduler + :members: + :inherited-members: + +.. automodule:: google.cloud.scheduler_v1beta1.services.cloud_scheduler.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-scheduler/docs/scheduler_v1beta1/services.rst b/packages/google-cloud-scheduler/docs/scheduler_v1beta1/services.rst new file mode 100644 index 000000000000..4dbe3cd9d8b2 --- /dev/null +++ b/packages/google-cloud-scheduler/docs/scheduler_v1beta1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Scheduler v1beta1 API +=============================================== +.. toctree:: + :maxdepth: 2 + + cloud_scheduler diff --git a/packages/google-cloud-scheduler/docs/scheduler_v1beta1/types.rst b/packages/google-cloud-scheduler/docs/scheduler_v1beta1/types.rst new file mode 100644 index 000000000000..157968a2fec4 --- /dev/null +++ b/packages/google-cloud-scheduler/docs/scheduler_v1beta1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Scheduler v1beta1 API +============================================ + +.. 
automodule:: google.cloud.scheduler_v1beta1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler/__init__.py new file mode 100644 index 000000000000..8e9538997a07 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler/__init__.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.scheduler import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.scheduler_v1.services.cloud_scheduler.async_client import ( + CloudSchedulerAsyncClient, +) +from google.cloud.scheduler_v1.services.cloud_scheduler.client import ( + CloudSchedulerClient, +) +from google.cloud.scheduler_v1.types.cloudscheduler import ( + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + ListJobsRequest, + ListJobsResponse, + PauseJobRequest, + ResumeJobRequest, + RunJobRequest, + UpdateJobRequest, +) +from google.cloud.scheduler_v1.types.job import Job, RetryConfig +from google.cloud.scheduler_v1.types.target import ( + AppEngineHttpTarget, + AppEngineRouting, + HttpMethod, + HttpTarget, + OAuthToken, + OidcToken, + PubsubTarget, +) + +__all__ = ( + "CloudSchedulerClient", + "CloudSchedulerAsyncClient", + "CreateJobRequest", + "DeleteJobRequest", + "GetJobRequest", + "ListJobsRequest", + "ListJobsResponse", + "PauseJobRequest", + "ResumeJobRequest", + "RunJobRequest", + "UpdateJobRequest", + "Job", + "RetryConfig", + "AppEngineHttpTarget", + "AppEngineRouting", + "HttpTarget", + "OAuthToken", + "OidcToken", + "PubsubTarget", + "HttpMethod", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler/gapic_version.py b/packages/google-cloud-scheduler/google/cloud/scheduler/gapic_version.py new file mode 100644 index 000000000000..9ac1d4a82044 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler/py.typed b/packages/google-cloud-scheduler/google/cloud/scheduler/py.typed new file mode 100644 index 000000000000..ca4d524bcfe7 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. 
+# The google-cloud-scheduler package uses inline types. diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/__init__.py new file mode 100644 index 000000000000..2a93f2f1f69e --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/__init__.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.scheduler_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_scheduler import CloudSchedulerAsyncClient, CloudSchedulerClient +from .types.cloudscheduler import ( + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + ListJobsRequest, + ListJobsResponse, + PauseJobRequest, + ResumeJobRequest, + RunJobRequest, + UpdateJobRequest, +) +from .types.job import Job, RetryConfig +from .types.target import ( + AppEngineHttpTarget, + AppEngineRouting, + HttpMethod, + HttpTarget, + OAuthToken, + OidcToken, + PubsubTarget, +) + +__all__ = ( + "CloudSchedulerAsyncClient", + "AppEngineHttpTarget", + "AppEngineRouting", + "CloudSchedulerClient", + "CreateJobRequest", + "DeleteJobRequest", + "GetJobRequest", + "HttpMethod", + "HttpTarget", + "Job", + "ListJobsRequest", + "ListJobsResponse", + "OAuthToken", + "OidcToken", + "PauseJobRequest", + "PubsubTarget", + "ResumeJobRequest", + "RetryConfig", + "RunJobRequest", + "UpdateJobRequest", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_metadata.json b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_metadata.json new file mode 100644 index 000000000000..cca5dd3a3879 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_metadata.json @@ -0,0 +1,148 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.scheduler_v1", + "protoPackage": "google.cloud.scheduler.v1", + "schema": "1.0", + "services": { + "CloudScheduler": { + "clients": { + "grpc": { + "libraryClient": "CloudSchedulerClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "PauseJob": { + "methods": [ + "pause_job" + ] + }, + "ResumeJob": { + "methods": [ + "resume_job" + ] + }, + "RunJob": { + "methods": [ + "run_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudSchedulerAsyncClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "PauseJob": { + "methods": [ + "pause_job" + ] + }, + "ResumeJob": { + "methods": [ + "resume_job" + ] + }, 
+ "RunJob": { + "methods": [ + "run_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "rest": { + "libraryClient": "CloudSchedulerClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "PauseJob": { + "methods": [ + "pause_job" + ] + }, + "ResumeJob": { + "methods": [ + "resume_job" + ] + }, + "RunJob": { + "methods": [ + "run_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_version.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_version.py new file mode 100644 index 000000000000..9ac1d4a82044 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/py.typed b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/py.typed new file mode 100644 index 000000000000..ca4d524bcfe7 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-scheduler package uses inline types. diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/__init__.py new file mode 100644 index 000000000000..7000a11dd2bb --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CloudSchedulerAsyncClient +from .client import CloudSchedulerClient + +__all__ = ( + "CloudSchedulerClient", + "CloudSchedulerAsyncClient", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py new file mode 100644 index 000000000000..b11589c56b60 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py @@ -0,0 +1,1268 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.scheduler_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.scheduler_v1.services.cloud_scheduler import pagers +from google.cloud.scheduler_v1.types import cloudscheduler +from google.cloud.scheduler_v1.types import job +from google.cloud.scheduler_v1.types import job as gcs_job +from google.cloud.scheduler_v1.types import target + +from .client import CloudSchedulerClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudSchedulerTransport +from .transports.grpc_asyncio import CloudSchedulerGrpcAsyncIOTransport + + +class CloudSchedulerAsyncClient: + """The Cloud Scheduler API allows external entities to reliably + schedule asynchronous jobs. + """ + + _client: CloudSchedulerClient + + DEFAULT_ENDPOINT = CloudSchedulerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudSchedulerClient.DEFAULT_MTLS_ENDPOINT + + job_path = staticmethod(CloudSchedulerClient.job_path) + parse_job_path = staticmethod(CloudSchedulerClient.parse_job_path) + topic_path = staticmethod(CloudSchedulerClient.topic_path) + parse_topic_path = staticmethod(CloudSchedulerClient.parse_topic_path) + common_billing_account_path = staticmethod( + CloudSchedulerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudSchedulerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CloudSchedulerClient.common_folder_path) + parse_common_folder_path = staticmethod( + CloudSchedulerClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + CloudSchedulerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + CloudSchedulerClient.parse_common_organization_path + ) + common_project_path = staticmethod(CloudSchedulerClient.common_project_path) + parse_common_project_path = staticmethod( + CloudSchedulerClient.parse_common_project_path + ) + common_location_path = staticmethod(CloudSchedulerClient.common_location_path) + parse_common_location_path = staticmethod( + CloudSchedulerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudSchedulerAsyncClient: The constructed client. 
+        """
+        return CloudSchedulerClient.from_service_account_info.__func__(CloudSchedulerAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudSchedulerAsyncClient: The constructed client.
+        """
+        return CloudSchedulerClient.from_service_account_file.__func__(CloudSchedulerAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return CloudSchedulerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> CloudSchedulerTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CloudSchedulerTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(CloudSchedulerClient).get_transport_class, type(CloudSchedulerClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, CloudSchedulerTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cloud scheduler client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.CloudSchedulerTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client.
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudSchedulerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_jobs( + self, + request: Optional[Union[cloudscheduler.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""Lists jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + async def sample_list_jobs(): + # Create a client + client = scheduler_v1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1.types.ListJobsRequest, dict]]): + The request object. Request message for listing jobs using + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.services.cloud_scheduler.pagers.ListJobsAsyncPager: + Response message for listing jobs using + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job( + self, + request: Optional[Union[cloudscheduler.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Gets a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + async def sample_get_job(): + # Create a client + client = scheduler_v1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1.types.GetJobRequest, dict]]): + The request object. Request message for + [GetJob][google.cloud.scheduler.v1.CloudScheduler.GetJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_job( + self, + request: Optional[Union[cloudscheduler.CreateJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + job: Optional[gcs_job.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Creates a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + async def sample_create_job(): + # Create a client + client = scheduler_v1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1.CreateJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1.types.CreateJobRequest, dict]]): + The request object. Request message for + [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (:class:`google.cloud.scheduler_v1.types.Job`): + Required. The job to add. The user can optionally + specify a name for the job in + [name][google.cloud.scheduler.v1.Job.name]. + [name][google.cloud.scheduler.v1.Job.name] cannot be the + same as an existing job. If a name is not specified then + the system will generate a random unique name that will + be returned ([name][google.cloud.scheduler.v1.Job.name]) + in the response. 
+ + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.CreateJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job( + self, + request: Optional[Union[cloudscheduler.UpdateJobRequest, dict]] = None, + *, + job: Optional[gcs_job.Job] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Updates a job. + + If successful, the updated [Job][google.cloud.scheduler.v1.Job] + is returned. If the job does not exist, ``NOT_FOUND`` is + returned. + + If UpdateJob does not successfully return, it is possible for + the job to be in an + [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1.Job.State.UPDATE_FAILED] + state. A job in this state may not be executed. If this happens, + retry the UpdateJob request until a successful response is + received. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + async def sample_update_job(): + # Create a client + client = scheduler_v1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1.UpdateJobRequest( + ) + + # Make the request + response = await client.update_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1.types.UpdateJobRequest, dict]]): + The request object. 
Request message for + [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. + job (:class:`google.cloud.scheduler_v1.types.Job`): + Required. The new job properties. + [name][google.cloud.scheduler.v1.Job.name] must be + specified. + + Output only fields cannot be modified using UpdateJob. + Any value specified for an output only field will be + ignored. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask used to specify which fields + of the job are being updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.UpdateJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if job is not None: + request.job = job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("job.name", request.job.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job( + self, + request: Optional[Union[cloudscheduler.DeleteJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + async def sample_delete_job(): + # Create a client + client = scheduler_v1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_job(request=request) + + Args: + request (Optional[Union[google.cloud.scheduler_v1.types.DeleteJobRequest, dict]]): + The request object. Request message for deleting a job using + [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.DeleteJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def pause_job( + self, + request: Optional[Union[cloudscheduler.PauseJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Pauses a job. + + If a job is paused then the system will stop executing the job + until it is re-enabled via + [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. + The state of the job is stored in + [state][google.cloud.scheduler.v1.Job.state]; if paused it will + be set to + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. + A job must be in + [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED] + to be paused. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + async def sample_pause_job(): + # Create a client + client = scheduler_v1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1.PauseJobRequest( + name="name_value", + ) + + # Make the request + response = await client.pause_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1.types.PauseJobRequest, dict]]): + The request object. Request message for + [PauseJob][google.cloud.scheduler.v1.CloudScheduler.PauseJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.PauseJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_job, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def resume_job( + self, + request: Optional[Union[cloudscheduler.ResumeJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Resume a job. + + This method reenables a job after it has been + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. + The state of a job is stored in + [Job.state][google.cloud.scheduler.v1.Job.state]; after calling + this method it will be set to + [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED]. + A job must be in + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED] + to be resumed. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + async def sample_resume_job(): + # Create a client + client = scheduler_v1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1.ResumeJobRequest( + name="name_value", + ) + + # Make the request + response = await client.resume_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1.types.ResumeJobRequest, dict]]): + The request object. Request message for + [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.ResumeJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_job, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_job( + self, + request: Optional[Union[cloudscheduler.RunJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Forces a job to run now. + + When this method is called, Cloud Scheduler will + dispatch the job, even if the job is already running. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + async def sample_run_job(): + # Create a client + client = scheduler_v1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1.RunJobRequest( + name="name_value", + ) + + # Make the request + response = await client.run_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1.types.RunJobRequest, dict]]): + The request object. Request message for forcing a job to run now using + [RunJob][google.cloud.scheduler.v1.CloudScheduler.RunJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.RunJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_job, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
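+        # For example, a dict such as {"name": "projects/my-project/locations/us-central1"} + # (hypothetical values) would be expanded below into + # locations_pb2.GetLocationRequest(name="projects/my-project/locations/us-central1").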
+ if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "CloudSchedulerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudSchedulerAsyncClient",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/client.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/client.py new file mode 100644 index 000000000000..034dd6664cf5 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/client.py @@ -0,0 +1,1489 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.scheduler_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.scheduler_v1.services.cloud_scheduler import pagers +from google.cloud.scheduler_v1.types import cloudscheduler +from google.cloud.scheduler_v1.types import job +from google.cloud.scheduler_v1.types import job as gcs_job +from google.cloud.scheduler_v1.types import target + +from .transports.base import DEFAULT_CLIENT_INFO, CloudSchedulerTransport +from .transports.grpc import CloudSchedulerGrpcTransport +from .transports.grpc_asyncio import CloudSchedulerGrpcAsyncIOTransport +from .transports.rest import CloudSchedulerRestTransport + + +class CloudSchedulerClientMeta(type): + """Metaclass for the CloudScheduler client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[CloudSchedulerTransport]] + _transport_registry["grpc"] = CloudSchedulerGrpcTransport + _transport_registry["grpc_asyncio"] = CloudSchedulerGrpcAsyncIOTransport + _transport_registry["rest"] = CloudSchedulerRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudSchedulerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudSchedulerClient(metaclass=CloudSchedulerClientMeta): + """The Cloud Scheduler API allows external entities to reliably + schedule asynchronous jobs. 
+ """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudscheduler.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudSchedulerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudSchedulerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudSchedulerTransport: + """Returns the transport used by the client instance. + + Returns: + CloudSchedulerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def job_path( + project: str, + location: str, + job: str, + ) -> str: + """Returns a fully-qualified job string.""" + return "projects/{project}/locations/{location}/jobs/{job}".format( + project=project, + location=location, + job=job, + ) + + @staticmethod + def parse_job_path(path: str) -> Dict[str, str]: + """Parses a job path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def topic_path( + project: str, + topic: str, + ) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str, str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudSchedulerTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud scheduler client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, CloudSchedulerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudSchedulerTransport): + # transport is a CloudSchedulerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_jobs( + self, + request: Optional[Union[cloudscheduler.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsPager: + r"""Lists jobs. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + def sample_list_jobs(): + # Create a client + client = scheduler_v1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.scheduler_v1.types.ListJobsRequest, dict]): + The request object. Request message for listing jobs using + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.services.cloud_scheduler.pagers.ListJobsPager: + Response message for listing jobs using + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.ListJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.ListJobsRequest): + request = cloudscheduler.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
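+        # Note: iterating the pager past the current page re-issues ListJobs + # with the next page token transparently, reusing this same wrapped RPC.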
+ return response + + def get_job( + self, + request: Optional[Union[cloudscheduler.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Gets a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + def sample_get_job(): + # Create a client + client = scheduler_v1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1.types.GetJobRequest, dict]): + The request object. Request message for + [GetJob][google.cloud.scheduler.v1.CloudScheduler.GetJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.GetJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.GetJobRequest): + request = cloudscheduler.GetJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_job( + self, + request: Optional[Union[cloudscheduler.CreateJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + job: Optional[gcs_job.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Creates a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + def sample_create_job(): + # Create a client + client = scheduler_v1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1.CreateJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1.types.CreateJobRequest, dict]): + The request object. Request message for + [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (google.cloud.scheduler_v1.types.Job): + Required. The job to add. The user can optionally + specify a name for the job in + [name][google.cloud.scheduler.v1.Job.name]. + [name][google.cloud.scheduler.v1.Job.name] cannot be the + same as an existing job. If a name is not specified then + the system will generate a random unique name that will + be returned ([name][google.cloud.scheduler.v1.Job.name]) + in the response. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.CreateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.CreateJobRequest): + request = cloudscheduler.CreateJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
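+        # Unlike the async client above, which wraps each RPC at call time, the sync + # client looks up a method that was already wrapped with its default timeout + # settings when the transport was constructed.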
+ rpc = self._transport._wrapped_methods[self._transport.create_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_job( + self, + request: Optional[Union[cloudscheduler.UpdateJobRequest, dict]] = None, + *, + job: Optional[gcs_job.Job] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Updates a job. + + If successful, the updated [Job][google.cloud.scheduler.v1.Job] + is returned. If the job does not exist, ``NOT_FOUND`` is + returned. + + If UpdateJob does not successfully return, it is possible for + the job to be in an + [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1.Job.State.UPDATE_FAILED] + state. A job in this state may not be executed. If this happens, + retry the UpdateJob request until a successful response is + received. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + def sample_update_job(): + # Create a client + client = scheduler_v1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1.UpdateJobRequest( + ) + + # Make the request + response = client.update_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1.types.UpdateJobRequest, dict]): + The request object. Request message for + [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. + job (google.cloud.scheduler_v1.types.Job): + Required. The new job properties. + [name][google.cloud.scheduler.v1.Job.name] must be + specified. + + Output only fields cannot be modified using UpdateJob. + Any value specified for an output only field will be + ignored. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields + of the job are being updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
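+        # For instance, update_job(request=request, job=job) would be ambiguous + # about which value should win, so it raises ValueError below.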
+ has_flattened_params = any([job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.UpdateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.UpdateJobRequest): + request = cloudscheduler.UpdateJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if job is not None: + request.job = job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("job.name", request.job.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job( + self, + request: Optional[Union[cloudscheduler.DeleteJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + def sample_delete_job(): + # Create a client + client = scheduler_v1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + + Args: + request (Union[google.cloud.scheduler_v1.types.DeleteJobRequest, dict]): + The request object. Request message for deleting a job using + [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.DeleteJobRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.DeleteJobRequest): + request = cloudscheduler.DeleteJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def pause_job( + self, + request: Optional[Union[cloudscheduler.PauseJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Pauses a job. + + If a job is paused then the system will stop executing the job + until it is re-enabled via + [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. + The state of the job is stored in + [state][google.cloud.scheduler.v1.Job.state]; if paused it will + be set to + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. + A job must be in + [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED] + to be paused. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + def sample_pause_job(): + # Create a client + client = scheduler_v1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1.PauseJobRequest( + name="name_value", + ) + + # Make the request + response = client.pause_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1.types.PauseJobRequest, dict]): + The request object. Request message for + [PauseJob][google.cloud.scheduler.v1.CloudScheduler.PauseJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
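+        # (Usage sketch with the documented resource-name form; all
+        # identifiers are placeholders:
+        #
+        #   name = "projects/my-project/locations/us-central1/jobs/my-job"
+        #   paused = client.pause_job(name=name)    # Job.state -> PAUSED
+        #   resumed = client.resume_job(name=name)  # Job.state -> ENABLED
+        # )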
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.PauseJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.PauseJobRequest): + request = cloudscheduler.PauseJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resume_job( + self, + request: Optional[Union[cloudscheduler.ResumeJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Resume a job. + + This method reenables a job after it has been + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. + The state of a job is stored in + [Job.state][google.cloud.scheduler.v1.Job.state]; after calling + this method it will be set to + [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED]. + A job must be in + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED] + to be resumed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + def sample_resume_job(): + # Create a client + client = scheduler_v1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1.ResumeJobRequest( + name="name_value", + ) + + # Make the request + response = client.resume_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1.types.ResumeJobRequest, dict]): + The request object. Request message for + [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.ResumeJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.ResumeJobRequest): + request = cloudscheduler.ResumeJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_job( + self, + request: Optional[Union[cloudscheduler.RunJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Forces a job to run now. + + When this method is called, Cloud Scheduler will + dispatch the job, even if the job is already running. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1 + + def sample_run_job(): + # Create a client + client = scheduler_v1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1.RunJobRequest( + name="name_value", + ) + + # Make the request + response = client.run_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1.types.RunJobRequest, dict]): + The request object. Request message for forcing a job to run now using + [RunJob][google.cloud.scheduler.v1.CloudScheduler.RunJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. 
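+        # (For illustration, either of the following forms is accepted and
+        # yields the same RunJobRequest after the coercion below; the job
+        # name is a placeholder.
+        #
+        #   client.run_job(request={"name": "projects/p/locations/l/jobs/j"})
+        #   client.run_job(
+        #       request=cloudscheduler.RunJobRequest(
+        #           name="projects/p/locations/l/jobs/j"
+        #       )
+        #   )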
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.RunJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.RunJobRequest): + request = cloudscheduler.RunJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CloudSchedulerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudSchedulerClient",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/pagers.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/pagers.py new file mode 100644 index 000000000000..c284a5cf459d --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.scheduler_v1.types import cloudscheduler, job + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.scheduler_v1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. 
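+
+    For illustration only (``client`` is a hypothetical
+    :class:`CloudSchedulerClient` and the parent value is a placeholder):
+
+    .. code-block:: python
+
+        pager = client.list_jobs(parent="projects/my-project/locations/us-central1")
+        for job in pager:
+            # Iteration transparently fetches further pages as needed.
+            print(job.name)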
+ + All the usual :class:`google.cloud.scheduler_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudscheduler.ListJobsResponse], + request: cloudscheduler.ListJobsRequest, + response: cloudscheduler.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.scheduler_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.scheduler_v1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudscheduler.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudscheduler.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[job.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.scheduler_v1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.scheduler_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudscheduler.ListJobsResponse]], + request: cloudscheduler.ListJobsRequest, + response: cloudscheduler.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.scheduler_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.scheduler_v1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudscheduler.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudscheduler.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[job.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/__init__.py new file mode 100644 index 000000000000..80615da54855 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudSchedulerTransport +from .grpc import CloudSchedulerGrpcTransport +from .grpc_asyncio import CloudSchedulerGrpcAsyncIOTransport +from .rest import CloudSchedulerRestInterceptor, CloudSchedulerRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudSchedulerTransport]] +_transport_registry["grpc"] = CloudSchedulerGrpcTransport +_transport_registry["grpc_asyncio"] = CloudSchedulerGrpcAsyncIOTransport +_transport_registry["rest"] = CloudSchedulerRestTransport + +__all__ = ( + "CloudSchedulerTransport", + "CloudSchedulerGrpcTransport", + "CloudSchedulerGrpcAsyncIOTransport", + "CloudSchedulerRestTransport", + "CloudSchedulerRestInterceptor", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/base.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/base.py new file mode 100644 index 000000000000..789a0ed77dc8 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/base.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.scheduler_v1 import gapic_version as package_version +from google.cloud.scheduler_v1.types import cloudscheduler +from google.cloud.scheduler_v1.types import job +from google.cloud.scheduler_v1.types import job as gcs_job + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CloudSchedulerTransport(abc.ABC): + """Abstract transport class for CloudScheduler.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudscheduler.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
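+        # (Summary of the resolution order implemented below: explicit
+        # ``credentials`` take precedence; otherwise ``credentials_file`` is
+        # loaded via google.auth; otherwise Application Default Credentials
+        # are used. Passing both raises DuplicateCredentialArgs.)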
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.create_job: gapic_v1.method.wrap_method( + self.create_job, + default_timeout=600.0, + client_info=client_info, + ), + self.update_job: gapic_v1.method.wrap_method( + self.update_job, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_job: gapic_v1.method.wrap_method( + self.delete_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.pause_job: gapic_v1.method.wrap_method( + self.pause_job, + default_timeout=600.0, + client_info=client_info, + ), + self.resume_job: gapic_v1.method.wrap_method( + self.resume_job, + default_timeout=600.0, + client_info=client_info, + ), + self.run_job: gapic_v1.method.wrap_method( + self.run_job, + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
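+
+        (Illustrative note: the ``CloudSchedulerClient`` defined earlier in
+        this package calls this method from its ``__exit__``, so using the
+        client as a context manager closes the transport automatically.)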
+ """ + raise NotImplementedError() + + @property + def list_jobs( + self, + ) -> Callable[ + [cloudscheduler.ListJobsRequest], + Union[ + cloudscheduler.ListJobsResponse, Awaitable[cloudscheduler.ListJobsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_job( + self, + ) -> Callable[[cloudscheduler.GetJobRequest], Union[job.Job, Awaitable[job.Job]]]: + raise NotImplementedError() + + @property + def create_job( + self, + ) -> Callable[ + [cloudscheduler.CreateJobRequest], Union[gcs_job.Job, Awaitable[gcs_job.Job]] + ]: + raise NotImplementedError() + + @property + def update_job( + self, + ) -> Callable[ + [cloudscheduler.UpdateJobRequest], Union[gcs_job.Job, Awaitable[gcs_job.Job]] + ]: + raise NotImplementedError() + + @property + def delete_job( + self, + ) -> Callable[ + [cloudscheduler.DeleteJobRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def pause_job( + self, + ) -> Callable[[cloudscheduler.PauseJobRequest], Union[job.Job, Awaitable[job.Job]]]: + raise NotImplementedError() + + @property + def resume_job( + self, + ) -> Callable[ + [cloudscheduler.ResumeJobRequest], Union[job.Job, Awaitable[job.Job]] + ]: + raise NotImplementedError() + + @property + def run_job( + self, + ) -> Callable[[cloudscheduler.RunJobRequest], Union[job.Job, Awaitable[job.Job]]]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CloudSchedulerTransport",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/grpc.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/grpc.py new file mode 100644 index 000000000000..bbddddaf9d15 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/grpc.py @@ -0,0 +1,511 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+
+from google.cloud.scheduler_v1.types import cloudscheduler
+from google.cloud.scheduler_v1.types import job
+from google.cloud.scheduler_v1.types import job as gcs_job
+
+from .base import DEFAULT_CLIENT_INFO, CloudSchedulerTransport
+
+
+class CloudSchedulerGrpcTransport(CloudSchedulerTransport):
+    """gRPC backend transport for CloudScheduler.
+
+    The Cloud Scheduler API allows external entities to reliably
+    schedule asynchronous jobs.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudscheduler.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudscheduler.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def list_jobs(
+        self,
+    ) -> Callable[[cloudscheduler.ListJobsRequest], cloudscheduler.ListJobsResponse]:
+        r"""Return a callable for the list jobs method over gRPC.
+
+        Lists jobs.
+
+        Returns:
+            Callable[[~.ListJobsRequest],
+                    ~.ListJobsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_jobs" not in self._stubs:
+            self._stubs["list_jobs"] = self.grpc_channel.unary_unary(
+                "/google.cloud.scheduler.v1.CloudScheduler/ListJobs",
+                request_serializer=cloudscheduler.ListJobsRequest.serialize,
+                response_deserializer=cloudscheduler.ListJobsResponse.deserialize,
+            )
+        return self._stubs["list_jobs"]
+
+    @property
+    def get_job(self) -> Callable[[cloudscheduler.GetJobRequest], job.Job]:
+        r"""Return a callable for the get job method over gRPC.
+
+        Gets a job.
+
+        Returns:
+            Callable[[~.GetJobRequest],
+                    ~.Job]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_job" not in self._stubs:
+            self._stubs["get_job"] = self.grpc_channel.unary_unary(
+                "/google.cloud.scheduler.v1.CloudScheduler/GetJob",
+                request_serializer=cloudscheduler.GetJobRequest.serialize,
+                response_deserializer=job.Job.deserialize,
+            )
+        return self._stubs["get_job"]
+
+    @property
+    def create_job(self) -> Callable[[cloudscheduler.CreateJobRequest], gcs_job.Job]:
+        r"""Return a callable for the create job method over gRPC.
+
+        Creates a job.
+
+        Returns:
+            Callable[[~.CreateJobRequest],
+                    ~.Job]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
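+        # (Note: the stub is created at most once and cached in
+        # ``self._stubs``; subsequent reads of this property return the same
+        # callable.)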
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job" not in self._stubs: + self._stubs["create_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/CreateJob", + request_serializer=cloudscheduler.CreateJobRequest.serialize, + response_deserializer=gcs_job.Job.deserialize, + ) + return self._stubs["create_job"] + + @property + def update_job(self) -> Callable[[cloudscheduler.UpdateJobRequest], gcs_job.Job]: + r"""Return a callable for the update job method over gRPC. + + Updates a job. + + If successful, the updated [Job][google.cloud.scheduler.v1.Job] + is returned. If the job does not exist, ``NOT_FOUND`` is + returned. + + If UpdateJob does not successfully return, it is possible for + the job to be in an + [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1.Job.State.UPDATE_FAILED] + state. A job in this state may not be executed. If this happens, + retry the UpdateJob request until a successful response is + received. + + Returns: + Callable[[~.UpdateJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job" not in self._stubs: + self._stubs["update_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/UpdateJob", + request_serializer=cloudscheduler.UpdateJobRequest.serialize, + response_deserializer=gcs_job.Job.deserialize, + ) + return self._stubs["update_job"] + + @property + def delete_job( + self, + ) -> Callable[[cloudscheduler.DeleteJobRequest], empty_pb2.Empty]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job" not in self._stubs: + self._stubs["delete_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/DeleteJob", + request_serializer=cloudscheduler.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_job"] + + @property + def pause_job(self) -> Callable[[cloudscheduler.PauseJobRequest], job.Job]: + r"""Return a callable for the pause job method over gRPC. + + Pauses a job. + + If a job is paused then the system will stop executing the job + until it is re-enabled via + [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. + The state of the job is stored in + [state][google.cloud.scheduler.v1.Job.state]; if paused it will + be set to + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. + A job must be in + [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED] + to be paused. + + Returns: + Callable[[~.PauseJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
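+        # (Illustrative sketch, not typical client usage: the returned
+        # callable may be invoked directly with a request proto,
+        #
+        #   job_pb = transport.pause_job(
+        #       cloudscheduler.PauseJobRequest(
+        #           name="projects/p/locations/l/jobs/j"
+        #       )
+        #   )
+        #
+        # where ``transport`` is a hypothetical CloudSchedulerGrpcTransport
+        # instance; this bypasses the client-level retry and metadata
+        # handling.)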
+        if "pause_job" not in self._stubs:
+            self._stubs["pause_job"] = self.grpc_channel.unary_unary(
+                "/google.cloud.scheduler.v1.CloudScheduler/PauseJob",
+                request_serializer=cloudscheduler.PauseJobRequest.serialize,
+                response_deserializer=job.Job.deserialize,
+            )
+        return self._stubs["pause_job"]
+
+    @property
+    def resume_job(self) -> Callable[[cloudscheduler.ResumeJobRequest], job.Job]:
+        r"""Return a callable for the resume job method over gRPC.
+
+        Resume a job.
+
+        This method reenables a job after it has been
+        [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED].
+        The state of a job is stored in
+        [Job.state][google.cloud.scheduler.v1.Job.state]; after calling
+        this method it will be set to
+        [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED].
+        A job must be in
+        [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]
+        to be resumed.
+
+        Returns:
+            Callable[[~.ResumeJobRequest],
+                    ~.Job]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "resume_job" not in self._stubs:
+            self._stubs["resume_job"] = self.grpc_channel.unary_unary(
+                "/google.cloud.scheduler.v1.CloudScheduler/ResumeJob",
+                request_serializer=cloudscheduler.ResumeJobRequest.serialize,
+                response_deserializer=job.Job.deserialize,
+            )
+        return self._stubs["resume_job"]
+
+    @property
+    def run_job(self) -> Callable[[cloudscheduler.RunJobRequest], job.Job]:
+        r"""Return a callable for the run job method over gRPC.
+
+        Forces a job to run now.
+
+        When this method is called, Cloud Scheduler will
+        dispatch the job, even if the job is already running.
+
+        Returns:
+            Callable[[~.RunJobRequest],
+                    ~.Job]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "run_job" not in self._stubs:
+            self._stubs["run_job"] = self.grpc_channel.unary_unary(
+                "/google.cloud.scheduler.v1.CloudScheduler/RunJob",
+                request_serializer=cloudscheduler.RunJobRequest.serialize,
+                response_deserializer=job.Job.deserialize,
+            )
+        return self._stubs["run_job"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CloudSchedulerGrpcTransport",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/grpc_asyncio.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/grpc_asyncio.py new file mode 100644 index 000000000000..653a47cc1bc0 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/grpc_asyncio.py @@ -0,0 +1,520 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.scheduler_v1.types import cloudscheduler +from google.cloud.scheduler_v1.types import job +from google.cloud.scheduler_v1.types import job as gcs_job + +from .base import DEFAULT_CLIENT_INFO, CloudSchedulerTransport +from .grpc import CloudSchedulerGrpcTransport + + +class CloudSchedulerGrpcAsyncIOTransport(CloudSchedulerTransport): + """gRPC AsyncIO backend transport for CloudScheduler. + + The Cloud Scheduler API allows external entities to reliably + schedule asynchronous jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudscheduler.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudscheduler.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_jobs( + self, + ) -> Callable[ + [cloudscheduler.ListJobsRequest], Awaitable[cloudscheduler.ListJobsResponse] + ]: + r"""Return a callable for the list jobs method over gRPC. + + Lists jobs. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_jobs" not in self._stubs: + self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/ListJobs", + request_serializer=cloudscheduler.ListJobsRequest.serialize, + response_deserializer=cloudscheduler.ListJobsResponse.deserialize, + ) + return self._stubs["list_jobs"] + + @property + def get_job(self) -> Callable[[cloudscheduler.GetJobRequest], Awaitable[job.Job]]: + r"""Return a callable for the get job method over gRPC. + + Gets a job. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job" not in self._stubs: + self._stubs["get_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/GetJob", + request_serializer=cloudscheduler.GetJobRequest.serialize, + response_deserializer=job.Job.deserialize, + ) + return self._stubs["get_job"] + + @property + def create_job( + self, + ) -> Callable[[cloudscheduler.CreateJobRequest], Awaitable[gcs_job.Job]]: + r"""Return a callable for the create job method over gRPC. + + Creates a job. + + Returns: + Callable[[~.CreateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job" not in self._stubs: + self._stubs["create_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/CreateJob", + request_serializer=cloudscheduler.CreateJobRequest.serialize, + response_deserializer=gcs_job.Job.deserialize, + ) + return self._stubs["create_job"] + + @property + def update_job( + self, + ) -> Callable[[cloudscheduler.UpdateJobRequest], Awaitable[gcs_job.Job]]: + r"""Return a callable for the update job method over gRPC. + + Updates a job. + + If successful, the updated [Job][google.cloud.scheduler.v1.Job] + is returned. If the job does not exist, ``NOT_FOUND`` is + returned. + + If UpdateJob does not successfully return, it is possible for + the job to be in an + [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1.Job.State.UPDATE_FAILED] + state. A job in this state may not be executed. If this happens, + retry the UpdateJob request until a successful response is + received. + + Returns: + Callable[[~.UpdateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_job" not in self._stubs: + self._stubs["update_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/UpdateJob", + request_serializer=cloudscheduler.UpdateJobRequest.serialize, + response_deserializer=gcs_job.Job.deserialize, + ) + return self._stubs["update_job"] + + @property + def delete_job( + self, + ) -> Callable[[cloudscheduler.DeleteJobRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job" not in self._stubs: + self._stubs["delete_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/DeleteJob", + request_serializer=cloudscheduler.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_job"] + + @property + def pause_job( + self, + ) -> Callable[[cloudscheduler.PauseJobRequest], Awaitable[job.Job]]: + r"""Return a callable for the pause job method over gRPC. + + Pauses a job. + + If a job is paused then the system will stop executing the job + until it is re-enabled via + [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. + The state of the job is stored in + [state][google.cloud.scheduler.v1.Job.state]; if paused it will + be set to + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. + A job must be in + [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED] + to be paused. + + Returns: + Callable[[~.PauseJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_job" not in self._stubs: + self._stubs["pause_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/PauseJob", + request_serializer=cloudscheduler.PauseJobRequest.serialize, + response_deserializer=job.Job.deserialize, + ) + return self._stubs["pause_job"] + + @property + def resume_job( + self, + ) -> Callable[[cloudscheduler.ResumeJobRequest], Awaitable[job.Job]]: + r"""Return a callable for the resume job method over gRPC. + + Resume a job. + + This method reenables a job after it has been + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. + The state of a job is stored in + [Job.state][google.cloud.scheduler.v1.Job.state]; after calling + this method it will be set to + [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED]. + A job must be in + [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED] + to be resumed. + + Returns: + Callable[[~.ResumeJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "resume_job" not in self._stubs: + self._stubs["resume_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/ResumeJob", + request_serializer=cloudscheduler.ResumeJobRequest.serialize, + response_deserializer=job.Job.deserialize, + ) + return self._stubs["resume_job"] + + @property + def run_job(self) -> Callable[[cloudscheduler.RunJobRequest], Awaitable[job.Job]]: + r"""Return a callable for the run job method over gRPC. + + Forces a job to run now. + + When this method is called, Cloud Scheduler will + dispatch the job, even if the job is already running. + + Returns: + Callable[[~.RunJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_job" not in self._stubs: + self._stubs["run_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1.CloudScheduler/RunJob", + request_serializer=cloudscheduler.RunJobRequest.serialize, + response_deserializer=job.Job.deserialize, + ) + return self._stubs["run_job"] + + def close(self): + return self.grpc_channel.close() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("CloudSchedulerGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/rest.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/rest.py new file mode 100644 index 000000000000..2502c0f98c78 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/services/cloud_scheduler/transports/rest.py @@ -0,0 +1,1381 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.scheduler_v1.types import cloudscheduler +from google.cloud.scheduler_v1.types import job +from google.cloud.scheduler_v1.types import job as gcs_job + +from .base import CloudSchedulerTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudSchedulerRestInterceptor: + """Interceptor for CloudScheduler. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudSchedulerRestTransport. + + .. 
code-block:: python + class MyCustomCloudSchedulerInterceptor(CloudSchedulerRestInterceptor): + def pre_create_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_pause_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_pause_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resume_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_resume_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudSchedulerRestTransport(interceptor=MyCustomCloudSchedulerInterceptor()) + client = CloudSchedulerClient(transport=transport) + + + """ + + def pre_create_job( + self, + request: cloudscheduler.CreateJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.CreateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_create_job(self, response: gcs_job.Job) -> gcs_job.Job: + """Post-rpc interceptor for create_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_delete_job( + self, + request: cloudscheduler.DeleteJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.DeleteJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def pre_get_job( + self, request: cloudscheduler.GetJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudscheduler.GetJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. 
+ """ + return request, metadata + + def post_get_job(self, response: job.Job) -> job.Job: + """Post-rpc interceptor for get_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_list_jobs( + self, + request: cloudscheduler.ListJobsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_list_jobs( + self, response: cloudscheduler.ListJobsResponse + ) -> cloudscheduler.ListJobsResponse: + """Post-rpc interceptor for list_jobs + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_pause_job( + self, + request: cloudscheduler.PauseJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.PauseJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for pause_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_pause_job(self, response: job.Job) -> job.Job: + """Post-rpc interceptor for pause_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_resume_job( + self, + request: cloudscheduler.ResumeJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.ResumeJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resume_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_resume_job(self, response: job.Job) -> job.Job: + """Post-rpc interceptor for resume_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_run_job( + self, request: cloudscheduler.RunJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudscheduler.RunJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_run_job(self, response: job.Job) -> job.Job: + """Post-rpc interceptor for run_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_update_job( + self, + request: cloudscheduler.UpdateJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.UpdateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. 
+ """ + return request, metadata + + def post_update_job(self, response: gcs_job.Job) -> gcs_job.Job: + """Post-rpc interceptor for update_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudSchedulerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudSchedulerRestInterceptor + + +class CloudSchedulerRestTransport(CloudSchedulerTransport): + """REST backend transport for CloudScheduler. + + The Cloud Scheduler API allows external entities to reliably + schedule asynchronous jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudscheduler.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudSchedulerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CloudSchedulerRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateJob(CloudSchedulerRestStub):
+        def __hash__(self):
+            return hash("CreateJob")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: cloudscheduler.CreateJobRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gcs_job.Job:
+            r"""Call the create job method over HTTP.
+
+            Args:
+                request (~.cloudscheduler.CreateJobRequest):
+                    The request object. Request message for
+                [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcs_job.Job:
+                    Configuration for a job.
+                The maximum allowed size for a job is
+                1MB.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/jobs", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_create_job(request, metadata) + pb_request = cloudscheduler.CreateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcs_job.Job() + pb_resp = gcs_job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job(resp) + return resp + + class _DeleteJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("DeleteJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.DeleteJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete job method over HTTP. + + Args: + request (~.cloudscheduler.DeleteJobRequest): + The request object. Request message for deleting a job using + [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/jobs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_job(request, metadata) + pb_request = cloudscheduler.DeleteJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("GetJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.GetJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Call the get job method over HTTP. + + Args: + request (~.cloudscheduler.GetJobRequest): + The request object. Request message for + [GetJob][google.cloud.scheduler.v1.CloudScheduler.GetJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/jobs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_job(request, metadata) + pb_request = cloudscheduler.GetJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = job.Job() + pb_resp = job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job(resp) + return resp + + class _ListJobs(CloudSchedulerRestStub): + def __hash__(self): + return hash("ListJobs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.ListJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudscheduler.ListJobsResponse: + r"""Call the list jobs method over HTTP. + + Args: + request (~.cloudscheduler.ListJobsRequest): + The request object. Request message for listing jobs using + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudscheduler.ListJobsResponse: + Response message for listing jobs using + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/jobs", + }, + ] + request, metadata = self._interceptor.pre_list_jobs(request, metadata) + pb_request = cloudscheduler.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudscheduler.ListJobsResponse() + pb_resp = cloudscheduler.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_jobs(resp) + return resp + + class _PauseJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("PauseJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.PauseJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Call the pause job method over HTTP. + + Args: + request (~.cloudscheduler.PauseJobRequest): + The request object. Request message for + [PauseJob][google.cloud.scheduler.v1.CloudScheduler.PauseJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/jobs/*}:pause", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_pause_job(request, metadata) + pb_request = cloudscheduler.PauseJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = job.Job() + pb_resp = job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pause_job(resp) + return resp + + class _ResumeJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("ResumeJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.ResumeJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Call the resume job method over HTTP. + + Args: + request (~.cloudscheduler.ResumeJobRequest): + The request object. Request message for + [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/jobs/*}:resume", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resume_job(request, metadata) + pb_request = cloudscheduler.ResumeJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = job.Job() + pb_resp = job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume_job(resp) + return resp + + class _RunJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("RunJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.RunJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Call the run job method over HTTP. + + Args: + request (~.cloudscheduler.RunJobRequest): + The request object. Request message for forcing a job to run now using + [RunJob][google.cloud.scheduler.v1.CloudScheduler.RunJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/jobs/*}:run", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_job(request, metadata) + pb_request = cloudscheduler.RunJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = job.Job() + pb_resp = job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_job(resp) + return resp + + class _UpdateJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("UpdateJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.UpdateJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Call the update job method over HTTP. + + Args: + request (~.cloudscheduler.UpdateJobRequest): + The request object. Request message for + [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcs_job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{job.name=projects/*/locations/*/jobs/*}", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_update_job(request, metadata) + pb_request = cloudscheduler.UpdateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcs_job.Job() + pb_resp = gcs_job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job(resp) + return resp + + @property + def create_job(self) -> Callable[[cloudscheduler.CreateJobRequest], gcs_job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job( + self, + ) -> Callable[[cloudscheduler.DeleteJobRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job(self) -> Callable[[cloudscheduler.GetJobRequest], job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_jobs( + self, + ) -> Callable[[cloudscheduler.ListJobsRequest], cloudscheduler.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def pause_job(self) -> Callable[[cloudscheduler.PauseJobRequest], job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PauseJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def resume_job(self) -> Callable[[cloudscheduler.ResumeJobRequest], job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResumeJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_job(self) -> Callable[[cloudscheduler.RunJobRequest], job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job(self) -> Callable[[cloudscheduler.UpdateJobRequest], gcs_job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudSchedulerRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudSchedulerRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudSchedulerRestTransport",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/__init__.py new file mode 100644 index 000000000000..26a2dd654c46 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cloudscheduler import ( + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + ListJobsRequest, + ListJobsResponse, + PauseJobRequest, + ResumeJobRequest, + RunJobRequest, + UpdateJobRequest, +) +from .job import Job, RetryConfig +from .target import ( + AppEngineHttpTarget, + AppEngineRouting, + HttpMethod, + HttpTarget, + OAuthToken, + OidcToken, + PubsubTarget, +) + +__all__ = ( + "CreateJobRequest", + "DeleteJobRequest", + "GetJobRequest", + "ListJobsRequest", + "ListJobsResponse", + "PauseJobRequest", + "ResumeJobRequest", + "RunJobRequest", + "UpdateJobRequest", + "Job", + "RetryConfig", + "AppEngineHttpTarget", + "AppEngineRouting", + "HttpTarget", + "OAuthToken", + "OidcToken", + "PubsubTarget", + "HttpMethod", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/cloudscheduler.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/cloudscheduler.py new file mode 100644 index 000000000000..bb38699f57e9 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/cloudscheduler.py @@ -0,0 +1,259 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.scheduler_v1.types import job as gcs_job + +__protobuf__ = proto.module( + package="google.cloud.scheduler.v1", + manifest={ + "ListJobsRequest", + "ListJobsResponse", + "GetJobRequest", + "CreateJobRequest", + "UpdateJobRequest", + "DeleteJobRequest", + "PauseJobRequest", + "ResumeJobRequest", + "RunJobRequest", + }, +) + + +class ListJobsRequest(proto.Message): + r"""Request message for listing jobs using + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + page_size (int): + Requested page size. + + The maximum page size is 500. If unspecified, the page size + will be the maximum. Fewer jobs than requested might be + returned, even if more jobs exist; use next_page_token to + determine if more jobs exist. + page_token (str): + A token identifying a page of results the server will + return. To request the first page results, page_token must + be empty. To request the next page of results, page_token + must be the value of + [next_page_token][google.cloud.scheduler.v1.ListJobsResponse.next_page_token] + returned from the previous call to + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + It is an error to switch the value of + [filter][google.cloud.scheduler.v1.ListJobsRequest.filter] + or + [order_by][google.cloud.scheduler.v1.ListJobsRequest.order_by] + while iterating through pages. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=5, + ) + page_token: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ListJobsResponse(proto.Message): + r"""Response message for listing jobs using + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. + + Attributes: + jobs (MutableSequence[google.cloud.scheduler_v1.types.Job]): + The list of jobs. + next_page_token (str): + A token to retrieve next page of results. Pass this value in + the + [page_token][google.cloud.scheduler.v1.ListJobsRequest.page_token] + field in the subsequent call to + [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs] + to retrieve the next page of results. If this is empty it + indicates that there are no more results through which to + paginate. + + The page token is valid for only 2 hours. + """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence[gcs_job.Job] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcs_job.Job, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetJobRequest(proto.Message): + r"""Request message for + [GetJob][google.cloud.scheduler.v1.CloudScheduler.GetJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateJobRequest(proto.Message): + r"""Request message for + [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + job (google.cloud.scheduler_v1.types.Job): + Required. The job to add. 
The user can optionally specify a + name for the job in + [name][google.cloud.scheduler.v1.Job.name]. + [name][google.cloud.scheduler.v1.Job.name] cannot be the + same as an existing job. If a name is not specified then the + system will generate a random unique name that will be + returned ([name][google.cloud.scheduler.v1.Job.name]) in the + response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job: gcs_job.Job = proto.Field( + proto.MESSAGE, + number=2, + message=gcs_job.Job, + ) + + +class UpdateJobRequest(proto.Message): + r"""Request message for + [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. + + Attributes: + job (google.cloud.scheduler_v1.types.Job): + Required. The new job properties. + [name][google.cloud.scheduler.v1.Job.name] must be + specified. + + Output only fields cannot be modified using UpdateJob. Any + value specified for an output only field will be ignored. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields of the + job are being updated. + """ + + job: gcs_job.Job = proto.Field( + proto.MESSAGE, + number=1, + message=gcs_job.Job, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteJobRequest(proto.Message): + r"""Request message for deleting a job using + [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PauseJobRequest(proto.Message): + r"""Request message for + [PauseJob][google.cloud.scheduler.v1.CloudScheduler.PauseJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ResumeJobRequest(proto.Message): + r"""Request message for + [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RunJobRequest(proto.Message): + r"""Request message for forcing a job to run now using + [RunJob][google.cloud.scheduler.v1.CloudScheduler.RunJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/job.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/job.py new file mode 100644 index 000000000000..fe0c2af8d0d4 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/job.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.scheduler_v1.types import target + +__protobuf__ = proto.module( + package="google.cloud.scheduler.v1", + manifest={ + "Job", + "RetryConfig", + }, +) + + +class Job(proto.Message): + r"""Configuration for a job. + The maximum allowed size for a job is 1MB. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Optionally caller-specified in + [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob], + after which it becomes output only. + + The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the job's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``JOB_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + description (str): + Optionally caller-specified in + [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob] + or + [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. + + A human-readable description for the job. This string must + not contain more than 500 characters. + pubsub_target (google.cloud.scheduler_v1.types.PubsubTarget): + Pub/Sub target. + + This field is a member of `oneof`_ ``target``. + app_engine_http_target (google.cloud.scheduler_v1.types.AppEngineHttpTarget): + App Engine HTTP target. + + This field is a member of `oneof`_ ``target``. + http_target (google.cloud.scheduler_v1.types.HttpTarget): + HTTP target. + + This field is a member of `oneof`_ ``target``. + schedule (str): + Required, except when used with + [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. + + Describes the schedule on which the job will be executed. + + The schedule can be either of the following types: + + - `Crontab `__ + - English-like + `schedule `__ + + As a general rule, execution ``n + 1`` of a job will not + begin until execution ``n`` has finished. Cloud Scheduler + will never allow two simultaneously outstanding executions. + For example, this implies that if the ``n+1``\ th execution + is scheduled to run at 16:00 but the ``n``\ th execution + takes until 16:15, the ``n+1``\ th execution will not start + until ``16:15``. A scheduled start time will be delayed if + the previous execution has not ended when its scheduled time + occurs. 
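As an illustration of the scheduling fields described above (project, location, and job IDs are hypothetical), a minimal job with a unix-cron schedule could be constructed like this:

.. code-block:: python

    from google.cloud import scheduler_v1

    # Runs every 15 minutes; the schedule is interpreted in UTC.
    job = scheduler_v1.Job(
        name="projects/my-project/locations/us-central1/jobs/my-job",
        schedule="*/15 * * * *",
        time_zone="utc",
        http_target=scheduler_v1.HttpTarget(
            uri="https://example.com/task",
            http_method=scheduler_v1.HttpMethod.POST,
        ),
    )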
+ + If + [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count] + > 0 and a job attempt fails, the job will be tried a total + of + [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count] + times, with exponential backoff, until the next scheduled + start time. + time_zone (str): + Specifies the time zone to be used in interpreting + [schedule][google.cloud.scheduler.v1.Job.schedule]. The + value of this field must be a time zone name from the `tz + database `__. + + Note that some time zones include a provision for daylight + savings time. The rules for daylight saving time are + determined by the chosen tz. For UTC use the string "utc". + If a time zone is not specified, the default will be in UTC + (also known as GMT). + user_update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the job. + state (google.cloud.scheduler_v1.types.Job.State): + Output only. State of the job. + status (google.rpc.status_pb2.Status): + Output only. The response from the target for + the last attempted execution. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The next time the job is + scheduled. Note that this may be a retry of a + previously failed attempt or the next execution + time according to the schedule. + last_attempt_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the last job attempt + started. + retry_config (google.cloud.scheduler_v1.types.RetryConfig): + Settings that determine the retry behavior. + attempt_deadline (google.protobuf.duration_pb2.Duration): + The deadline for job attempts. If the request handler does + not respond by this deadline then the request is cancelled + and the attempt is marked as a ``DEADLINE_EXCEEDED`` + failure. The failed attempt can be viewed in execution logs. + Cloud Scheduler will retry the job according to the + [RetryConfig][google.cloud.scheduler.v1.RetryConfig]. + + The default and the allowed values depend on the type of + target: + + - For [HTTP + targets][google.cloud.scheduler.v1.Job.http_target], the + default is 3 minutes. The deadline must be in the + interval [15 seconds, 30 minutes]. + + - For [App Engine HTTP + targets][google.cloud.scheduler.v1.Job.app_engine_http_target], + 0 indicates that the request has the default deadline. + The default deadline depends on the scaling type of the + service: 10 minutes for standard apps with automatic + scaling, 24 hours for standard apps with manual and basic + scaling, and 60 minutes for flex apps. If the request + deadline is set, it must be in the interval [15 seconds, + 24 hours 15 seconds]. + + - For [Pub/Sub + targets][google.cloud.scheduler.v1.Job.pubsub_target], + this field is ignored. + """ + + class State(proto.Enum): + r"""State of the job. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + ENABLED (1): + The job is executing normally. + PAUSED (2): + The job is paused by the user. It will not execute. A user + can intentionally pause the job using + [PauseJobRequest][google.cloud.scheduler.v1.PauseJobRequest]. + DISABLED (3): + The job is disabled by the system due to + error. The user cannot directly set a job to be + disabled. + UPDATE_FAILED (4): + The job state resulting from a failed + [CloudScheduler.UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob] + operation. To recover a job from this state, retry + [CloudScheduler.UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob] + until a successful response is received. 
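A minimal sketch of the recovery pattern described for ``UPDATE_FAILED`` (resource names are hypothetical; the bounded retry loop is illustrative, not library code):

.. code-block:: python

    from google.api_core import exceptions as api_exceptions
    from google.cloud import scheduler_v1
    from google.protobuf import field_mask_pb2

    client = scheduler_v1.CloudSchedulerClient()
    request = scheduler_v1.UpdateJobRequest(
        job=scheduler_v1.Job(
            name="projects/my-project/locations/us-central1/jobs/my-job",
            description="updated description",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    for attempt in range(5):
        try:
            # Retry UpdateJob until a successful response is received.
            updated = client.update_job(request=request)
            break
        except api_exceptions.GoogleAPICallError:
            continue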
+ """ + STATE_UNSPECIFIED = 0 + ENABLED = 1 + PAUSED = 2 + DISABLED = 3 + UPDATE_FAILED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + pubsub_target: target.PubsubTarget = proto.Field( + proto.MESSAGE, + number=4, + oneof="target", + message=target.PubsubTarget, + ) + app_engine_http_target: target.AppEngineHttpTarget = proto.Field( + proto.MESSAGE, + number=5, + oneof="target", + message=target.AppEngineHttpTarget, + ) + http_target: target.HttpTarget = proto.Field( + proto.MESSAGE, + number=6, + oneof="target", + message=target.HttpTarget, + ) + schedule: str = proto.Field( + proto.STRING, + number=20, + ) + time_zone: str = proto.Field( + proto.STRING, + number=21, + ) + user_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=11, + message=status_pb2.Status, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=17, + message=timestamp_pb2.Timestamp, + ) + last_attempt_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) + retry_config: "RetryConfig" = proto.Field( + proto.MESSAGE, + number=19, + message="RetryConfig", + ) + attempt_deadline: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=22, + message=duration_pb2.Duration, + ) + + +class RetryConfig(proto.Message): + r"""Settings that determine the retry behavior. + + By default, if a job does not complete successfully (meaning that an + acknowledgement is not received from the handler, then it will be + retried with exponential backoff according to the settings in + [RetryConfig][google.cloud.scheduler.v1.RetryConfig]. + + Attributes: + retry_count (int): + The number of attempts that the system will make to run a + job using the exponential backoff procedure described by + [max_doublings][google.cloud.scheduler.v1.RetryConfig.max_doublings]. + + The default value of retry_count is zero. + + If retry_count is zero, a job attempt will *not* be retried + if it fails. Instead the Cloud Scheduler system will wait + for the next scheduled execution time. + + If retry_count is set to a non-zero number then Cloud + Scheduler will retry failed attempts, using exponential + backoff, retry_count times, or until the next scheduled + execution time, whichever comes first. + + Values greater than 5 and negative values are not allowed. + max_retry_duration (google.protobuf.duration_pb2.Duration): + The time limit for retrying a failed job, measured from time + when an execution was first attempted. If specified with + [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count], + the job will be retried until both limits are reached. + + The default value for max_retry_duration is zero, which + means retry duration is unlimited. + min_backoff_duration (google.protobuf.duration_pb2.Duration): + The minimum amount of time to wait before + retrying a job after it fails. + + The default value of this field is 5 seconds. + max_backoff_duration (google.protobuf.duration_pb2.Duration): + The maximum amount of time to wait before + retrying a job after it fails. + + The default value of this field is 1 hour. + max_doublings (int): + The time between retries will double ``max_doublings`` + times. 
+ + A job's retry interval starts at + [min_backoff_duration][google.cloud.scheduler.v1.RetryConfig.min_backoff_duration], + then doubles ``max_doublings`` times, then increases + linearly, and finally retries at intervals of + [max_backoff_duration][google.cloud.scheduler.v1.RetryConfig.max_backoff_duration] + up to + [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count] + times. + + For example, if + [min_backoff_duration][google.cloud.scheduler.v1.RetryConfig.min_backoff_duration] + is 10s, + [max_backoff_duration][google.cloud.scheduler.v1.RetryConfig.max_backoff_duration] + is 300s, and ``max_doublings`` is 3, then a job will + first be retried in 10s. The retry interval will double + three times, and then increase linearly by 2^3 \* 10s. + Finally, the job will retry at intervals of + [max_backoff_duration][google.cloud.scheduler.v1.RetryConfig.max_backoff_duration] + until the job has been attempted + [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count] + times. Thus, the requests will retry at 10s, 20s, 40s, 80s, + 160s, 240s, 300s, 300s, .... + + The default value of this field is 5. + """ + + retry_count: int = proto.Field( + proto.INT32, + number=1, + ) + max_retry_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + min_backoff_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + max_backoff_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + max_doublings: int = proto.Field( + proto.INT32, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/target.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/target.py new file mode 100644 index 000000000000..0da4df8c9c85 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1/types/target.py @@ -0,0 +1,527 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.scheduler.v1", + manifest={ + "HttpMethod", + "HttpTarget", + "AppEngineHttpTarget", + "PubsubTarget", + "AppEngineRouting", + "OAuthToken", + "OidcToken", + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to execute the job. + + Values: + HTTP_METHOD_UNSPECIFIED (0): + HTTP method unspecified. Defaults to POST. + POST (1): + HTTP POST + GET (2): + HTTP GET + HEAD (3): + HTTP HEAD + PUT (4): + HTTP PUT + DELETE (5): + HTTP DELETE + PATCH (6): + HTTP PATCH + OPTIONS (7): + HTTP OPTIONS + """ + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + PATCH = 6 + OPTIONS = 7 + + +class HttpTarget(proto.Message): + r"""Http target.
The job will be pushed to the job handler by means of + an HTTP request via an + [http_method][google.cloud.scheduler.v1.HttpTarget.http_method] such + as HTTP POST, HTTP GET, etc. The job is acknowledged by means of an + HTTP response code in the range [200 - 299]. A failure to receive a + response constitutes a failed execution. For a redirected request, + the response returned by the redirected request is considered. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + Required. The full URI path that the request will be sent + to. This string must begin with either "http://" or + "https://". Some examples of valid values for + [uri][google.cloud.scheduler.v1.HttpTarget.uri] are: + ``http://acme.com`` and ``https://acme.com/sales:8080``. + Cloud Scheduler will encode some characters for safety and + compatibility. The maximum allowed URL length is 2083 + characters after encoding. + http_method (google.cloud.scheduler_v1.types.HttpMethod): + Which HTTP method to use for the request. + headers (MutableMapping[str, str]): + The user can specify HTTP request headers to send with the + job's HTTP request. This map contains the header field names + and values. Repeated headers are not supported, but a header + value can contain commas. These headers represent a subset + of the headers that will accompany the job's HTTP request. + Some HTTP request headers will be ignored or replaced. A + partial list of headers that will be ignored or replaced is + below: + + - Host: This will be computed by Cloud Scheduler and + derived from + [uri][google.cloud.scheduler.v1.HttpTarget.uri]. + + - ``Content-Length``: This will be computed by Cloud + Scheduler. + - ``User-Agent``: This will be set to + ``"Google-Cloud-Scheduler"``. + - ``X-Google-*``: Google internal use only. + - ``X-AppEngine-*``: Google internal use only. + - ``X-CloudScheduler``: This header will be set to true. + - ``X-CloudScheduler-JobName``: This header will contain + the job name. + - ``X-CloudScheduler-ScheduleTime``: For Cloud Scheduler + jobs specified in the unix-cron format, this header will + contain the job schedule time in RFC3339 UTC "Zulu" + format. + + The total size of headers must be less than 80KB. + body (bytes): + HTTP request body. A request body is allowed only if the + HTTP method is POST, PUT, or PATCH. It is an error to set + body on a job with an incompatible + [HttpMethod][google.cloud.scheduler.v1.HttpMethod]. + oauth_token (google.cloud.scheduler_v1.types.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + + This field is a member of `oneof`_ ``authorization_header``. + oidc_token (google.cloud.scheduler_v1.types.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. + + This field is a member of `oneof`_ ``authorization_header``. 
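For instance (service account, project, and URL are hypothetical), an HTTP target that attaches an OIDC token to each request might be built as:

.. code-block:: python

    from google.cloud import scheduler_v1

    target = scheduler_v1.HttpTarget(
        uri="https://my-service-abc123-uc.a.run.app/run",
        http_method=scheduler_v1.HttpMethod.POST,
        body=b'{"source": "scheduler"}',
        oidc_token=scheduler_v1.OidcToken(
            service_account_email="invoker@my-project.iam.gserviceaccount.com",
        ),
    )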
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=2, + enum="HttpMethod", + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + body: bytes = proto.Field( + proto.BYTES, + number=4, + ) + oauth_token: "OAuthToken" = proto.Field( + proto.MESSAGE, + number=5, + oneof="authorization_header", + message="OAuthToken", + ) + oidc_token: "OidcToken" = proto.Field( + proto.MESSAGE, + number=6, + oneof="authorization_header", + message="OidcToken", + ) + + +class AppEngineHttpTarget(proto.Message): + r"""App Engine target. The job will be pushed to a job handler by means + of an HTTP request via an + [http_method][google.cloud.scheduler.v1.AppEngineHttpTarget.http_method] + such as HTTP POST, HTTP GET, etc. The job is acknowledged by means + of an HTTP response code in the range [200 - 299]. Error 503 is + considered an App Engine system error instead of an application + error. Requests returning error 503 will be retried regardless of + retry configuration and not counted against retry counts. Any other + response code, or a failure to receive a response before the + deadline, constitutes a failed attempt. + + Attributes: + http_method (google.cloud.scheduler_v1.types.HttpMethod): + The HTTP method to use for the request. PATCH + and OPTIONS are not permitted. + app_engine_routing (google.cloud.scheduler_v1.types.AppEngineRouting): + App Engine Routing setting for the job. + relative_uri (str): + The relative URI. + + The relative URL must begin with "/" and must be a valid + HTTP relative URL. It can contain a path, query string + arguments, and ``#`` fragments. If the relative URL is + empty, then the root path "/" will be used. No spaces are + allowed, and the maximum length allowed is 2083 characters. + headers (MutableMapping[str, str]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the job is created. + + Cloud Scheduler sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Scheduler will + append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + - ``X-CloudScheduler``: This header will be set to true. + - ``X-CloudScheduler-JobName``: This header will contain + the job name. + - ``X-CloudScheduler-ScheduleTime``: For Cloud Scheduler + jobs specified in the unix-cron format, this header will + contain the job schedule time in RFC3339 UTC "Zulu" + format. + + If the job has an + [body][google.cloud.scheduler.v1.AppEngineHttpTarget.body], + Cloud Scheduler sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explictly setting ``Content-Type`` to a + particular media type when the job is created. For + example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Scheduler. + This value is output only. It cannot be changed. + + The headers below are output only. They cannot be set or + overridden: + + - ``X-Google-*``: For Google internal use only. + - ``X-AppEngine-*``: For Google internal use only. + + In addition, some App Engine headers, which contain + job-specific information, are also be sent to the job + handler. + body (bytes): + Body. 
+ + HTTP request body. A request body is allowed only if the + HTTP method is POST or PUT. It will result in invalid + argument error to set a body on a job with an incompatible + [HttpMethod][google.cloud.scheduler.v1.HttpMethod]. + """ + + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=1, + enum="HttpMethod", + ) + app_engine_routing: "AppEngineRouting" = proto.Field( + proto.MESSAGE, + number=2, + message="AppEngineRouting", + ) + relative_uri: str = proto.Field( + proto.STRING, + number=3, + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + body: bytes = proto.Field( + proto.BYTES, + number=5, + ) + + +class PubsubTarget(proto.Message): + r"""Pub/Sub target. The job will be delivered by publishing a + message to the given Pub/Sub topic. + + Attributes: + topic_name (str): + Required. The name of the Cloud Pub/Sub topic to which + messages will be published when a job is delivered. The + topic name must be in the same format as required by + Pub/Sub's + `PublishRequest.name `__, + for example ``projects/PROJECT_ID/topics/TOPIC_ID``. + + The topic must be in the same project as the Cloud Scheduler + job. + data (bytes): + The message payload for PubsubMessage. + + Pubsub message must contain either non-empty + data, or at least one attribute. + attributes (MutableMapping[str, str]): + Attributes for PubsubMessage. + + Pubsub message must contain either non-empty + data, or at least one attribute. + """ + + topic_name: str = proto.Field( + proto.STRING, + number=1, + ) + data: bytes = proto.Field( + proto.BYTES, + number=3, + ) + attributes: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Attributes: + service (str): + App service. + + By default, the job is sent to the service which + is the default service when the job is + attempted. + version (str): + App version. + + By default, the job is sent to the version which + is the default version when the job is + attempted. + instance (str): + App instance. + + By default, the job is sent to an instance which is + available when the job is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the job is sent to. + + For more information about how App Engine requests are + routed, see + `here `__. + + The host is constructed as: + + - ``host = [application_domain_name]``\ + ``| [service] + '.' + [application_domain_name]``\ + ``| [version] + '.' + [application_domain_name]``\ + ``| [version_dot_service]+ '.' + [application_domain_name]``\ + ``| [instance] + '.' + [application_domain_name]``\ + ``| [instance_dot_service] + '.' + [application_domain_name]``\ + ``| [instance_dot_version] + '.' + [application_domain_name]``\ + ``| [instance_dot_version_dot_service] + '.' + [application_domain_name]`` + + - ``application_domain_name`` = The domain name of the app, + for example .appspot.com, which is associated with the + job's project ID. 
+ + - ``service =`` + [service][google.cloud.scheduler.v1.AppEngineRouting.service] + + - ``version =`` + [version][google.cloud.scheduler.v1.AppEngineRouting.version] + + - ``version_dot_service =`` + [version][google.cloud.scheduler.v1.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.scheduler.v1.AppEngineRouting.service] + + - ``instance =`` + [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] + + - ``instance_dot_service =`` + [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] + ``+ '.' +`` + [service][google.cloud.scheduler.v1.AppEngineRouting.service] + + - ``instance_dot_version =`` + [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.scheduler.v1.AppEngineRouting.version] + + - ``instance_dot_version_dot_service =`` + [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.scheduler.v1.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.scheduler.v1.AppEngineRouting.service] + + If + [service][google.cloud.scheduler.v1.AppEngineRouting.service] + is empty, then the job will be sent to the service which is + the default service when the job is attempted. + + If + [version][google.cloud.scheduler.v1.AppEngineRouting.version] + is empty, then the job will be sent to the version which is + the default version when the job is attempted. + + If + [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] + is empty, then the job will be sent to an instance which is + available when the job is attempted. + + If + [service][google.cloud.scheduler.v1.AppEngineRouting.service], + [version][google.cloud.scheduler.v1.AppEngineRouting.version], + or + [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] + is invalid, then the job will be sent to the default version + of the default service when the job is attempted. + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + instance: str = proto.Field( + proto.STRING, + number=3, + ) + host: str = proto.Field( + proto.STRING, + number=4, + ) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the job. The caller must + have iam.serviceAccounts.actAs permission for the service + account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + scope: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the job. The caller must + have iam.serviceAccounts.actAs permission for the service + account. 
+ audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. + """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/__init__.py new file mode 100644 index 000000000000..19ed06b27217 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/__init__.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.scheduler_v1beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_scheduler import CloudSchedulerAsyncClient, CloudSchedulerClient +from .types.cloudscheduler import ( + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + ListJobsRequest, + ListJobsResponse, + PauseJobRequest, + ResumeJobRequest, + RunJobRequest, + UpdateJobRequest, +) +from .types.job import Job, RetryConfig +from .types.target import ( + AppEngineHttpTarget, + AppEngineRouting, + HttpMethod, + HttpTarget, + OAuthToken, + OidcToken, + PubsubTarget, +) + +__all__ = ( + "CloudSchedulerAsyncClient", + "AppEngineHttpTarget", + "AppEngineRouting", + "CloudSchedulerClient", + "CreateJobRequest", + "DeleteJobRequest", + "GetJobRequest", + "HttpMethod", + "HttpTarget", + "Job", + "ListJobsRequest", + "ListJobsResponse", + "OAuthToken", + "OidcToken", + "PauseJobRequest", + "PubsubTarget", + "ResumeJobRequest", + "RetryConfig", + "RunJobRequest", + "UpdateJobRequest", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_metadata.json b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_metadata.json new file mode 100644 index 000000000000..96ff086c82eb --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_metadata.json @@ -0,0 +1,148 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.scheduler_v1beta1", + "protoPackage": "google.cloud.scheduler.v1beta1", + "schema": "1.0", + "services": { + "CloudScheduler": { + "clients": { + "grpc": { + "libraryClient": "CloudSchedulerClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "PauseJob": { + "methods": [ + "pause_job" + ] + }, + "ResumeJob": { + "methods": [ + "resume_job" + ] + }, + "RunJob": { + "methods": [ + "run_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudSchedulerAsyncClient", + "rpcs": { + 
"CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "PauseJob": { + "methods": [ + "pause_job" + ] + }, + "ResumeJob": { + "methods": [ + "resume_job" + ] + }, + "RunJob": { + "methods": [ + "run_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "rest": { + "libraryClient": "CloudSchedulerClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "PauseJob": { + "methods": [ + "pause_job" + ] + }, + "ResumeJob": { + "methods": [ + "resume_job" + ] + }, + "RunJob": { + "methods": [ + "run_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_version.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_version.py new file mode 100644 index 000000000000..9ac1d4a82044 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.1" # {x-release-please-version} diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/py.typed b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/py.typed new file mode 100644 index 000000000000..ca4d524bcfe7 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-scheduler package uses inline types. diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/__init__.py new file mode 100644 index 000000000000..7000a11dd2bb --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CloudSchedulerAsyncClient +from .client import CloudSchedulerClient + +__all__ = ( + "CloudSchedulerClient", + "CloudSchedulerAsyncClient", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py new file mode 100644 index 000000000000..ede6f7fbeddf --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/async_client.py @@ -0,0 +1,1289 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.scheduler_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.scheduler_v1beta1.services.cloud_scheduler import pagers +from google.cloud.scheduler_v1beta1.types import cloudscheduler +from google.cloud.scheduler_v1beta1.types import job +from google.cloud.scheduler_v1beta1.types import job as gcs_job +from google.cloud.scheduler_v1beta1.types import target + +from .client import CloudSchedulerClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudSchedulerTransport +from .transports.grpc_asyncio import CloudSchedulerGrpcAsyncIOTransport + + +class CloudSchedulerAsyncClient: + """The Cloud Scheduler API allows external entities to reliably + schedule asynchronous jobs. + """ + + _client: CloudSchedulerClient + + DEFAULT_ENDPOINT = CloudSchedulerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudSchedulerClient.DEFAULT_MTLS_ENDPOINT + + job_path = staticmethod(CloudSchedulerClient.job_path) + parse_job_path = staticmethod(CloudSchedulerClient.parse_job_path) + topic_path = staticmethod(CloudSchedulerClient.topic_path) + parse_topic_path = staticmethod(CloudSchedulerClient.parse_topic_path) + common_billing_account_path = staticmethod( + CloudSchedulerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudSchedulerClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CloudSchedulerClient.common_folder_path) + parse_common_folder_path = staticmethod( + CloudSchedulerClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + CloudSchedulerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + CloudSchedulerClient.parse_common_organization_path + ) + common_project_path = staticmethod(CloudSchedulerClient.common_project_path) + parse_common_project_path = staticmethod( + CloudSchedulerClient.parse_common_project_path + ) + common_location_path = staticmethod(CloudSchedulerClient.common_location_path) + parse_common_location_path = staticmethod( + CloudSchedulerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudSchedulerAsyncClient: The constructed client. 
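A brief usage sketch for these credential helpers (the key path is hypothetical); ``from_service_account_file``, defined just below, accepts a filename instead of a parsed dict:

.. code-block:: python

    from google.cloud import scheduler_v1beta1

    client = scheduler_v1beta1.CloudSchedulerAsyncClient.from_service_account_file(
        "/path/to/service-account.json"
    )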
+ """ + return CloudSchedulerClient.from_service_account_info.__func__(CloudSchedulerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudSchedulerAsyncClient: The constructed client. + """ + return CloudSchedulerClient.from_service_account_file.__func__(CloudSchedulerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudSchedulerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CloudSchedulerTransport: + """Returns the transport used by the client instance. + + Returns: + CloudSchedulerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CloudSchedulerClient).get_transport_class, type(CloudSchedulerClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudSchedulerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud scheduler client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudSchedulerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudSchedulerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_jobs( + self, + request: Optional[Union[cloudscheduler.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""Lists jobs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + async def sample_list_jobs(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1beta1.types.ListJobsRequest, dict]]): + The request object. Request message for listing jobs using + [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.services.cloud_scheduler.pagers.ListJobsAsyncPager: + Response message for listing jobs using + [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job( + self, + request: Optional[Union[cloudscheduler.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Gets a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + async def sample_get_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1beta1.types.GetJobRequest, dict]]): + The request object. Request message for + [GetJob][google.cloud.scheduler.v1beta1.CloudScheduler.GetJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. 
+ The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_job( + self, + request: Optional[Union[cloudscheduler.CreateJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + job: Optional[gcs_job.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Creates a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + async def sample_create_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.CreateJobRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1beta1.types.CreateJobRequest, dict]]): + The request object. Request message for + [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (:class:`google.cloud.scheduler_v1beta1.types.Job`): + Required. The job to add. The user can optionally + specify a name for the job in + [name][google.cloud.scheduler.v1beta1.Job.name]. + [name][google.cloud.scheduler.v1beta1.Job.name] cannot + be the same as an existing job. 
If a name is not + specified then the system will generate a random unique + name that will be returned + ([name][google.cloud.scheduler.v1beta1.Job.name]) in the + response. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.CreateJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job( + self, + request: Optional[Union[cloudscheduler.UpdateJobRequest, dict]] = None, + *, + job: Optional[gcs_job.Job] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Updates a job. + + If successful, the updated + [Job][google.cloud.scheduler.v1beta1.Job] is returned. If the + job does not exist, ``NOT_FOUND`` is returned. + + If UpdateJob does not successfully return, it is possible for + the job to be in an + [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1beta1.Job.State.UPDATE_FAILED] + state. A job in this state may not be executed. If this happens, + retry the UpdateJob request until a successful response is + received. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + async def sample_update_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.UpdateJobRequest( + ) + + # Make the request + response = await client.update_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1beta1.types.UpdateJobRequest, dict]]): + The request object. Request message for + [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. + job (:class:`google.cloud.scheduler_v1beta1.types.Job`): + Required. The new job properties. + [name][google.cloud.scheduler.v1beta1.Job.name] must be + specified. + + Output only fields cannot be modified using UpdateJob. + Any value specified for an output only field will be + ignored. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask used to specify which fields + of the job are being updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.UpdateJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if job is not None: + request.job = job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("job.name", request.job.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job( + self, + request: Optional[Union[cloudscheduler.DeleteJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + async def sample_delete_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_job(request=request) + + Args: + request (Optional[Union[google.cloud.scheduler_v1beta1.types.DeleteJobRequest, dict]]): + The request object. Request message for deleting a job using + [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.DeleteJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def pause_job( + self, + request: Optional[Union[cloudscheduler.PauseJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Pauses a job. + + If a job is paused then the system will stop executing the job + until it is re-enabled via + [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. + The state of the job is stored in + [state][google.cloud.scheduler.v1beta1.Job.state]; if paused it + will be set to + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. 
+ A job must be in + [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED] + to be paused. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + async def sample_pause_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.PauseJobRequest( + name="name_value", + ) + + # Make the request + response = await client.pause_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1beta1.types.PauseJobRequest, dict]]): + The request object. Request message for + [PauseJob][google.cloud.scheduler.v1beta1.CloudScheduler.PauseJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.PauseJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def resume_job( + self, + request: Optional[Union[cloudscheduler.ResumeJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Resume a job. 
+ + This method reenables a job after it has been + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. + The state of a job is stored in + [Job.state][google.cloud.scheduler.v1beta1.Job.state]; after + calling this method it will be set to + [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED]. + A job must be in + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED] + to be resumed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + async def sample_resume_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.ResumeJobRequest( + name="name_value", + ) + + # Make the request + response = await client.resume_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1beta1.types.ResumeJobRequest, dict]]): + The request object. Request message for + [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.ResumeJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
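+        # On success, the returned Job's ``state`` is expected to be
+        # Job.State.ENABLED, per the method description above.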
+ return response + + async def run_job( + self, + request: Optional[Union[cloudscheduler.RunJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Forces a job to run now. + + When this method is called, Cloud Scheduler will + dispatch the job, even if the job is already running. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + async def sample_run_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerAsyncClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.RunJobRequest( + name="name_value", + ) + + # Make the request + response = await client.run_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.scheduler_v1beta1.types.RunJobRequest, dict]]): + The request object. Request message for forcing a job to run now using + [RunJob][google.cloud.scheduler.v1beta1.CloudScheduler.RunJob]. + name (:class:`str`): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudscheduler.RunJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_job, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "CloudSchedulerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudSchedulerAsyncClient",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/client.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/client.py new file mode 100644 index 000000000000..6df974f39d2d --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/client.py @@ -0,0 +1,1490 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.scheduler_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.cloud.scheduler_v1beta1.services.cloud_scheduler import pagers +from google.cloud.scheduler_v1beta1.types import cloudscheduler +from google.cloud.scheduler_v1beta1.types import job +from google.cloud.scheduler_v1beta1.types import job as gcs_job +from google.cloud.scheduler_v1beta1.types import target + +from .transports.base import DEFAULT_CLIENT_INFO, CloudSchedulerTransport +from .transports.grpc import CloudSchedulerGrpcTransport +from .transports.grpc_asyncio import CloudSchedulerGrpcAsyncIOTransport +from .transports.rest import CloudSchedulerRestTransport + + +class CloudSchedulerClientMeta(type): + """Metaclass for the CloudScheduler client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[CloudSchedulerTransport]] + _transport_registry["grpc"] = CloudSchedulerGrpcTransport + _transport_registry["grpc_asyncio"] = CloudSchedulerGrpcAsyncIOTransport + _transport_registry["rest"] = CloudSchedulerRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudSchedulerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudSchedulerClient(metaclass=CloudSchedulerClientMeta): + """The Cloud Scheduler API allows external entities to reliably + schedule asynchronous jobs. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudscheduler.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudSchedulerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudSchedulerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudSchedulerTransport: + """Returns the transport used by the client instance. + + Returns: + CloudSchedulerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def job_path( + project: str, + location: str, + job: str, + ) -> str: + """Returns a fully-qualified job string.""" + return "projects/{project}/locations/{location}/jobs/{job}".format( + project=project, + location=location, + job=job, + ) + + @staticmethod + def parse_job_path(path: str) -> Dict[str, str]: + """Parses a job path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def topic_path( + project: str, + topic: str, + ) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str, str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, CloudSchedulerTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cloud scheduler client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CloudSchedulerTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudSchedulerTransport): + # transport is a CloudSchedulerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_jobs( + self, + request: Optional[Union[cloudscheduler.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsPager: + r"""Lists jobs. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + def sample_list_jobs(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.scheduler_v1beta1.types.ListJobsRequest, dict]): + The request object. Request message for listing jobs using + [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.services.cloud_scheduler.pagers.ListJobsPager: + Response message for listing jobs using + [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.ListJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.ListJobsRequest): + request = cloudscheduler.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
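+        # Subsequent pages are fetched lazily as the pager is iterated,
+        # re-invoking the wrapped RPC with the captured request and metadata.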
+ return response + + def get_job( + self, + request: Optional[Union[cloudscheduler.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Gets a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + def sample_get_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1beta1.types.GetJobRequest, dict]): + The request object. Request message for + [GetJob][google.cloud.scheduler.v1beta1.CloudScheduler.GetJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.GetJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.GetJobRequest): + request = cloudscheduler.GetJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_job( + self, + request: Optional[Union[cloudscheduler.CreateJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + job: Optional[gcs_job.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Creates a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + def sample_create_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.CreateJobRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1beta1.types.CreateJobRequest, dict]): + The request object. Request message for + [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (google.cloud.scheduler_v1beta1.types.Job): + Required. The job to add. The user can optionally + specify a name for the job in + [name][google.cloud.scheduler.v1beta1.Job.name]. + [name][google.cloud.scheduler.v1beta1.Job.name] cannot + be the same as an existing job. If a name is not + specified then the system will generate a random unique + name that will be returned + ([name][google.cloud.scheduler.v1beta1.Job.name]) in the + response. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.CreateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.CreateJobRequest): + request = cloudscheduler.CreateJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
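+        # Only flattened arguments that were explicitly supplied are copied
+        # onto the request; omitted ones keep their proto defaults.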
+ if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_job( + self, + request: Optional[Union[cloudscheduler.UpdateJobRequest, dict]] = None, + *, + job: Optional[gcs_job.Job] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Updates a job. + + If successful, the updated + [Job][google.cloud.scheduler.v1beta1.Job] is returned. If the + job does not exist, ``NOT_FOUND`` is returned. + + If UpdateJob does not successfully return, it is possible for + the job to be in an + [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1beta1.Job.State.UPDATE_FAILED] + state. A job in this state may not be executed. If this happens, + retry the UpdateJob request until a successful response is + received. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + def sample_update_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.UpdateJobRequest( + ) + + # Make the request + response = client.update_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1beta1.types.UpdateJobRequest, dict]): + The request object. Request message for + [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. + job (google.cloud.scheduler_v1beta1.types.Job): + Required. The new job properties. + [name][google.cloud.scheduler.v1beta1.Job.name] must be + specified. + + Output only fields cannot be modified using UpdateJob. + Any value specified for an output only field will be + ignored. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields + of the job are being updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. 
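+
+        For example, a sketch of a masked update that changes only the
+        schedule (resource names below are placeholders):
+
+        .. code-block:: python
+
+            from google.protobuf import field_mask_pb2
+
+            job = scheduler_v1beta1.Job(
+                name="projects/my-project/locations/us-central1/jobs/my-job",
+                schedule="*/30 * * * *",
+            )
+            update_mask = field_mask_pb2.FieldMask(paths=["schedule"])
+            response = client.update_job(job=job, update_mask=update_mask)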
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.UpdateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.UpdateJobRequest): + request = cloudscheduler.UpdateJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if job is not None: + request.job = job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("job.name", request.job.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job( + self, + request: Optional[Union[cloudscheduler.DeleteJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + def sample_delete_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + + Args: + request (Union[google.cloud.scheduler_v1beta1.types.DeleteJobRequest, dict]): + The request object. Request message for deleting a job using + [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.DeleteJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.DeleteJobRequest): + request = cloudscheduler.DeleteJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def pause_job( + self, + request: Optional[Union[cloudscheduler.PauseJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Pauses a job. + + If a job is paused then the system will stop executing the job + until it is re-enabled via + [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. + The state of the job is stored in + [state][google.cloud.scheduler.v1beta1.Job.state]; if paused it + will be set to + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. + A job must be in + [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED] + to be paused. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + def sample_pause_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.PauseJobRequest( + name="name_value", + ) + + # Make the request + response = client.pause_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1beta1.types.PauseJobRequest, dict]): + The request object. Request message for + [PauseJob][google.cloud.scheduler.v1beta1.CloudScheduler.PauseJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. 
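+
+            As a sketch of the full pause/resume round trip (reusing the
+            ``client`` from the sample above; the job name is a placeholder):
+
+            .. code-block:: python
+
+                name = "projects/my-project/locations/us-central1/jobs/my-job"
+                paused = client.pause_job(name=name)    # Job.state becomes PAUSED
+                resumed = client.resume_job(name=name)  # Job.state becomes ENABLED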
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.PauseJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.PauseJobRequest): + request = cloudscheduler.PauseJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resume_job( + self, + request: Optional[Union[cloudscheduler.ResumeJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Resume a job. + + This method reenables a job after it has been + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. + The state of a job is stored in + [Job.state][google.cloud.scheduler.v1beta1.Job.state]; after + calling this method it will be set to + [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED]. + A job must be in + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED] + to be resumed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + def sample_resume_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.ResumeJobRequest( + name="name_value", + ) + + # Make the request + response = client.resume_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1beta1.types.ResumeJobRequest, dict]): + The request object. Request message for + [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.ResumeJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.ResumeJobRequest): + request = cloudscheduler.ResumeJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_job( + self, + request: Optional[Union[cloudscheduler.RunJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Forces a job to run now. + + When this method is called, Cloud Scheduler will + dispatch the job, even if the job is already running. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import scheduler_v1beta1 + + def sample_run_job(): + # Create a client + client = scheduler_v1beta1.CloudSchedulerClient() + + # Initialize request argument(s) + request = scheduler_v1beta1.RunJobRequest( + name="name_value", + ) + + # Make the request + response = client.run_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.scheduler_v1beta1.types.RunJobRequest, dict]): + The request object. Request message for forcing a job to run now using + [RunJob][google.cloud.scheduler.v1beta1.CloudScheduler.RunJob]. + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.scheduler_v1beta1.types.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudscheduler.RunJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudscheduler.RunJobRequest): + request = cloudscheduler.RunJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CloudSchedulerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
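+        # ``retry`` and ``timeout`` arrive as gapic_v1.method.DEFAULT sentinels
+        # unless the caller overrode them, so the wrapped method falls back to
+        # the defaults configured in ``wrap_method`` above.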
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudSchedulerClient",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/pagers.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/pagers.py new file mode 100644 index 000000000000..a360ffd41435 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/pagers.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.scheduler_v1beta1.types import cloudscheduler, job + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.scheduler_v1beta1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.scheduler_v1beta1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudscheduler.ListJobsResponse], + request: cloudscheduler.ListJobsRequest, + response: cloudscheduler.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.scheduler_v1beta1.types.ListJobsRequest): + The initial request object. + response (google.cloud.scheduler_v1beta1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudscheduler.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudscheduler.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[job.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.scheduler_v1beta1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.scheduler_v1beta1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudscheduler.ListJobsResponse]], + request: cloudscheduler.ListJobsRequest, + response: cloudscheduler.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.scheduler_v1beta1.types.ListJobsRequest): + The initial request object. + response (google.cloud.scheduler_v1beta1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
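+
+            For illustration, typical iteration might look like this sketch
+            (the ``parent`` value is a placeholder; the pager is returned by
+            the async client's ``list_jobs``):
+
+            .. code-block:: python
+
+                from google.cloud import scheduler_v1beta1
+
+                async def print_job_names():
+                    client = scheduler_v1beta1.CloudSchedulerAsyncClient()
+                    pager = await client.list_jobs(parent="projects/p/locations/l")
+                    async for job in pager:  # later pages are fetched lazily
+                        print(job.name)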
+ """ + self._method = method + self._request = cloudscheduler.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudscheduler.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[job.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/__init__.py new file mode 100644 index 000000000000..80615da54855 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudSchedulerTransport +from .grpc import CloudSchedulerGrpcTransport +from .grpc_asyncio import CloudSchedulerGrpcAsyncIOTransport +from .rest import CloudSchedulerRestInterceptor, CloudSchedulerRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudSchedulerTransport]] +_transport_registry["grpc"] = CloudSchedulerGrpcTransport +_transport_registry["grpc_asyncio"] = CloudSchedulerGrpcAsyncIOTransport +_transport_registry["rest"] = CloudSchedulerRestTransport + +__all__ = ( + "CloudSchedulerTransport", + "CloudSchedulerGrpcTransport", + "CloudSchedulerGrpcAsyncIOTransport", + "CloudSchedulerRestTransport", + "CloudSchedulerRestInterceptor", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/base.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/base.py new file mode 100644 index 000000000000..294e888320ef --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/base.py @@ -0,0 +1,318 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.scheduler_v1beta1 import gapic_version as package_version +from google.cloud.scheduler_v1beta1.types import cloudscheduler +from google.cloud.scheduler_v1beta1.types import job +from google.cloud.scheduler_v1beta1.types import job as gcs_job + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CloudSchedulerTransport(abc.ABC): + """Abstract transport class for CloudScheduler.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudscheduler.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
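+        # Precedence, as implemented below: passing both an explicit
+        # credentials object and a credentials file is an error; otherwise a
+        # credentials file is loaded when given, an explicit credentials
+        # object is used as-is, and Application Default Credentials are the
+        # fallback.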
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.create_job: gapic_v1.method.wrap_method( + self.create_job, + default_timeout=600.0, + client_info=client_info, + ), + self.update_job: gapic_v1.method.wrap_method( + self.update_job, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_job: gapic_v1.method.wrap_method( + self.delete_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.pause_job: gapic_v1.method.wrap_method( + self.pause_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.resume_job: gapic_v1.method.wrap_method( + self.resume_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.run_job: gapic_v1.method.wrap_method( + self.run_job, + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_jobs( + self, + ) -> Callable[ + [cloudscheduler.ListJobsRequest], + Union[ + cloudscheduler.ListJobsResponse, Awaitable[cloudscheduler.ListJobsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_job( + self, + ) -> Callable[[cloudscheduler.GetJobRequest], Union[job.Job, Awaitable[job.Job]]]: + raise NotImplementedError() + + @property + def create_job( + self, + ) -> Callable[ + [cloudscheduler.CreateJobRequest], Union[gcs_job.Job, Awaitable[gcs_job.Job]] + ]: + raise NotImplementedError() + + @property + def update_job( + self, + ) -> Callable[ + [cloudscheduler.UpdateJobRequest], Union[gcs_job.Job, Awaitable[gcs_job.Job]] + ]: + raise NotImplementedError() + + @property + def delete_job( + self, + ) -> Callable[ + [cloudscheduler.DeleteJobRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def pause_job( + self, + ) -> Callable[[cloudscheduler.PauseJobRequest], Union[job.Job, Awaitable[job.Job]]]: + raise NotImplementedError() + + @property + def resume_job( + self, + ) -> Callable[ + [cloudscheduler.ResumeJobRequest], Union[job.Job, Awaitable[job.Job]] + ]: + raise NotImplementedError() + + @property + def run_job( + self, + ) -> Callable[[cloudscheduler.RunJobRequest], Union[job.Job, Awaitable[job.Job]]]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CloudSchedulerTransport",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/grpc.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/grpc.py new file mode 100644 index 000000000000..031a8ce2b07b --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/grpc.py @@ -0,0 +1,511 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.cloud.location import locations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+import grpc # type: ignore
+
+from google.cloud.scheduler_v1beta1.types import cloudscheduler
+from google.cloud.scheduler_v1beta1.types import job
+from google.cloud.scheduler_v1beta1.types import job as gcs_job
+
+from .base import DEFAULT_CLIENT_INFO, CloudSchedulerTransport
+
+
+class CloudSchedulerGrpcTransport(CloudSchedulerTransport):
+ """gRPC backend transport for CloudScheduler.
+
+ The Cloud Scheduler API allows external entities to reliably
+ schedule asynchronous jobs.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "cloudscheduler.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[grpc.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudscheduler.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service."""
+ return self._grpc_channel
+
+ @property
+ def list_jobs(
+ self,
+ ) -> Callable[[cloudscheduler.ListJobsRequest], cloudscheduler.ListJobsResponse]:
+ r"""Return a callable for the list jobs method over gRPC.
+
+ Lists jobs.
+
+ Returns:
+ Callable[[~.ListJobsRequest],
+ ~.ListJobsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_jobs" not in self._stubs:
+ self._stubs["list_jobs"] = self.grpc_channel.unary_unary(
+ "/google.cloud.scheduler.v1beta1.CloudScheduler/ListJobs",
+ request_serializer=cloudscheduler.ListJobsRequest.serialize,
+ response_deserializer=cloudscheduler.ListJobsResponse.deserialize,
+ )
+ return self._stubs["list_jobs"]
+
+ @property
+ def get_job(self) -> Callable[[cloudscheduler.GetJobRequest], job.Job]:
+ r"""Return a callable for the get job method over gRPC.
+
+ Gets a job.
+
+ Returns:
+ Callable[[~.GetJobRequest],
+ ~.Job]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_job" not in self._stubs:
+ self._stubs["get_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.scheduler.v1beta1.CloudScheduler/GetJob",
+ request_serializer=cloudscheduler.GetJobRequest.serialize,
+ response_deserializer=job.Job.deserialize,
+ )
+ return self._stubs["get_job"]
+
+ @property
+ def create_job(self) -> Callable[[cloudscheduler.CreateJobRequest], gcs_job.Job]:
+ r"""Return a callable for the create job method over gRPC.
+
+ Creates a job.
+
+ Returns:
+ Callable[[~.CreateJobRequest],
+ ~.Job]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job" not in self._stubs: + self._stubs["create_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/CreateJob", + request_serializer=cloudscheduler.CreateJobRequest.serialize, + response_deserializer=gcs_job.Job.deserialize, + ) + return self._stubs["create_job"] + + @property + def update_job(self) -> Callable[[cloudscheduler.UpdateJobRequest], gcs_job.Job]: + r"""Return a callable for the update job method over gRPC. + + Updates a job. + + If successful, the updated + [Job][google.cloud.scheduler.v1beta1.Job] is returned. If the + job does not exist, ``NOT_FOUND`` is returned. + + If UpdateJob does not successfully return, it is possible for + the job to be in an + [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1beta1.Job.State.UPDATE_FAILED] + state. A job in this state may not be executed. If this happens, + retry the UpdateJob request until a successful response is + received. + + Returns: + Callable[[~.UpdateJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job" not in self._stubs: + self._stubs["update_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/UpdateJob", + request_serializer=cloudscheduler.UpdateJobRequest.serialize, + response_deserializer=gcs_job.Job.deserialize, + ) + return self._stubs["update_job"] + + @property + def delete_job( + self, + ) -> Callable[[cloudscheduler.DeleteJobRequest], empty_pb2.Empty]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job" not in self._stubs: + self._stubs["delete_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/DeleteJob", + request_serializer=cloudscheduler.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_job"] + + @property + def pause_job(self) -> Callable[[cloudscheduler.PauseJobRequest], job.Job]: + r"""Return a callable for the pause job method over gRPC. + + Pauses a job. + + If a job is paused then the system will stop executing the job + until it is re-enabled via + [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. + The state of the job is stored in + [state][google.cloud.scheduler.v1beta1.Job.state]; if paused it + will be set to + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. + A job must be in + [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED] + to be paused. + + Returns: + Callable[[~.PauseJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
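+ # The stub is built once and cached in ``self._stubs``, so repeated
+ # property accesses reuse the same bound RPC rather than re-creating it.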
+ if "pause_job" not in self._stubs:
+ self._stubs["pause_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.scheduler.v1beta1.CloudScheduler/PauseJob",
+ request_serializer=cloudscheduler.PauseJobRequest.serialize,
+ response_deserializer=job.Job.deserialize,
+ )
+ return self._stubs["pause_job"]
+
+ @property
+ def resume_job(self) -> Callable[[cloudscheduler.ResumeJobRequest], job.Job]:
+ r"""Return a callable for the resume job method over gRPC.
+
+ Resume a job.
+
+ This method reenables a job after it has been
+ [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED].
+ The state of a job is stored in
+ [Job.state][google.cloud.scheduler.v1beta1.Job.state]; after
+ calling this method it will be set to
+ [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED].
+ A job must be in
+ [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]
+ to be resumed.
+
+ Returns:
+ Callable[[~.ResumeJobRequest],
+ ~.Job]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "resume_job" not in self._stubs:
+ self._stubs["resume_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.scheduler.v1beta1.CloudScheduler/ResumeJob",
+ request_serializer=cloudscheduler.ResumeJobRequest.serialize,
+ response_deserializer=job.Job.deserialize,
+ )
+ return self._stubs["resume_job"]
+
+ @property
+ def run_job(self) -> Callable[[cloudscheduler.RunJobRequest], job.Job]:
+ r"""Return a callable for the run job method over gRPC.
+
+ Forces a job to run now.
+
+ When this method is called, Cloud Scheduler will
+ dispatch the job, even if the job is already running.
+
+ Returns:
+ Callable[[~.RunJobRequest],
+ ~.Job]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "run_job" not in self._stubs:
+ self._stubs["run_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.scheduler.v1beta1.CloudScheduler/RunJob",
+ request_serializer=cloudscheduler.RunJobRequest.serialize,
+ response_deserializer=job.Job.deserialize,
+ )
+ return self._stubs["run_job"]
+
+ def close(self):
+ self.grpc_channel.close()
+
+ @property
+ def list_locations(
+ self,
+ ) -> Callable[
+ [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+ ]:
+ r"""Return a callable for the list locations method over gRPC."""
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_locations" not in self._stubs:
+ self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+ "/google.cloud.location.Locations/ListLocations",
+ request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+ response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+ )
+ return self._stubs["list_locations"]
+
+ @property
+ def get_location(
+ self,
+ ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+ r"""Return a callable for the get location method over gRPC."""
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CloudSchedulerGrpcTransport",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/grpc_asyncio.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/grpc_asyncio.py new file mode 100644 index 000000000000..eefdcf741c0c --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/grpc_asyncio.py @@ -0,0 +1,520 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.scheduler_v1beta1.types import cloudscheduler +from google.cloud.scheduler_v1beta1.types import job +from google.cloud.scheduler_v1beta1.types import job as gcs_job + +from .base import DEFAULT_CLIENT_INFO, CloudSchedulerTransport +from .grpc import CloudSchedulerGrpcTransport + + +class CloudSchedulerGrpcAsyncIOTransport(CloudSchedulerTransport): + """gRPC AsyncIO backend transport for CloudScheduler. + + The Cloud Scheduler API allows external entities to reliably + schedule asynchronous jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudscheduler.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudscheduler.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel.
It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_jobs( + self, + ) -> Callable[ + [cloudscheduler.ListJobsRequest], Awaitable[cloudscheduler.ListJobsResponse] + ]: + r"""Return a callable for the list jobs method over gRPC. + + Lists jobs. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
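+
+        Example of a typical call through the async client that wraps this
+        transport (the project and location values are placeholders):
+
+        .. code-block:: python
+
+            from google.cloud import scheduler_v1beta1
+
+            async def show_jobs():
+                client = scheduler_v1beta1.CloudSchedulerAsyncClient()
+                pager = await client.list_jobs(
+                    parent="projects/my-project/locations/us-central1"
+                )
+                async for job in pager:
+                    print(job.name)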
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_jobs" not in self._stubs: + self._stubs["list_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/ListJobs", + request_serializer=cloudscheduler.ListJobsRequest.serialize, + response_deserializer=cloudscheduler.ListJobsResponse.deserialize, + ) + return self._stubs["list_jobs"] + + @property + def get_job(self) -> Callable[[cloudscheduler.GetJobRequest], Awaitable[job.Job]]: + r"""Return a callable for the get job method over gRPC. + + Gets a job. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job" not in self._stubs: + self._stubs["get_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/GetJob", + request_serializer=cloudscheduler.GetJobRequest.serialize, + response_deserializer=job.Job.deserialize, + ) + return self._stubs["get_job"] + + @property + def create_job( + self, + ) -> Callable[[cloudscheduler.CreateJobRequest], Awaitable[gcs_job.Job]]: + r"""Return a callable for the create job method over gRPC. + + Creates a job. + + Returns: + Callable[[~.CreateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job" not in self._stubs: + self._stubs["create_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/CreateJob", + request_serializer=cloudscheduler.CreateJobRequest.serialize, + response_deserializer=gcs_job.Job.deserialize, + ) + return self._stubs["create_job"] + + @property + def update_job( + self, + ) -> Callable[[cloudscheduler.UpdateJobRequest], Awaitable[gcs_job.Job]]: + r"""Return a callable for the update job method over gRPC. + + Updates a job. + + If successful, the updated + [Job][google.cloud.scheduler.v1beta1.Job] is returned. If the + job does not exist, ``NOT_FOUND`` is returned. + + If UpdateJob does not successfully return, it is possible for + the job to be in an + [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1beta1.Job.State.UPDATE_FAILED] + state. A job in this state may not be executed. If this happens, + retry the UpdateJob request until a successful response is + received. + + Returns: + Callable[[~.UpdateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_job" not in self._stubs: + self._stubs["update_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/UpdateJob", + request_serializer=cloudscheduler.UpdateJobRequest.serialize, + response_deserializer=gcs_job.Job.deserialize, + ) + return self._stubs["update_job"] + + @property + def delete_job( + self, + ) -> Callable[[cloudscheduler.DeleteJobRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job" not in self._stubs: + self._stubs["delete_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/DeleteJob", + request_serializer=cloudscheduler.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_job"] + + @property + def pause_job( + self, + ) -> Callable[[cloudscheduler.PauseJobRequest], Awaitable[job.Job]]: + r"""Return a callable for the pause job method over gRPC. + + Pauses a job. + + If a job is paused then the system will stop executing the job + until it is re-enabled via + [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. + The state of the job is stored in + [state][google.cloud.scheduler.v1beta1.Job.state]; if paused it + will be set to + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. + A job must be in + [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED] + to be paused. + + Returns: + Callable[[~.PauseJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_job" not in self._stubs: + self._stubs["pause_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/PauseJob", + request_serializer=cloudscheduler.PauseJobRequest.serialize, + response_deserializer=job.Job.deserialize, + ) + return self._stubs["pause_job"] + + @property + def resume_job( + self, + ) -> Callable[[cloudscheduler.ResumeJobRequest], Awaitable[job.Job]]: + r"""Return a callable for the resume job method over gRPC. + + Resume a job. + + This method reenables a job after it has been + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. + The state of a job is stored in + [Job.state][google.cloud.scheduler.v1beta1.Job.state]; after + calling this method it will be set to + [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED]. + A job must be in + [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED] + to be resumed. + + Returns: + Callable[[~.ResumeJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "resume_job" not in self._stubs: + self._stubs["resume_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/ResumeJob", + request_serializer=cloudscheduler.ResumeJobRequest.serialize, + response_deserializer=job.Job.deserialize, + ) + return self._stubs["resume_job"] + + @property + def run_job(self) -> Callable[[cloudscheduler.RunJobRequest], Awaitable[job.Job]]: + r"""Return a callable for the run job method over gRPC. + + Forces a job to run now. + + When this method is called, Cloud Scheduler will + dispatch the job, even if the job is already running. + + Returns: + Callable[[~.RunJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_job" not in self._stubs: + self._stubs["run_job"] = self.grpc_channel.unary_unary( + "/google.cloud.scheduler.v1beta1.CloudScheduler/RunJob", + request_serializer=cloudscheduler.RunJobRequest.serialize, + response_deserializer=job.Job.deserialize, + ) + return self._stubs["run_job"] + + def close(self): + return self.grpc_channel.close() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("CloudSchedulerGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/rest.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/rest.py new file mode 100644 index 000000000000..ad180a29f132 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/services/cloud_scheduler/transports/rest.py @@ -0,0 +1,1381 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.scheduler_v1beta1.types import cloudscheduler +from google.cloud.scheduler_v1beta1.types import job +from google.cloud.scheduler_v1beta1.types import job as gcs_job + +from .base import CloudSchedulerTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudSchedulerRestInterceptor: + """Interceptor for CloudScheduler. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudSchedulerRestTransport. + + .. 
code-block:: python
+        class MyCustomCloudSchedulerInterceptor(CloudSchedulerRestInterceptor):
+            def pre_create_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_job(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_job(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_jobs(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_jobs(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_pause_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_pause_job(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_resume_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_resume_job(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_run_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_run_job(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_job(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_job(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = CloudSchedulerRestTransport(interceptor=MyCustomCloudSchedulerInterceptor())
+        client = CloudSchedulerClient(transport=transport)
+
+
+    """
+
+    def pre_create_job(
+        self,
+        request: cloudscheduler.CreateJobRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[cloudscheduler.CreateJobRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_job
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudScheduler server.
+        """
+        return request, metadata
+
+    def post_create_job(self, response: gcs_job.Job) -> gcs_job.Job:
+        """Post-rpc interceptor for create_job
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CloudScheduler server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_job(
+        self,
+        request: cloudscheduler.DeleteJobRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[cloudscheduler.DeleteJobRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_job
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudScheduler server.
+        """
+        return request, metadata
+
+    def pre_get_job(
+        self, request: cloudscheduler.GetJobRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[cloudscheduler.GetJobRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_job
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudScheduler server.
+ """ + return request, metadata + + def post_get_job(self, response: job.Job) -> job.Job: + """Post-rpc interceptor for get_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_list_jobs( + self, + request: cloudscheduler.ListJobsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_list_jobs( + self, response: cloudscheduler.ListJobsResponse + ) -> cloudscheduler.ListJobsResponse: + """Post-rpc interceptor for list_jobs + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_pause_job( + self, + request: cloudscheduler.PauseJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.PauseJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for pause_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_pause_job(self, response: job.Job) -> job.Job: + """Post-rpc interceptor for pause_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_resume_job( + self, + request: cloudscheduler.ResumeJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.ResumeJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resume_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_resume_job(self, response: job.Job) -> job.Job: + """Post-rpc interceptor for resume_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_run_job( + self, request: cloudscheduler.RunJobRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudscheduler.RunJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_run_job(self, response: job.Job) -> job.Job: + """Post-rpc interceptor for run_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_update_job( + self, + request: cloudscheduler.UpdateJobRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudscheduler.UpdateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. 
+ """ + return request, metadata + + def post_update_job(self, response: gcs_job.Job) -> gcs_job.Job: + """Post-rpc interceptor for update_job + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudScheduler server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudScheduler server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudSchedulerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudSchedulerRestInterceptor + + +class CloudSchedulerRestTransport(CloudSchedulerTransport): + """REST backend transport for CloudScheduler. + + The Cloud Scheduler API allows external entities to reliably + schedule asynchronous jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudscheduler.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudSchedulerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CloudSchedulerRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateJob(CloudSchedulerRestStub):
+        def __hash__(self):
+            return hash("CreateJob")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: cloudscheduler.CreateJobRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gcs_job.Job:
+            r"""Call the create job method over HTTP.
+
+            Args:
+                request (~.cloudscheduler.CreateJobRequest):
+                    The request object. Request message for
+                    [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gcs_job.Job:
+                    Configuration for a job.
+                    The maximum allowed size for a job is
+                    1MB.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{parent=projects/*/locations/*}/jobs", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_create_job(request, metadata) + pb_request = cloudscheduler.CreateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcs_job.Job() + pb_resp = gcs_job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job(resp) + return resp + + class _DeleteJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("DeleteJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.DeleteJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete job method over HTTP. + + Args: + request (~.cloudscheduler.DeleteJobRequest): + The request object. Request message for deleting a job using + [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta1/{name=projects/*/locations/*/jobs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_job(request, metadata) + pb_request = cloudscheduler.DeleteJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("GetJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.GetJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Call the get job method over HTTP. + + Args: + request (~.cloudscheduler.GetJobRequest): + The request object. Request message for + [GetJob][google.cloud.scheduler.v1beta1.CloudScheduler.GetJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/jobs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_job(request, metadata) + pb_request = cloudscheduler.GetJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = job.Job() + pb_resp = job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job(resp) + return resp + + class _ListJobs(CloudSchedulerRestStub): + def __hash__(self): + return hash("ListJobs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.ListJobsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudscheduler.ListJobsResponse: + r"""Call the list jobs method over HTTP. + + Args: + request (~.cloudscheduler.ListJobsRequest): + The request object. Request message for listing jobs using + [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudscheduler.ListJobsResponse: + Response message for listing jobs using + [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{parent=projects/*/locations/*}/jobs", + }, + ] + request, metadata = self._interceptor.pre_list_jobs(request, metadata) + pb_request = cloudscheduler.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
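+            # Response parsing follows the same shape for every method here:
+            # instantiate an empty proto-plus message, take its underlying
+            # protobuf via .pb(), and let json_format.Parse fill that message
+            # in place; the proto-plus wrapper then exposes the populated
+            # fields to the caller.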
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudscheduler.ListJobsResponse() + pb_resp = cloudscheduler.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_jobs(resp) + return resp + + class _PauseJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("PauseJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.PauseJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Call the pause job method over HTTP. + + Args: + request (~.cloudscheduler.PauseJobRequest): + The request object. Request message for + [PauseJob][google.cloud.scheduler.v1beta1.CloudScheduler.PauseJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{name=projects/*/locations/*/jobs/*}:pause", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_pause_job(request, metadata) + pb_request = cloudscheduler.PauseJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
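+            # The http rule above declares ``"body": "*"``, meaning every
+            # request field not bound in the URI template is serialized into
+            # the JSON body. PauseJobRequest carries only ``name``, which is
+            # bound in the path, so the posted body here is effectively an
+            # empty JSON object.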
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = job.Job() + pb_resp = job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pause_job(resp) + return resp + + class _ResumeJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("ResumeJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.ResumeJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Call the resume job method over HTTP. + + Args: + request (~.cloudscheduler.ResumeJobRequest): + The request object. Request message for + [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{name=projects/*/locations/*/jobs/*}:resume", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resume_job(request, metadata) + pb_request = cloudscheduler.ResumeJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = job.Job() + pb_resp = job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume_job(resp) + return resp + + class _RunJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("RunJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.RunJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> job.Job: + r"""Call the run job method over HTTP. + + Args: + request (~.cloudscheduler.RunJobRequest): + The request object. Request message for forcing a job to run now using + [RunJob][google.cloud.scheduler.v1beta1.CloudScheduler.RunJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{name=projects/*/locations/*/jobs/*}:run", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_job(request, metadata) + pb_request = cloudscheduler.RunJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
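+            # Caller-side, forcing a run is a single call, and the returned
+            # Job reflects the state after Cloud Scheduler dispatches it
+            # (the resource name is a placeholder):
+            #
+            #     job = client.run_job(
+            #         name="projects/my-project/locations/us-central1/jobs/my-job"
+            #     )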
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = job.Job() + pb_resp = job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_job(resp) + return resp + + class _UpdateJob(CloudSchedulerRestStub): + def __hash__(self): + return hash("UpdateJob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudscheduler.UpdateJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcs_job.Job: + r"""Call the update job method over HTTP. + + Args: + request (~.cloudscheduler.UpdateJobRequest): + The request object. Request message for + [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcs_job.Job: + Configuration for a job. + The maximum allowed size for a job is + 1MB. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta1/{job.name=projects/*/locations/*/jobs/*}", + "body": "job", + }, + ] + request, metadata = self._interceptor.pre_update_job(request, metadata) + pb_request = cloudscheduler.UpdateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcs_job.Job() + pb_resp = gcs_job.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job(resp) + return resp + + @property + def create_job(self) -> Callable[[cloudscheduler.CreateJobRequest], gcs_job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_job( + self, + ) -> Callable[[cloudscheduler.DeleteJobRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job(self) -> Callable[[cloudscheduler.GetJobRequest], job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_jobs( + self, + ) -> Callable[[cloudscheduler.ListJobsRequest], cloudscheduler.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def pause_job(self) -> Callable[[cloudscheduler.PauseJobRequest], job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PauseJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def resume_job(self) -> Callable[[cloudscheduler.ResumeJobRequest], job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResumeJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_job(self) -> Callable[[cloudscheduler.RunJobRequest], job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job(self) -> Callable[[cloudscheduler.UpdateJobRequest], gcs_job.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudSchedulerRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudSchedulerRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudSchedulerRestTransport",) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/__init__.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/__init__.py new file mode 100644 index 000000000000..26a2dd654c46 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/__init__.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cloudscheduler import ( + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + ListJobsRequest, + ListJobsResponse, + PauseJobRequest, + ResumeJobRequest, + RunJobRequest, + UpdateJobRequest, +) +from .job import Job, RetryConfig +from .target import ( + AppEngineHttpTarget, + AppEngineRouting, + HttpMethod, + HttpTarget, + OAuthToken, + OidcToken, + PubsubTarget, +) + +__all__ = ( + "CreateJobRequest", + "DeleteJobRequest", + "GetJobRequest", + "ListJobsRequest", + "ListJobsResponse", + "PauseJobRequest", + "ResumeJobRequest", + "RunJobRequest", + "UpdateJobRequest", + "Job", + "RetryConfig", + "AppEngineHttpTarget", + "AppEngineRouting", + "HttpTarget", + "OAuthToken", + "OidcToken", + "PubsubTarget", + "HttpMethod", +) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/cloudscheduler.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/cloudscheduler.py new file mode 100644 index 000000000000..404f69101eb7 --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/cloudscheduler.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
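With the REST transport and the type re-exports in place, the client can already be exercised end to end. A minimal usage sketch (not part of this diff; the project and location IDs are hypothetical placeholders, and application default credentials are assumed):

```python
# Minimal sketch: selecting the new REST transport on the public client.
# "my-project" / "us-central1" are hypothetical placeholders.
from google.cloud import scheduler_v1beta1

client = scheduler_v1beta1.CloudSchedulerClient(transport="rest")

parent = "projects/my-project/locations/us-central1"
# The pager iterates transparently, following ListJobsResponse.next_page_token.
for job in client.list_jobs(parent=parent):
    print(job.name, job.state)
```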
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import field_mask_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.scheduler_v1beta1.types import job as gcs_job
+
+__protobuf__ = proto.module(
+    package="google.cloud.scheduler.v1beta1",
+    manifest={
+        "ListJobsRequest",
+        "ListJobsResponse",
+        "GetJobRequest",
+        "CreateJobRequest",
+        "UpdateJobRequest",
+        "DeleteJobRequest",
+        "PauseJobRequest",
+        "ResumeJobRequest",
+        "RunJobRequest",
+    },
+)
+
+
+class ListJobsRequest(proto.Message):
+    r"""Request message for listing jobs using
+    [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs].
+
+    Attributes:
+        parent (str):
+            Required. The location name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID``.
+        filter (str):
+            ``filter`` can be used to specify a subset of jobs.
+
+            If ``filter`` equals ``target_config="HttpConfig"``, then
+            the http target jobs are retrieved. If ``filter`` equals
+            ``target_config="PubSubConfig"``, then the Pub/Sub target
+            jobs are retrieved. If ``filter`` equals
+            ``labels.foo=value1 labels.foo=value2``, then only jobs which
+            are labeled with foo=value1 AND foo=value2 will be returned.
+        page_size (int):
+            Requested page size.
+
+            The maximum page size is 500. If unspecified, the page size
+            will be the maximum. Fewer jobs than requested might be
+            returned, even if more jobs exist; use next_page_token to
+            determine if more jobs exist.
+        page_token (str):
+            A token identifying a page of results the server will
+            return. To request the first page of results, page_token must
+            be empty. To request the next page of results, page_token
+            must be the value of
+            [next_page_token][google.cloud.scheduler.v1beta1.ListJobsResponse.next_page_token]
+            returned from the previous call to
+            [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs].
+            It is an error to switch the value of
+            [filter][google.cloud.scheduler.v1beta1.ListJobsRequest.filter]
+            or
+            [order_by][google.cloud.scheduler.v1beta1.ListJobsRequest.order_by]
+            while iterating through pages.
+        legacy_app_engine_cron (bool):
+            This field is used to manage the legacy App Engine Cron jobs
+            using the Cloud Scheduler API. If the field is set to true,
+            the jobs in the \__cron queue will be listed instead.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=5,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    legacy_app_engine_cron: bool = proto.Field(
+        proto.BOOL,
+        number=7,
+    )
+
+
+class ListJobsResponse(proto.Message):
+    r"""Response message for listing jobs using
+    [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs].
+
+    Attributes:
+        jobs (MutableSequence[google.cloud.scheduler_v1beta1.types.Job]):
+            The list of jobs.
+        next_page_token (str):
+            A token to retrieve the next page of results. Pass this
+            value in the
+            [page_token][google.cloud.scheduler.v1beta1.ListJobsRequest.page_token]
+            field in the subsequent call to
+            [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]
+            to retrieve the next page of results. If this is empty, it
+            indicates that there are no more results through which to
+            paginate.
+
+            The page token is valid for only 2 hours.
+ """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence[gcs_job.Job] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcs_job.Job, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetJobRequest(proto.Message): + r"""Request message for + [GetJob][google.cloud.scheduler.v1beta1.CloudScheduler.GetJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateJobRequest(proto.Message): + r"""Request message for + [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID``. + job (google.cloud.scheduler_v1beta1.types.Job): + Required. The job to add. The user can optionally specify a + name for the job in + [name][google.cloud.scheduler.v1beta1.Job.name]. + [name][google.cloud.scheduler.v1beta1.Job.name] cannot be + the same as an existing job. If a name is not specified then + the system will generate a random unique name that will be + returned ([name][google.cloud.scheduler.v1beta1.Job.name]) + in the response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job: gcs_job.Job = proto.Field( + proto.MESSAGE, + number=2, + message=gcs_job.Job, + ) + + +class UpdateJobRequest(proto.Message): + r"""Request message for + [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. + + Attributes: + job (google.cloud.scheduler_v1beta1.types.Job): + Required. The new job properties. + [name][google.cloud.scheduler.v1beta1.Job.name] must be + specified. + + Output only fields cannot be modified using UpdateJob. Any + value specified for an output only field will be ignored. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields of the + job are being updated. + """ + + job: gcs_job.Job = proto.Field( + proto.MESSAGE, + number=1, + message=gcs_job.Job, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteJobRequest(proto.Message): + r"""Request message for deleting a job using + [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + legacy_app_engine_cron (bool): + This field is used to manage the legacy App Engine Cron jobs + using the Cloud Scheduler API. If the field is set to true, + the job in the \__cron queue with the corresponding name + will be deleted instead. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + legacy_app_engine_cron: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class PauseJobRequest(proto.Message): + r"""Request message for + [PauseJob][google.cloud.scheduler.v1beta1.CloudScheduler.PauseJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ResumeJobRequest(proto.Message): + r"""Request message for + [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RunJobRequest(proto.Message): + r"""Request message for forcing a job to run now using + [RunJob][google.cloud.scheduler.v1beta1.CloudScheduler.RunJob]. + + Attributes: + name (str): + Required. The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + legacy_app_engine_cron (bool): + This field is used to manage the legacy App Engine Cron jobs + using the Cloud Scheduler API. If the field is set to true, + the job in the \__cron queue with the corresponding name + will be forced to run instead. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + legacy_app_engine_cron: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/job.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/job.py new file mode 100644 index 000000000000..92e3f25ccfbe --- /dev/null +++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/job.py @@ -0,0 +1,388 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.scheduler_v1beta1.types import target + +__protobuf__ = proto.module( + package="google.cloud.scheduler.v1beta1", + manifest={ + "Job", + "RetryConfig", + }, +) + + +class Job(proto.Message): + r"""Configuration for a job. + The maximum allowed size for a job is 1MB. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Optionally caller-specified in + [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob], + after which it becomes output only. + + The job name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the job's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``JOB_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. 
+ description (str): + Optionally caller-specified in + [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob] + or + [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. + + A human-readable description for the job. This string must + not contain more than 500 characters. + pubsub_target (google.cloud.scheduler_v1beta1.types.PubsubTarget): + Pub/Sub target. + + This field is a member of `oneof`_ ``target``. + app_engine_http_target (google.cloud.scheduler_v1beta1.types.AppEngineHttpTarget): + App Engine HTTP target. + + This field is a member of `oneof`_ ``target``. + http_target (google.cloud.scheduler_v1beta1.types.HttpTarget): + HTTP target. + + This field is a member of `oneof`_ ``target``. + schedule (str): + Required, except when used with + [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. + + Describes the schedule on which the job will be executed. + + The schedule can be either of the following types: + + - `Crontab `__ + - English-like + `schedule `__ + + As a general rule, execution ``n + 1`` of a job will not + begin until execution ``n`` has finished. Cloud Scheduler + will never allow two simultaneously outstanding executions. + For example, this implies that if the ``n+1``\ th execution + is scheduled to run at 16:00 but the ``n``\ th execution + takes until 16:15, the ``n+1``\ th execution will not start + until ``16:15``. A scheduled start time will be delayed if + the previous execution has not ended when its scheduled time + occurs. + + If + [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count] + > 0 and a job attempt fails, the job will be tried a total + of + [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count] + times, with exponential backoff, until the next scheduled + start time. + time_zone (str): + Specifies the time zone to be used in interpreting + [schedule][google.cloud.scheduler.v1beta1.Job.schedule]. The + value of this field must be a time zone name from the `tz + database `__. + + Note that some time zones include a provision for daylight + savings time. The rules for daylight saving time are + determined by the chosen tz. For UTC use the string "utc". + If a time zone is not specified, the default will be in UTC + (also known as GMT). + user_update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time of the job. + state (google.cloud.scheduler_v1beta1.types.Job.State): + Output only. State of the job. + status (google.rpc.status_pb2.Status): + Output only. The response from the target for + the last attempted execution. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The next time the job is + scheduled. Note that this may be a retry of a + previously failed attempt or the next execution + time according to the schedule. + last_attempt_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the last job attempt + started. + retry_config (google.cloud.scheduler_v1beta1.types.RetryConfig): + Settings that determine the retry behavior. + attempt_deadline (google.protobuf.duration_pb2.Duration): + The deadline for job attempts. If the request handler does + not respond by this deadline then the request is cancelled + and the attempt is marked as a ``DEADLINE_EXCEEDED`` + failure. The failed attempt can be viewed in execution logs. + Cloud Scheduler will retry the job according to the + [RetryConfig][google.cloud.scheduler.v1beta1.RetryConfig]. 
+ + The default and the allowed values depend on the type of + target: + + - For [HTTP + targets][google.cloud.scheduler.v1beta1.Job.http_target], + the default is 3 minutes. The deadline must be in the + interval [15 seconds, 30 minutes]. + + - For [App Engine HTTP + targets][google.cloud.scheduler.v1beta1.Job.app_engine_http_target], + 0 indicates that the request has the default deadline. + The default deadline depends on the scaling type of the + service: 10 minutes for standard apps with automatic + scaling, 24 hours for standard apps with manual and basic + scaling, and 60 minutes for flex apps. If the request + deadline is set, it must be in the interval [15 seconds, + 24 hours 15 seconds]. + + - For [Pub/Sub + targets][google.cloud.scheduler.v1beta1.Job.pubsub_target], + this field is ignored. + legacy_app_engine_cron (bool): + Immutable. This field is used to manage the + legacy App Engine Cron jobs using the Cloud + Scheduler API. If the field is set to true, the + job will be considered a legacy job. Note that + App Engine Cron jobs have fewer features than + Cloud Scheduler jobs, e.g., are only limited to + App Engine targets. + """ + + class State(proto.Enum): + r"""State of the job. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + ENABLED (1): + The job is executing normally. + PAUSED (2): + The job is paused by the user. It will not execute. A user + can intentionally pause the job using + [PauseJobRequest][google.cloud.scheduler.v1beta1.PauseJobRequest]. + DISABLED (3): + The job is disabled by the system due to + error. The user cannot directly set a job to be + disabled. + UPDATE_FAILED (4): + The job state resulting from a failed + [CloudScheduler.UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob] + operation. To recover a job from this state, retry + [CloudScheduler.UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob] + until a successful response is received. 
+        """
+        STATE_UNSPECIFIED = 0
+        ENABLED = 1
+        PAUSED = 2
+        DISABLED = 3
+        UPDATE_FAILED = 4
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    pubsub_target: target.PubsubTarget = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof="target",
+        message=target.PubsubTarget,
+    )
+    app_engine_http_target: target.AppEngineHttpTarget = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof="target",
+        message=target.AppEngineHttpTarget,
+    )
+    http_target: target.HttpTarget = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        oneof="target",
+        message=target.HttpTarget,
+    )
+    schedule: str = proto.Field(
+        proto.STRING,
+        number=20,
+    )
+    time_zone: str = proto.Field(
+        proto.STRING,
+        number=21,
+    )
+    user_update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message=timestamp_pb2.Timestamp,
+    )
+    state: State = proto.Field(
+        proto.ENUM,
+        number=10,
+        enum=State,
+    )
+    status: status_pb2.Status = proto.Field(
+        proto.MESSAGE,
+        number=11,
+        message=status_pb2.Status,
+    )
+    schedule_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=17,
+        message=timestamp_pb2.Timestamp,
+    )
+    last_attempt_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=18,
+        message=timestamp_pb2.Timestamp,
+    )
+    retry_config: "RetryConfig" = proto.Field(
+        proto.MESSAGE,
+        number=19,
+        message="RetryConfig",
+    )
+    attempt_deadline: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=22,
+        message=duration_pb2.Duration,
+    )
+    legacy_app_engine_cron: bool = proto.Field(
+        proto.BOOL,
+        number=23,
+    )
+
+
+class RetryConfig(proto.Message):
+    r"""Settings that determine the retry behavior.
+
+    By default, if a job does not complete successfully (meaning that an
+    acknowledgement is not received from the handler), then it will be
+    retried with exponential backoff according to the settings in
+    [RetryConfig][google.cloud.scheduler.v1beta1.RetryConfig].
+
+    Attributes:
+        retry_count (int):
+            The number of attempts that the system will make to run a
+            job using the exponential backoff procedure described by
+            [max_doublings][google.cloud.scheduler.v1beta1.RetryConfig.max_doublings].
+
+            The default value of retry_count is zero.
+
+            If retry_count is zero, a job attempt will *not* be retried
+            if it fails. Instead, the Cloud Scheduler system will wait
+            for the next scheduled execution time.
+
+            If retry_count is set to a non-zero number, then Cloud
+            Scheduler will retry failed attempts, using exponential
+            backoff, retry_count times, or until the next scheduled
+            execution time, whichever comes first.
+
+            Values greater than 5 and negative values are not allowed.
+        max_retry_duration (google.protobuf.duration_pb2.Duration):
+            The time limit for retrying a failed job, measured from the
+            time when an execution was first attempted. If specified with
+            [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count],
+            the job will be retried until both limits are reached.
+
+            The default value for max_retry_duration is zero, which
+            means retry duration is unlimited.
+        min_backoff_duration (google.protobuf.duration_pb2.Duration):
+            The minimum amount of time to wait before
+            retrying a job after it fails.
+
+            The default value of this field is 5 seconds.
+        max_backoff_duration (google.protobuf.duration_pb2.Duration):
+            The maximum amount of time to wait before
+            retrying a job after it fails.
+
+            The default value of this field is 1 hour.
+        max_doublings (int):
+            The time between retries will double ``max_doublings``
+            times.
+
+            A job's retry interval starts at
+            [min_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.min_backoff_duration],
+            then doubles ``max_doublings`` times, then increases
+            linearly, and finally retries at intervals of
+            [max_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.max_backoff_duration]
+            up to
+            [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count]
+            times.
+
+            For example, if
+            [min_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.min_backoff_duration]
+            is 10s,
+            [max_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.max_backoff_duration]
+            is 300s, and ``max_doublings`` is 3, then a job will
+            first be retried in 10s. The retry interval will double
+            three times, and then increase linearly by 2^3 \* 10s.
+            Finally, the job will retry at intervals of
+            [max_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.max_backoff_duration]
+            until the job has been attempted
+            [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count]
+            times. Thus, the requests will retry at 10s, 20s, 40s, 80s,
+            160s, 240s, 300s, 300s, ....
+
+            The default value of this field is 5.
+    """
+
+    retry_count: int = proto.Field(
+        proto.INT32,
+        number=1,
+    )
+    max_retry_duration: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=duration_pb2.Duration,
+    )
+    min_backoff_duration: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=duration_pb2.Duration,
+    )
+    max_backoff_duration: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=duration_pb2.Duration,
+    )
+    max_doublings: int = proto.Field(
+        proto.INT32,
+        number=5,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/target.py b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/target.py
new file mode 100644
index 000000000000..ea180248383c
--- /dev/null
+++ b/packages/google-cloud-scheduler/google/cloud/scheduler_v1beta1/types/target.py
@@ -0,0 +1,528 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.scheduler.v1beta1",
+    manifest={
+        "HttpMethod",
+        "HttpTarget",
+        "AppEngineHttpTarget",
+        "PubsubTarget",
+        "AppEngineRouting",
+        "OAuthToken",
+        "OidcToken",
+    },
+)
+
+
+class HttpMethod(proto.Enum):
+    r"""The HTTP method used to execute the job.
+
+    Values:
+        HTTP_METHOD_UNSPECIFIED (0):
+            HTTP method unspecified. Defaults to POST.
+ POST (1): + HTTP POST + GET (2): + HTTP GET + HEAD (3): + HTTP HEAD + PUT (4): + HTTP PUT + DELETE (5): + HTTP DELETE + PATCH (6): + HTTP PATCH + OPTIONS (7): + HTTP OPTIONS + """ + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + PATCH = 6 + OPTIONS = 7 + + +class HttpTarget(proto.Message): + r"""Http target. The job will be pushed to the job handler by means of + an HTTP request via an + [http_method][google.cloud.scheduler.v1beta1.HttpTarget.http_method] + such as HTTP POST, HTTP GET, etc. The job is acknowledged by means + of an HTTP response code in the range [200 - 299]. A failure to + receive a response constitutes a failed execution. For a redirected + request, the response returned by the redirected request is + considered. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + Required. The full URI path that the request will be sent + to. This string must begin with either "http://" or + "https://". Some examples of valid values for + [uri][google.cloud.scheduler.v1beta1.HttpTarget.uri] are: + ``http://acme.com`` and ``https://acme.com/sales:8080``. + Cloud Scheduler will encode some characters for safety and + compatibility. The maximum allowed URL length is 2083 + characters after encoding. + http_method (google.cloud.scheduler_v1beta1.types.HttpMethod): + Which HTTP method to use for the request. + headers (MutableMapping[str, str]): + The user can specify HTTP request headers to send with the + job's HTTP request. This map contains the header field names + and values. Repeated headers are not supported, but a header + value can contain commas. These headers represent a subset + of the headers that will accompany the job's HTTP request. + Some HTTP request headers will be ignored or replaced. A + partial list of headers that will be ignored or replaced is + below: + + - Host: This will be computed by Cloud Scheduler and + derived from + [uri][google.cloud.scheduler.v1beta1.HttpTarget.uri]. + + - ``Content-Length``: This will be computed by Cloud + Scheduler. + - ``User-Agent``: This will be set to + ``"Google-Cloud-Scheduler"``. + - ``X-Google-*``: Google internal use only. + - ``X-AppEngine-*``: Google internal use only. + - ``X-CloudScheduler``: This header will be set to true. + - ``X-CloudScheduler-JobName``: This header will contain + the job name. + - ``X-CloudScheduler-ScheduleTime``: For Cloud Scheduler + jobs specified in the unix-cron format, this header will + contain the job schedule time in RFC3339 UTC "Zulu" + format. + + The total size of headers must be less than 80KB. + body (bytes): + HTTP request body. A request body is allowed only if the + HTTP method is POST, PUT, or PATCH. It is an error to set + body on a job with an incompatible + [HttpMethod][google.cloud.scheduler.v1beta1.HttpMethod]. + oauth_token (google.cloud.scheduler_v1beta1.types.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + + This field is a member of `oneof`_ ``authorization_header``. 
+        oidc_token (google.cloud.scheduler_v1beta1.types.OidcToken):
+            If specified, an
+            `OIDC `__
+            token will be generated and attached as an ``Authorization``
+            header in the HTTP request.
+
+            This type of authorization can be used for many scenarios,
+            including calling Cloud Run, or endpoints where you intend
+            to validate the token yourself.
+
+            This field is a member of `oneof`_ ``authorization_header``.
+    """
+
+    uri: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    http_method: "HttpMethod" = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum="HttpMethod",
+    )
+    headers: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=3,
+    )
+    body: bytes = proto.Field(
+        proto.BYTES,
+        number=4,
+    )
+    oauth_token: "OAuthToken" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof="authorization_header",
+        message="OAuthToken",
+    )
+    oidc_token: "OidcToken" = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        oneof="authorization_header",
+        message="OidcToken",
+    )
+
+
+class AppEngineHttpTarget(proto.Message):
+    r"""App Engine target. The job will be pushed to a job handler by means
+    of an HTTP request via an
+    [http_method][google.cloud.scheduler.v1beta1.AppEngineHttpTarget.http_method]
+    such as HTTP POST, HTTP GET, etc. The job is acknowledged by means
+    of an HTTP response code in the range [200 - 299]. Error 503 is
+    considered an App Engine system error instead of an application
+    error. Requests returning error 503 will be retried regardless of
+    retry configuration and not counted against retry counts. Any other
+    response code, or a failure to receive a response before the
+    deadline, constitutes a failed attempt.
+
+    Attributes:
+        http_method (google.cloud.scheduler_v1beta1.types.HttpMethod):
+            The HTTP method to use for the request. PATCH
+            and OPTIONS are not permitted.
+        app_engine_routing (google.cloud.scheduler_v1beta1.types.AppEngineRouting):
+            App Engine Routing setting for the job.
+        relative_uri (str):
+            The relative URI.
+
+            The relative URL must begin with "/" and must be a valid
+            HTTP relative URL. It can contain a path, query string
+            arguments, and ``#`` fragments. If the relative URL is
+            empty, then the root path "/" will be used. No spaces are
+            allowed, and the maximum length allowed is 2083 characters.
+        headers (MutableMapping[str, str]):
+            HTTP request headers.
+
+            This map contains the header field names and values. Headers
+            can be set when the job is created.
+
+            Cloud Scheduler sets some headers to default values:
+
+            -  ``User-Agent``: By default, this header is
+               ``"AppEngine-Google; (+http://code.google.com/appengine)"``.
+               This header can be modified, but Cloud Scheduler will
+               append
+               ``"AppEngine-Google; (+http://code.google.com/appengine)"``
+               to the modified ``User-Agent``.
+            -  ``X-CloudScheduler``: This header will be set to true.
+            -  ``X-CloudScheduler-JobName``: This header will contain
+               the job name.
+            -  ``X-CloudScheduler-ScheduleTime``: For Cloud Scheduler
+               jobs specified in the unix-cron format, this header will
+               contain the job schedule time in RFC3339 UTC "Zulu"
+               format.
+
+            If the job has a
+            [body][google.cloud.scheduler.v1beta1.AppEngineHttpTarget.body],
+            Cloud Scheduler sets the following headers:
+
+            -  ``Content-Type``: By default, the ``Content-Type`` header
+               is set to ``"application/octet-stream"``. The default can
+               be overridden by explicitly setting ``Content-Type`` to a
+               particular media type when the job is created. For
+               example, ``Content-Type`` can be set to
+               ``"application/json"``.
+            -  ``Content-Length``: This is computed by Cloud Scheduler.
+               This value is output only. It cannot be changed.
+
+            The headers below are output only. They cannot be set or
+            overridden:
+
+            -  ``X-Google-*``: For Google internal use only.
+            -  ``X-AppEngine-*``: For Google internal use only.
+
+            In addition, some App Engine headers, which contain
+            job-specific information, are also sent to the job
+            handler.
+        body (bytes):
+            Body.
+
+            HTTP request body. A request body is allowed only if the
+            HTTP method is POST or PUT. It will result in an invalid
+            argument error to set a body on a job with an incompatible
+            [HttpMethod][google.cloud.scheduler.v1beta1.HttpMethod].
+    """
+
+    http_method: "HttpMethod" = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum="HttpMethod",
+    )
+    app_engine_routing: "AppEngineRouting" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="AppEngineRouting",
+    )
+    relative_uri: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    headers: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=4,
+    )
+    body: bytes = proto.Field(
+        proto.BYTES,
+        number=5,
+    )
+
+
+class PubsubTarget(proto.Message):
+    r"""Pub/Sub target. The job will be delivered by publishing a
+    message to the given Pub/Sub topic.
+
+    Attributes:
+        topic_name (str):
+            Required. The name of the Cloud Pub/Sub topic to which
+            messages will be published when a job is delivered. The
+            topic name must be in the same format as required by
+            Pub/Sub's
+            `PublishRequest.name `__,
+            for example ``projects/PROJECT_ID/topics/TOPIC_ID``.
+
+            The topic must be in the same project as the Cloud Scheduler
+            job.
+        data (bytes):
+            The message payload for PubsubMessage.
+
+            Pubsub message must contain either non-empty
+            data, or at least one attribute.
+        attributes (MutableMapping[str, str]):
+            Attributes for PubsubMessage.
+
+            Pubsub message must contain either non-empty
+            data, or at least one attribute.
+    """
+
+    topic_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    data: bytes = proto.Field(
+        proto.BYTES,
+        number=3,
+    )
+    attributes: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=4,
+    )
+
+
+class AppEngineRouting(proto.Message):
+    r"""App Engine Routing.
+
+    For more information about services, versions, and instances see `An
+    Overview of App
+    Engine `__,
+    `Microservices Architecture on Google App
+    Engine `__,
+    `App Engine Standard request
+    routing `__,
+    and `App Engine Flex request
+    routing `__.
+
+    Attributes:
+        service (str):
+            App service.
+
+            By default, the job is sent to the service which
+            is the default service when the job is
+            attempted.
+        version (str):
+            App version.
+
+            By default, the job is sent to the version which
+            is the default version when the job is
+            attempted.
+        instance (str):
+            App instance.
+
+            By default, the job is sent to an instance which is
+            available when the job is attempted.
+
+            Requests can only be sent to a specific instance if `manual
+            scaling is used in App Engine
+            Standard `__.
+            App Engine Flex does not support instances. For more
+            information, see `App Engine Standard request
+            routing `__
+            and `App Engine Flex request
+            routing `__.
+        host (str):
+            Output only. The host that the job is sent to.
+
+            For more information about how App Engine requests are
+            routed, see
+            `here `__.
+
+            The host is constructed as:
+
+            -  ``host = [application_domain_name]``\
+               ``| [service] + '.' + [application_domain_name]``\
+               ``| [version] + '.'
+ [application_domain_name]``\ + ``| [version_dot_service]+ '.' + [application_domain_name]``\ + ``| [instance] + '.' + [application_domain_name]``\ + ``| [instance_dot_service] + '.' + [application_domain_name]``\ + ``| [instance_dot_version] + '.' + [application_domain_name]``\ + ``| [instance_dot_version_dot_service] + '.' + [application_domain_name]`` + + - ``application_domain_name`` = The domain name of the app, + for example .appspot.com, which is associated with the + job's project ID. + + - ``service =`` + [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + + - ``version =`` + [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] + + - ``version_dot_service =`` + [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + + - ``instance =`` + [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] + + - ``instance_dot_service =`` + [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] + ``+ '.' +`` + [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + + - ``instance_dot_version =`` + [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] + + - ``instance_dot_version_dot_service =`` + [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + + If + [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + is empty, then the job will be sent to the service which is + the default service when the job is attempted. + + If + [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] + is empty, then the job will be sent to the version which is + the default version when the job is attempted. + + If + [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] + is empty, then the job will be sent to an instance which is + available when the job is attempted. + + If + [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service], + [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version], + or + [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] + is invalid, then the job will be sent to the default version + of the default service when the job is attempted. + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + instance: str = proto.Field( + proto.STRING, + number=3, + ) + host: str = proto.Field( + proto.STRING, + number=4, + ) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the job. The caller must + have iam.serviceAccounts.actAs permission for the service + account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. 
+ """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + scope: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the job. The caller must + have iam.serviceAccounts.actAs permission for the service + account. + audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. + """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-scheduler/mypy.ini b/packages/google-cloud-scheduler/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-scheduler/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-scheduler/noxfile.py b/packages/google-cloud-scheduler/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-scheduler/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
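The target types above (``HttpTarget``, ``OidcToken``, and friends) compose into a ``Job`` as ordinary proto-plus messages. A hedged illustration, where every identifier below (project, endpoint, service account) is a made-up placeholder:

```python
# Illustrative sketch only; all names are hypothetical placeholders.
from google.cloud import scheduler_v1beta1

job = scheduler_v1beta1.Job(
    name="projects/my-project/locations/us-central1/jobs/nightly-sync",
    schedule="0 3 * * *",  # unix-cron: daily at 03:00
    time_zone="utc",
    # `target` is a oneof: setting http_target clears pubsub_target /
    # app_engine_http_target automatically.
    http_target=scheduler_v1beta1.HttpTarget(
        uri="https://example.com/tasks/sync",
        http_method=scheduler_v1beta1.HttpMethod.POST,
        body=b'{"source": "cloud-scheduler"}',
        # `authorization_header` is also a oneof: oauth_token or oidc_token.
        oidc_token=scheduler_v1beta1.OidcToken(
            service_account_email="invoker@my-project.iam.gserviceaccount.com",
        ),
    ),
    retry_config=scheduler_v1beta1.RetryConfig(retry_count=3),
)
```

Because ``target`` and ``authorization_header`` are oneofs, assigning one member clears any previously set sibling, which keeps a constructed ``Job`` internally consistent by design.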
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-scheduler/renovate.json b/packages/google-cloud-scheduler/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-scheduler/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-scheduler/samples/AUTHORING_GUIDE.md b/packages/google-cloud-scheduler/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/google-cloud-scheduler/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-scheduler/samples/CONTRIBUTING.md b/packages/google-cloud-scheduler/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-scheduler/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-scheduler/samples/snippets/README.md b/packages/google-cloud-scheduler/samples/snippets/README.md new file mode 100644 index 000000000000..643674ca7f50 --- /dev/null +++ b/packages/google-cloud-scheduler/samples/snippets/README.md @@ -0,0 +1,4 @@ +Samples migrated +================ + +The samples have moved to a new location: https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/scheduler/snippets diff --git a/packages/google-cloud-scheduler/scheduler-v1beta1-py.tar.gz b/packages/google-cloud-scheduler/scheduler-v1beta1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-scheduler/scripts/decrypt-secrets.sh b/packages/google-cloud-scheduler/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-scheduler/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, falling back to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+    --project="${PROJECT_ID}" \
+    > testing/test-env.sh
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-service-account" \
+    --project="${PROJECT_ID}" \
+    > testing/service-account.json
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-client-secrets" \
+    --project="${PROJECT_ID}" \
+    > testing/client-secrets.json
diff --git a/packages/google-cloud-scheduler/scripts/fixup_scheduler_v1_keywords.py b/packages/google-cloud-scheduler/scripts/fixup_scheduler_v1_keywords.py
new file mode 100644
index 000000000000..5768b9d44bf4
--- /dev/null
+++ b/packages/google-cloud-scheduler/scripts/fixup_scheduler_v1_keywords.py
@@ -0,0 +1,183 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
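In rough terms, the fixup script below copies a source tree and rewrites flattened positional call sites into the request-object form, leaving the ``retry``/``timeout``/``metadata`` control parameters as keywords. For example (a hypothetical call site; ``client``, ``parent``, and ``job`` are placeholders):

```python
# Before: old flattened positional arguments.
client.create_job(parent, job)

# After running fixup_scheduler_v1_keywords.py: a single request dict,
# with control parameters (retry / timeout / metadata) kept as keywords.
client.create_job(request={"parent": parent, "job": job}, timeout=30.0)
```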
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class schedulerCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_job': ('parent', 'job', ), + 'delete_job': ('name', ), + 'get_job': ('name', ), + 'list_jobs': ('parent', 'page_size', 'page_token', ), + 'pause_job': ('name', ), + 'resume_job': ('name', ), + 'run_job': ('name', ), + 'update_job': ('job', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=schedulerCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the scheduler client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-scheduler/scripts/fixup_scheduler_v1beta1_keywords.py b/packages/google-cloud-scheduler/scripts/fixup_scheduler_v1beta1_keywords.py new file mode 100644 index 000000000000..955c0956e71d --- /dev/null +++ b/packages/google-cloud-scheduler/scripts/fixup_scheduler_v1beta1_keywords.py @@ -0,0 +1,183 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class schedulerCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_job': ('parent', 'job', ), + 'delete_job': ('name', 'legacy_app_engine_cron', ), + 'get_job': ('name', ), + 'list_jobs': ('parent', 'filter', 'page_size', 'page_token', 'legacy_app_engine_cron', ), + 'pause_job': ('name', ), + 'resume_job': ('name', ), + 'run_job': ('name', 'legacy_app_engine_cron', ), + 'update_job': ('job', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=schedulerCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the scheduler client library. 
+ +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-scheduler/scripts/readme-gen/readme_gen.py b/packages/google-cloud-scheduler/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-scheduler/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. 
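+    # get_help() shells out to `python <sample> --help`, and sample paths in
+    # the yaml config are typically relative, so resolve them from the
+    # config file's own directory.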
+ os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-scheduler/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-scheduler/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-scheduler/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-scheduler/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-scheduler/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-scheduler/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. 
Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-scheduler/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-scheduler/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-scheduler/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-scheduler/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-scheduler/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-scheduler/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-scheduler/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-scheduler/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-scheduler/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). 
+ +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-scheduler/setup.cfg b/packages/google-cloud-scheduler/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-scheduler/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-scheduler/setup.py b/packages/google-cloud-scheduler/setup.py new file mode 100644 index 000000000000..96157fb5f76d --- /dev/null +++ b/packages/google-cloud-scheduler/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-scheduler" + + +description = "Google Cloud Scheduler API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/scheduler/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-scheduler/testing/.gitignore b/packages/google-cloud-scheduler/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-scheduler/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-scheduler/testing/constraints-3.10.txt b/packages/google-cloud-scheduler/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-scheduler/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-scheduler/testing/constraints-3.11.txt b/packages/google-cloud-scheduler/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-scheduler/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-scheduler/testing/constraints-3.12.txt b/packages/google-cloud-scheduler/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-scheduler/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-scheduler/testing/constraints-3.7.txt b/packages/google-cloud-scheduler/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-scheduler/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-scheduler/testing/constraints-3.8.txt b/packages/google-cloud-scheduler/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-scheduler/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-scheduler/testing/constraints-3.9.txt b/packages/google-cloud-scheduler/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-scheduler/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-scheduler/tests/__init__.py b/packages/google-cloud-scheduler/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-scheduler/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-scheduler/tests/system/__init__.py b/packages/google-cloud-scheduler/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-scheduler/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-scheduler/tests/system/smoke_test.py b/packages/google-cloud-scheduler/tests/system/smoke_test.py new file mode 100644 index 000000000000..73d2e746903b --- /dev/null +++ b/packages/google-cloud-scheduler/tests/system/smoke_test.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import scheduler_v1 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_jobs(project_id: str, transport: str): + client = scheduler_v1.CloudSchedulerClient(transport=transport) + + parent = client.common_location_path(project_id, location="us-central1") + client.list_jobs(parent=parent) + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. + assert True diff --git a/packages/google-cloud-scheduler/tests/unit/__init__.py b/packages/google-cloud-scheduler/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-scheduler/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/__init__.py b/packages/google-cloud-scheduler/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-scheduler/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/__init__.py b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py new file mode 100644 index 000000000000..736ee526ce85 --- /dev/null +++ b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1/test_cloud_scheduler.py @@ -0,0 +1,6509 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.scheduler_v1.services.cloud_scheduler import ( + CloudSchedulerAsyncClient, + CloudSchedulerClient, + pagers, + transports, +) +from google.cloud.scheduler_v1.types import cloudscheduler +from google.cloud.scheduler_v1.types import job +from google.cloud.scheduler_v1.types import job as gcs_job +from google.cloud.scheduler_v1.types import target + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
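+# Swapping a localhost default for "foo.googleapis.com" lets the tests derive
+# a distinct "foo.mtls.googleapis.com" endpoint from it.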
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudSchedulerClient._get_default_mtls_endpoint(None) is None + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudSchedulerClient, "grpc"), + (CloudSchedulerAsyncClient, "grpc_asyncio"), + (CloudSchedulerClient, "rest"), + ], +) +def test_cloud_scheduler_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudscheduler.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudscheduler.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CloudSchedulerGrpcTransport, "grpc"), + (transports.CloudSchedulerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudSchedulerRestTransport, "rest"), + ], +) +def test_cloud_scheduler_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudSchedulerClient, "grpc"), + (CloudSchedulerAsyncClient, "grpc_asyncio"), + (CloudSchedulerClient, "rest"), + ], +) +def test_cloud_scheduler_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", 
transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudscheduler.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudscheduler.googleapis.com" + ) + + +def test_cloud_scheduler_client_get_transport_class(): + transport = CloudSchedulerClient.get_transport_class() + available_transports = [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerRestTransport, + ] + assert transport in available_transports + + transport = CloudSchedulerClient.get_transport_class("grpc") + assert transport == transports.CloudSchedulerGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport, "grpc"), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest"), + ], +) +@mock.patch.object( + CloudSchedulerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerClient), +) +@mock.patch.object( + CloudSchedulerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerAsyncClient), +) +def test_cloud_scheduler_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudSchedulerClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudSchedulerClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
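+    # With "always", the mTLS endpoint must be used even though no client
+    # certificate source was configured.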
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport, "grpc", "true"), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport, "grpc", "false"), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest", "true"), + (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CloudSchedulerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerClient), +) +@mock.patch.object( + CloudSchedulerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_scheduler_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
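+    # With no certificate from either source, the client should fall back to
+    # the regular endpoint no matter what GOOGLE_API_USE_CLIENT_CERTIFICATE says.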
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [CloudSchedulerClient, CloudSchedulerAsyncClient] +) +@mock.patch.object( + CloudSchedulerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerClient), +) +@mock.patch.object( + CloudSchedulerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerAsyncClient), +) +def test_cloud_scheduler_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
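+    # In "auto" mode, a discoverable default client certificate should switch
+    # the endpoint to DEFAULT_MTLS_ENDPOINT and surface that certificate source.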
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport, "grpc"), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest"), + ], +) +def test_cloud_scheduler_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CloudSchedulerClient, + transports.CloudSchedulerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest", None), + ], +) +def test_cloud_scheduler_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
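+    # The path should be handed to the transport unchanged; loading the
+    # credentials from disk is the transport's job, not the client's.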
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cloud_scheduler_client_client_options_from_dict(): + with mock.patch( + "google.cloud.scheduler_v1.services.cloud_scheduler.transports.CloudSchedulerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudSchedulerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CloudSchedulerClient, + transports.CloudSchedulerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_scheduler_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
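+    # create_channel must receive the credentials loaded from the file rather
+    # than the application-default credentials from google.auth.default().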
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudscheduler.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudscheduler.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.ListJobsRequest, + dict, + ], +) +def test_list_jobs(request_type, transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudscheduler.ListJobsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.ListJobsRequest() + + +@pytest.mark.asyncio +async def test_list_jobs_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.ListJobsRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudscheduler.ListJobsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
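+    # (The FakeUnaryUnaryCall wrapper above is what made the mocked stub
+    # awaitable like a real async gRPC call.)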
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.ListJobsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobsAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_from_dict():
+    await test_list_jobs_async(request_type=dict)
+
+
+def test_list_jobs_field_headers():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.ListJobsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+        call.return_value = cloudscheduler.ListJobsResponse()
+        client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.ListJobsRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudscheduler.ListJobsResponse()
+        )
+        await client.list_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_jobs_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cloudscheduler.ListJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_jobs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_jobs_flattened_error():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
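+    # In other words, pass either a fully-formed request object *or* the
+    # flattened keyword fields, never both. Illustrative only:
+    #
+    #     client.list_jobs(parent="parent_value")                  # ok
+    #     client.list_jobs(cloudscheduler.ListJobsRequest())       # ok
+    #     client.list_jobs(cloudscheduler.ListJobsRequest(),
+    #                      parent="parent_value")                  # ValueError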
+    with pytest.raises(ValueError):
+        client.list_jobs(
+            cloudscheduler.ListJobsRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudscheduler.ListJobsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_jobs(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_error_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_jobs(
+            cloudscheduler.ListJobsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_jobs_pager(transport_name: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                    job.Job(),
+                    job.Job(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[],
+                next_page_token="def",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                    job.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_jobs(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, job.Job) for i in results)
+
+
+def test_list_jobs_pages(transport_name: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+        # Set the response to a series of pages.
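+        # Each element of side_effect is returned for one page fetch; the
+        # trailing RuntimeError is a sentinel that would fail the test loudly
+        # if the pager ever asked for a fifth page. The fixture below encodes
+        # pages of 3, 0, 1 and 2 jobs with tokens "abc", "def", "ghi" and a
+        # final empty token.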
+        call.side_effect = (
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                    job.Job(),
+                    job.Job(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[],
+                next_page_token="def",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                    job.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_jobs(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pager():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                    job.Job(),
+                    job.Job(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[],
+                next_page_token="def",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                    job.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_jobs(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, job.Job) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pages():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                    job.Job(),
+                    job.Job(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[],
+                next_page_token="def",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudscheduler.ListJobsResponse(
+                jobs=[
+                    job.Job(),
+                    job.Job(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_jobs(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudscheduler.GetJobRequest,
+        dict,
+    ],
+)
+def test_get_job(request_type, transport: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
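+        # Only scalar fields are populated on the fake Job; message-typed
+        # fields (the job's target, retry settings, and so on) stay unset,
+        # since the assertions below check scalars only.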
+        call.return_value = job.Job(
+            name="name_value",
+            description="description_value",
+            schedule="schedule_value",
+            time_zone="time_zone_value",
+            state=job.Job.State.ENABLED,
+        )
+        response = client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.GetJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == job.Job.State.ENABLED
+
+
+def test_get_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+        client.get_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.GetJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_job_async(
+    transport: str = "grpc_asyncio", request_type=cloudscheduler.GetJobRequest
+):
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            job.Job(
+                name="name_value",
+                description="description_value",
+                schedule="schedule_value",
+                time_zone="time_zone_value",
+                state=job.Job.State.ENABLED,
+            )
+        )
+        response = await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.GetJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == job.Job.State.ENABLED
+
+
+@pytest.mark.asyncio
+async def test_get_job_async_from_dict():
+    await test_get_job_async(request_type=dict)
+
+
+def test_get_job_field_headers():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.GetJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+        call.return_value = job.Job()
+        client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_job_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.GetJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+        await client.get_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_job_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = job.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_job_flattened_error():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_job(
+            cloudscheduler.GetJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_error_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_job(
+            cloudscheduler.GetJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudscheduler.CreateJobRequest,
+        dict,
+    ],
+)
+def test_create_job(request_type, transport: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcs_job.Job(
+            name="name_value",
+            description="description_value",
+            schedule="schedule_value",
+            time_zone="time_zone_value",
+            state=gcs_job.Job.State.ENABLED,
+        )
+        response = client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.CreateJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gcs_job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == gcs_job.Job.State.ENABLED
+
+
+def test_create_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+        client.create_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.CreateJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_job_async(
+    transport: str = "grpc_asyncio", request_type=cloudscheduler.CreateJobRequest
+):
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gcs_job.Job(
+                name="name_value",
+                description="description_value",
+                schedule="schedule_value",
+                time_zone="time_zone_value",
+                state=gcs_job.Job.State.ENABLED,
+            )
+        )
+        response = await client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.CreateJobRequest()
+
+    # Establish that the response is the type that we expect.
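+    # `gcs_job` is the same Job message imported under a second module alias
+    # (the generator does this when a method both accepts and returns the
+    # resource), so these comparisons behave exactly like the `job.Job`
+    # assertions elsewhere in this file.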
+    assert isinstance(response, gcs_job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == gcs_job.Job.State.ENABLED
+
+
+@pytest.mark.asyncio
+async def test_create_job_async_from_dict():
+    await test_create_job_async(request_type=dict)
+
+
+def test_create_job_field_headers():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.CreateJobRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+        call.return_value = gcs_job.Job()
+        client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_job_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.CreateJobRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_job.Job())
+        await client.create_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_job_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcs_job.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_job(
+            parent="parent_value",
+            job=gcs_job.Job(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].job
+        mock_val = gcs_job.Job(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_job_flattened_error():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_job(
+            cloudscheduler.CreateJobRequest(),
+            parent="parent_value",
+            job=gcs_job.Job(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_job_flattened_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_job.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_job(
+            parent="parent_value",
+            job=gcs_job.Job(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].job
+        mock_val = gcs_job.Job(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_job_flattened_error_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_job(
+            cloudscheduler.CreateJobRequest(),
+            parent="parent_value",
+            job=gcs_job.Job(name="name_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudscheduler.UpdateJobRequest,
+        dict,
+    ],
+)
+def test_update_job(request_type, transport: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcs_job.Job(
+            name="name_value",
+            description="description_value",
+            schedule="schedule_value",
+            time_zone="time_zone_value",
+            state=gcs_job.Job.State.ENABLED,
+        )
+        response = client.update_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.UpdateJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gcs_job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == gcs_job.Job.State.ENABLED
+
+
+def test_update_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_job), "__call__") as call:
+        client.update_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.UpdateJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_job_async(
+    transport: str = "grpc_asyncio", request_type=cloudscheduler.UpdateJobRequest
+):
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gcs_job.Job(
+                name="name_value",
+                description="description_value",
+                schedule="schedule_value",
+                time_zone="time_zone_value",
+                state=gcs_job.Job.State.ENABLED,
+            )
+        )
+        response = await client.update_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.UpdateJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gcs_job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == gcs_job.Job.State.ENABLED
+
+
+@pytest.mark.asyncio
+async def test_update_job_async_from_dict():
+    await test_update_job_async(request_type=dict)
+
+
+def test_update_job_field_headers():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.UpdateJobRequest()
+
+    request.job.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_job), "__call__") as call:
+        call.return_value = gcs_job.Job()
+        client.update_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "job.name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_job_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.UpdateJobRequest()
+
+    request.job.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_job), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_job.Job())
+        await client.update_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
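+    # For update_job the routing key is the *nested* field `job.name`, so the
+    # expected metadata entry is ("x-goog-request-params", "job.name=name_value"),
+    # mirroring the `request.job.name` assignment above.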
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "job.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_job_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcs_job.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_job(
+            job=gcs_job.Job(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].job
+        mock_val = gcs_job.Job(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_job_flattened_error():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_job(
+            cloudscheduler.UpdateJobRequest(),
+            job=gcs_job.Job(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_job_flattened_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_job.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_job(
+            job=gcs_job.Job(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].job
+        mock_val = gcs_job.Job(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_job_flattened_error_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_job(
+            cloudscheduler.UpdateJobRequest(),
+            job=gcs_job.Job(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudscheduler.DeleteJobRequest,
+        dict,
+    ],
+)
+def test_delete_job(request_type, transport: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
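+    # Because request_type is parametrized over both the proto class and
+    # `dict`, `request_type()` yields either an empty DeleteJobRequest or an
+    # empty dict; the client accepts both forms.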
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.DeleteJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+        client.delete_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.DeleteJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_job_async(
+    transport: str = "grpc_asyncio", request_type=cloudscheduler.DeleteJobRequest
+):
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.DeleteJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_job_async_from_dict():
+    await test_delete_job_async(request_type=dict)
+
+
+def test_delete_job_field_headers():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.DeleteJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+        call.return_value = None
+        client.delete_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_job_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.DeleteJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_job_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_job_flattened_error():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_job(
+            cloudscheduler.DeleteJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_job_flattened_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_job_flattened_error_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_job(
+            cloudscheduler.DeleteJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudscheduler.PauseJobRequest,
+        dict,
+    ],
+)
+def test_pause_job(request_type, transport: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
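+    # `client.transport.pause_job` is a callable wrapper around the gRPC stub
+    # method, and special methods are looked up on the type, so the patch
+    # targets `__call__` on `type(...)`. A tiny sketch of the trick
+    # (hypothetical class, illustrative only):
+    #
+    #     class Wrapper:
+    #         def __call__(self, *args, **kwargs): ...
+    #     with mock.patch.object(Wrapper, "__call__") as call:
+    #         Wrapper()(request)   # intercepted by the mock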
+    with mock.patch.object(type(client.transport.pause_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = job.Job(
+            name="name_value",
+            description="description_value",
+            schedule="schedule_value",
+            time_zone="time_zone_value",
+            state=job.Job.State.ENABLED,
+        )
+        response = client.pause_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.PauseJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == job.Job.State.ENABLED
+
+
+def test_pause_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_job), "__call__") as call:
+        client.pause_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.PauseJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_pause_job_async(
+    transport: str = "grpc_asyncio", request_type=cloudscheduler.PauseJobRequest
+):
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            job.Job(
+                name="name_value",
+                description="description_value",
+                schedule="schedule_value",
+                time_zone="time_zone_value",
+                state=job.Job.State.ENABLED,
+            )
+        )
+        response = await client.pause_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.PauseJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == job.Job.State.ENABLED
+
+
+@pytest.mark.asyncio
+async def test_pause_job_async_from_dict():
+    await test_pause_job_async(request_type=dict)
+
+
+def test_pause_job_field_headers():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.PauseJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_job), "__call__") as call:
+        call.return_value = job.Job()
+        client.pause_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_pause_job_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.PauseJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_job), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+        await client.pause_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_pause_job_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = job.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.pause_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_pause_job_flattened_error():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.pause_job(
+            cloudscheduler.PauseJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_pause_job_flattened_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.pause_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_pause_job_flattened_error_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.pause_job(
+            cloudscheduler.PauseJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudscheduler.ResumeJobRequest,
+        dict,
+    ],
+)
+def test_resume_job(request_type, transport: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = job.Job(
+            name="name_value",
+            description="description_value",
+            schedule="schedule_value",
+            time_zone="time_zone_value",
+            state=job.Job.State.ENABLED,
+        )
+        response = client.resume_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.ResumeJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == job.Job.State.ENABLED
+
+
+def test_resume_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_job), "__call__") as call:
+        client.resume_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.ResumeJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_resume_job_async(
+    transport: str = "grpc_asyncio", request_type=cloudscheduler.ResumeJobRequest
+):
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            job.Job(
+                name="name_value",
+                description="description_value",
+                schedule="schedule_value",
+                time_zone="time_zone_value",
+                state=job.Job.State.ENABLED,
+            )
+        )
+        response = await client.resume_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.ResumeJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == job.Job.State.ENABLED
+
+
+@pytest.mark.asyncio
+async def test_resume_job_async_from_dict():
+    await test_resume_job_async(request_type=dict)
+
+
+def test_resume_job_field_headers():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.ResumeJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_job), "__call__") as call:
+        call.return_value = job.Job()
+        client.resume_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_resume_job_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudscheduler.ResumeJobRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_job), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+        await client.resume_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_resume_job_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = job.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.resume_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_resume_job_flattened_error():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.resume_job(
+            cloudscheduler.ResumeJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_resume_job_flattened_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.resume_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_resume_job_flattened_error_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.resume_job(
+            cloudscheduler.ResumeJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudscheduler.RunJobRequest,
+        dict,
+    ],
+)
+def test_run_job(request_type, transport: str = "grpc"):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = job.Job(
+            name="name_value",
+            description="description_value",
+            schedule="schedule_value",
+            time_zone="time_zone_value",
+            state=job.Job.State.ENABLED,
+        )
+        response = client.run_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudscheduler.RunJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, job.Job)
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.schedule == "schedule_value"
+    assert response.time_zone == "time_zone_value"
+    assert response.state == job.Job.State.ENABLED
+
+
+def test_run_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
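+    # Calling run_job() with no arguments should behave as if an empty
+    # RunJobRequest() had been passed, which is exactly what the assertion on
+    # args[0] below verifies.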
+ with mock.patch.object(type(client.transport.run_job), "__call__") as call: + client.run_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.RunJobRequest() + + +@pytest.mark.asyncio +async def test_run_job_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.RunJobRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + ) + ) + response = await client.run_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.RunJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + + +@pytest.mark.asyncio +async def test_run_job_async_from_dict(): + await test_run_job_async(request_type=dict) + + +def test_run_job_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.RunJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + call.return_value = job.Job() + client.run_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_job_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.RunJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) + await client.run_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
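+    # The routing header travels as a (key, value) tuple in the call's
+    # metadata; for a real resource it would look something like
+    # (illustrative value):
+    #   ("x-goog-request-params", "name=projects/p1/locations/l1/jobs/j1")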
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_run_job_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = job.Job()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.run_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_run_job_flattened_error():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.run_job(
+            cloudscheduler.RunJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_run_job_flattened_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_job), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.run_job(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_run_job_flattened_error_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.run_job(
+            cloudscheduler.RunJobRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudscheduler.ListJobsRequest,
+        dict,
+    ],
+)
+def test_list_jobs_rest(request_type):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
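+        # The REST tests fake the transport at the requests.Session level:
+        # the proto response is serialized to JSON below, exactly as a
+        # server would return it, and handed back through the mocked
+        # session so the client's deserialization path still runs.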
+ return_value = cloudscheduler.ListJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudscheduler.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_jobs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_jobs_rest_required_fields(request_type=cloudscheduler.ListJobsRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudscheduler.ListJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
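+            # transcode() normally matches the request against the method's
+            # http_options; mocking it sidesteps that matching, since the
+            # placeholder field values used here would not satisfy the real
+            # URI template (e.g. "v1/{parent=projects/*/locations/*}/jobs").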
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = cloudscheduler.ListJobsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_jobs(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_jobs_rest_unset_required_fields():
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_jobs._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_jobs_rest_interceptors(null_interceptor):
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudSchedulerRestInterceptor(),
+    )
+    client = CloudSchedulerClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "post_list_jobs"
+    ) as post, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "pre_list_jobs"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudscheduler.ListJobsRequest.pb(cloudscheduler.ListJobsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = cloudscheduler.ListJobsResponse.to_json(
+            cloudscheduler.ListJobsResponse()
+        )
+
+        request = cloudscheduler.ListJobsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloudscheduler.ListJobsResponse()
+
+        client.list_jobs(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_jobs_rest_bad_request(
+    transport: str = "rest", request_type=cloudscheduler.ListJobsRequest
+):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
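+    # google.api_core maps HTTP status codes onto exception classes, so a
+    # mocked 400 response is expected to surface as
+    # core_exceptions.BadRequest without any real network traffic.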
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_jobs(request) + + +def test_list_jobs_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudscheduler.ListJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudscheduler.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, + args[1], + ) + + +def test_list_jobs_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + cloudscheduler.ListJobsRequest(), + parent="parent_value", + ) + + +def test_list_jobs_rest_pager(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudscheduler.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + cloudscheduler.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + cloudscheduler.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + cloudscheduler.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudscheduler.ListJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, job.Job) for i in results) + + pages = list(client.list_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.GetJobRequest, + dict, + ], +) +def test_get_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + + +def test_get_job_rest_required_fields(request_type=cloudscheduler.GetJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = job.Job.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_job(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_job_rest_unset_required_fields():
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_job._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_job_rest_interceptors(null_interceptor):
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudSchedulerRestInterceptor(),
+    )
+    client = CloudSchedulerClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "post_get_job"
+    ) as post, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "pre_get_job"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudscheduler.GetJobRequest.pb(cloudscheduler.GetJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = job.Job.to_json(job.Job())
+
+        request = cloudscheduler.GetJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = job.Job()
+
+        client.get_job(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_job_rest_bad_request(
+    transport: str = "rest", request_type=cloudscheduler.GetJobRequest
+):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_job(request)
+
+
+def test_get_job_rest_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + args[1], + ) + + +def test_get_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + cloudscheduler.GetJobRequest(), + name="name_value", + ) + + +def test_get_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.CreateJobRequest, + dict, + ], +) +def test_create_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["job"] = { + "name": "name_value", + "description": "description_value", + "pubsub_target": { + "topic_name": "topic_name_value", + "data": b"data_blob", + "attributes": {}, + }, + "app_engine_http_target": { + "http_method": 1, + "app_engine_routing": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "relative_uri": "relative_uri_value", + "headers": {}, + "body": b"body_blob", + }, + "http_target": { + "uri": "uri_value", + "http_method": 1, + "headers": {}, + "body": b"body_blob", + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "schedule": "schedule_value", + "time_zone": "time_zone_value", + "user_update_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "schedule_time": {}, + "last_attempt_time": {}, + "retry_config": { + "retry_count": 1214, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff_duration": {}, + "max_backoff_duration": {}, + "max_doublings": 1388, + }, + "attempt_deadline": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
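+    # The request_init["job"] dict above mirrors the Job message's dict
+    # form: nested messages become dicts, bytes fields take b"..." values,
+    # and {} leaves a field at its default; request_type(**request_init)
+    # turns it back into a typed proto before the call.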
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=gcs_job.Job.State.ENABLED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == gcs_job.Job.State.ENABLED + + +def test_create_job_rest_required_fields(request_type=cloudscheduler.CreateJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gcs_job.Job.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_job(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_job_rest_unset_required_fields():
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_job._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "job",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_job_rest_interceptors(null_interceptor):
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudSchedulerRestInterceptor(),
+    )
+    client = CloudSchedulerClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "post_create_job"
+    ) as post, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "pre_create_job"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudscheduler.CreateJobRequest.pb(
+            cloudscheduler.CreateJobRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gcs_job.Job.to_json(gcs_job.Job())
+
+        request = cloudscheduler.CreateJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gcs_job.Job()
+
+        client.create_job(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_job_rest_bad_request(
+    transport: str = "rest", request_type=cloudscheduler.CreateJobRequest
+):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["job"] = {
+        "name": "name_value",
+        "description": "description_value",
+        "pubsub_target": {
+            "topic_name": "topic_name_value",
+            "data": b"data_blob",
+            "attributes": {},
+        },
+        "app_engine_http_target": {
+            "http_method": 1,
+            "app_engine_routing": {
+                "service": "service_value",
+                "version": "version_value",
+                "instance": "instance_value",
+                "host": "host_value",
+            },
+            "relative_uri": "relative_uri_value",
+            "headers": {},
+            "body": b"body_blob",
+        },
+        "http_target": {
+            "uri": "uri_value",
+            "http_method": 1,
+            "headers": {},
+            "body": b"body_blob",
+            "oauth_token": {
+                "service_account_email": "service_account_email_value",
+                "scope": 
"scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "schedule": "schedule_value", + "time_zone": "time_zone_value", + "user_update_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "schedule_time": {}, + "last_attempt_time": {}, + "retry_config": { + "retry_count": 1214, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff_duration": {}, + "max_backoff_duration": {}, + "max_doublings": 1388, + }, + "attempt_deadline": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job(request) + + +def test_create_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + job=gcs_job.Job(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, + args[1], + ) + + +def test_create_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job( + cloudscheduler.CreateJobRequest(), + parent="parent_value", + job=gcs_job.Job(name="name_value"), + ) + + +def test_create_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.UpdateJobRequest, + dict, + ], +) +def test_update_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"job": {"name": "projects/sample1/locations/sample2/jobs/sample3"}} + request_init["job"] = { + "name": "projects/sample1/locations/sample2/jobs/sample3", + "description": "description_value", + "pubsub_target": { + "topic_name": "topic_name_value", + "data": b"data_blob", + "attributes": {}, + }, + "app_engine_http_target": { + "http_method": 1, + "app_engine_routing": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "relative_uri": "relative_uri_value", + "headers": {}, + "body": b"body_blob", + }, + "http_target": { + "uri": "uri_value", + "http_method": 1, + "headers": {}, + "body": b"body_blob", + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "schedule": "schedule_value", + "time_zone": "time_zone_value", + "user_update_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "schedule_time": {}, + "last_attempt_time": {}, + "retry_config": { + "retry_count": 1214, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff_duration": {}, + "max_backoff_duration": {}, + "max_doublings": 1388, + }, + "attempt_deadline": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=gcs_job.Job.State.ENABLED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_job(request) + + # Establish that the response is the type that we expect. 
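+    # gcs_job is presumably an import alias for the same Job type used as a
+    # method *input* elsewhere in this file; mutating RPCs assert against it
+    # rather than the plain job module used for read-only responses.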
+ assert isinstance(response, gcs_job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == gcs_job.Job.State.ENABLED + + +def test_update_job_rest_required_fields(request_type=cloudscheduler.UpdateJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gcs_job.Job.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.update_job(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_job_rest_unset_required_fields():
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.update_job._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask",)) & set(("job",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_job_rest_interceptors(null_interceptor):
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudSchedulerRestInterceptor(),
+    )
+    client = CloudSchedulerClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "post_update_job"
+    ) as post, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "pre_update_job"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudscheduler.UpdateJobRequest.pb(
+            cloudscheduler.UpdateJobRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gcs_job.Job.to_json(gcs_job.Job())
+
+        request = cloudscheduler.UpdateJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gcs_job.Job()
+
+        client.update_job(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_job_rest_bad_request(
+    transport: str = "rest", request_type=cloudscheduler.UpdateJobRequest
+):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"job": {"name": "projects/sample1/locations/sample2/jobs/sample3"}}
+    request_init["job"] = {
+        "name": "projects/sample1/locations/sample2/jobs/sample3",
+        "description": "description_value",
+        "pubsub_target": {
+            "topic_name": "topic_name_value",
+            "data": b"data_blob",
+            "attributes": {},
+        },
+        "app_engine_http_target": {
+            "http_method": 1,
+            "app_engine_routing": {
+                "service": "service_value",
+                "version": "version_value",
+                "instance": "instance_value",
+                "host": "host_value",
+            },
+            "relative_uri": "relative_uri_value",
+            "headers": {},
+            "body": b"body_blob",
+        },
+        "http_target": {
+            "uri": "uri_value",
+            "http_method": 1,
+            "headers": {},
+            "body": b"body_blob",
+            "oauth_token": {
+                "service_account_email": 
"service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "schedule": "schedule_value", + "time_zone": "time_zone_value", + "user_update_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "schedule_time": {}, + "last_attempt_time": {}, + "retry_config": { + "retry_count": 1214, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff_duration": {}, + "max_backoff_duration": {}, + "max_doublings": 1388, + }, + "attempt_deadline": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job(request) + + +def test_update_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = { + "job": {"name": "projects/sample1/locations/sample2/jobs/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + job=gcs_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{job.name=projects/*/locations/*/jobs/*}" % client.transport._host, + args[1], + ) + + +def test_update_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_job( + cloudscheduler.UpdateJobRequest(), + job=gcs_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.DeleteJobRequest, + dict, + ], +) +def test_delete_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_job(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_rest_required_fields(request_type=cloudscheduler.DeleteJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ""
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.delete_job(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_job_rest_unset_required_fields():
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_job._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_job_rest_interceptors(null_interceptor):
+    transport = transports.CloudSchedulerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudSchedulerRestInterceptor(),
+    )
+    client = CloudSchedulerClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudSchedulerRestInterceptor, "pre_delete_job"
+    ) as pre:
+        pre.assert_not_called()
+        pb_message = cloudscheduler.DeleteJobRequest.pb(
+            cloudscheduler.DeleteJobRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = cloudscheduler.DeleteJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_job(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+
+
+def test_delete_job_rest_bad_request(
+    transport: str = "rest", request_type=cloudscheduler.DeleteJobRequest
+):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_job(request)
+
+
+def test_delete_job_rest_flattened():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
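+        # DeleteJob has no response body, so the faked HTTP payload is an
+        # empty string and the client is expected to hand back None.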
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + args[1], + ) + + +def test_delete_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job( + cloudscheduler.DeleteJobRequest(), + name="name_value", + ) + + +def test_delete_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.PauseJobRequest, + dict, + ], +) +def test_pause_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pause_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + + +def test_pause_job_rest_required_fields(request_type=cloudscheduler.PauseJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.pause_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pause_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pause_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pause_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_pause_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_pause_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.PauseJobRequest.pb(cloudscheduler.PauseJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = job.Job.to_json(job.Job()) + + request = cloudscheduler.PauseJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = job.Job() + + client.pause_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_pause_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.PauseJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.pause_job(request) + + +def test_pause_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.pause_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/jobs/*}:pause" % client.transport._host, + args[1], + ) + + +def test_pause_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_job( + cloudscheduler.PauseJobRequest(), + name="name_value", + ) + + +def test_pause_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.ResumeJobRequest, + dict, + ], +) +def test_resume_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resume_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + + +def test_resume_job_rest_required_fields(request_type=cloudscheduler.ResumeJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resume_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resume_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.resume_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_resume_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_resume_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.ResumeJobRequest.pb( + cloudscheduler.ResumeJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = job.Job.to_json(job.Job()) + + request = cloudscheduler.ResumeJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = job.Job() + + client.resume_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.ResumeJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume_job(request) + + +def test_resume_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resume_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/jobs/*}:resume" + % client.transport._host, + args[1], + ) + + +def test_resume_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_job( + cloudscheduler.ResumeJobRequest(), + name="name_value", + ) + + +def test_resume_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.RunJobRequest, + dict, + ], +) +def test_run_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + + +def test_run_job_rest_required_fields(request_type=cloudscheduler.RunJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.run_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_run_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_run_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.RunJobRequest.pb(cloudscheduler.RunJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = job.Job.to_json(job.Job()) + + request = cloudscheduler.RunJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = job.Job() + + client.run_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.RunJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_job(request) + + +def test_run_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/jobs/*}:run" % client.transport._host, + args[1], + ) + + +def test_run_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_job( + cloudscheduler.RunJobRequest(), + name="name_value", + ) + + +def test_run_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudSchedulerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudSchedulerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudSchedulerClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudSchedulerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudSchedulerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudSchedulerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + transports.CloudSchedulerRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudSchedulerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudSchedulerGrpcTransport, + ) + + +def test_cloud_scheduler_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudSchedulerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_scheduler_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.scheduler_v1.services.cloud_scheduler.transports.CloudSchedulerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudSchedulerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
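+    # (The base class only defines the interface; the concrete gRPC and REST
+    # transports are expected to override each of these.)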
+ methods = ( + "list_jobs", + "get_job", + "create_job", + "update_job", + "delete_job", + "pause_job", + "resume_job", + "run_job", + "get_location", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_scheduler_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.scheduler_v1.services.cloud_scheduler.transports.CloudSchedulerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudSchedulerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_scheduler_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.scheduler_v1.services.cloud_scheduler.transports.CloudSchedulerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudSchedulerTransport() + adc.assert_called_once() + + +def test_cloud_scheduler_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudSchedulerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + ], +) +def test_cloud_scheduler_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
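+    # (google.auth.default is patched below, so no real credential lookup runs.)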
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + transports.CloudSchedulerRestTransport, + ], +) +def test_cloud_scheduler_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudSchedulerGrpcTransport, grpc_helpers), + (transports.CloudSchedulerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cloud_scheduler_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudscheduler.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudscheduler.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + ], +) +def test_cloud_scheduler_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
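+    # (grpc.ssl_channel_credentials is patched below to verify it receives the
+    # cert/key pair produced by the callback.)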
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_cloud_scheduler_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.CloudSchedulerRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_cloud_scheduler_host_no_port(transport_name):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="cloudscheduler.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "cloudscheduler.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://cloudscheduler.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_cloud_scheduler_host_with_port(transport_name):
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="cloudscheduler.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "cloudscheduler.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://cloudscheduler.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_cloud_scheduler_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = CloudSchedulerClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = CloudSchedulerClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_jobs._session
+    session2 = client2.transport.list_jobs._session
+    assert session1 != session2
+    session1 = client1.transport.get_job._session
+    session2 = client2.transport.get_job._session
+    assert session1 != session2
+    session1 = client1.transport.create_job._session
+    session2 = client2.transport.create_job._session
+    assert session1 != session2
+    session1 = client1.transport.update_job._session
+    session2 = client2.transport.update_job._session
+    assert session1 != session2
+    session1 = client1.transport.delete_job._session
+    session2 = client2.transport.delete_job._session
+    assert session1 != session2
+    session1 = client1.transport.pause_job._session
+    session2 = client2.transport.pause_job._session
+    assert session1 != session2
+    session1 = client1.transport.resume_job._session
+    session2 = client2.transport.resume_job._session
+    assert session1 != session2
+    session1 = client1.transport.run_job._session
+    session2 = client2.transport.run_job._session
+    assert session1 != session2
+
+
+def test_cloud_scheduler_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudSchedulerGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+def test_cloud_scheduler_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudSchedulerGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.CloudSchedulerGrpcTransport,
+        transports.CloudSchedulerGrpcAsyncIOTransport,
+    ],
+)
+def test_cloud_scheduler_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + ], +) +def test_cloud_scheduler_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_job_path(): + project = "squid" + location = "clam" + job = "whelk" + expected = "projects/{project}/locations/{location}/jobs/{job}".format( + project=project, + location=location, + job=job, + ) + actual = CloudSchedulerClient.job_path(project, location, job) + assert expected == actual + + +def test_parse_job_path(): + expected = { + "project": "octopus", + "location": "oyster", + "job": "nudibranch", + } + path = CloudSchedulerClient.job_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_job_path(path) + assert expected == actual + + +def test_topic_path(): + project = "cuttlefish" + topic = "mussel" + expected = "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + actual = CloudSchedulerClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "winkle", + "topic": "nautilus", + } + path = CloudSchedulerClient.topic_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_topic_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudSchedulerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = CloudSchedulerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CloudSchedulerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = CloudSchedulerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudSchedulerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CloudSchedulerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = CloudSchedulerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = CloudSchedulerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = CloudSchedulerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CloudSchedulerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = CloudSchedulerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudSchedulerTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudSchedulerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudSchedulerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
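+    # (api_core translates the mocked 400 response into core_exceptions.BadRequest.)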
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations(transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
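+    # (GAPIC clients propagate URI path fields to the server via the
+    # x-goog-request-params metadata header.)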
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudSchedulerClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = CloudSchedulerClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = CloudSchedulerClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport), + (CloudSchedulerAsyncClient, transports.CloudSchedulerGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/__init__.py b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py new file mode 100644 index 000000000000..c8b137754412 --- /dev/null +++ b/packages/google-cloud-scheduler/tests/unit/gapic/scheduler_v1beta1/test_cloud_scheduler.py @@ -0,0 +1,6558 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
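+# These tests exercise the generated v1beta1 clients with the transport layer
+# mocked out, so they run without network access or real credentials.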
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.scheduler_v1beta1.services.cloud_scheduler import ( + CloudSchedulerAsyncClient, + CloudSchedulerClient, + pagers, + transports, +) +from google.cloud.scheduler_v1beta1.types import cloudscheduler +from google.cloud.scheduler_v1beta1.types import job +from google.cloud.scheduler_v1beta1.types import job as gcs_job +from google.cloud.scheduler_v1beta1.types import target + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
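+# For example, with the patched default of "foo.googleapis.com" the derived
+# mTLS endpoint is "foo.mtls.googleapis.com".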
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudSchedulerClient._get_default_mtls_endpoint(None) is None + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudSchedulerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudSchedulerClient, "grpc"), + (CloudSchedulerAsyncClient, "grpc_asyncio"), + (CloudSchedulerClient, "rest"), + ], +) +def test_cloud_scheduler_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudscheduler.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudscheduler.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CloudSchedulerGrpcTransport, "grpc"), + (transports.CloudSchedulerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudSchedulerRestTransport, "rest"), + ], +) +def test_cloud_scheduler_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudSchedulerClient, "grpc"), + (CloudSchedulerAsyncClient, "grpc_asyncio"), + (CloudSchedulerClient, "rest"), + ], +) +def test_cloud_scheduler_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", 
transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudscheduler.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudscheduler.googleapis.com" + ) + + +def test_cloud_scheduler_client_get_transport_class(): + transport = CloudSchedulerClient.get_transport_class() + available_transports = [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerRestTransport, + ] + assert transport in available_transports + + transport = CloudSchedulerClient.get_transport_class("grpc") + assert transport == transports.CloudSchedulerGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport, "grpc"), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest"), + ], +) +@mock.patch.object( + CloudSchedulerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerClient), +) +@mock.patch.object( + CloudSchedulerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerAsyncClient), +) +def test_cloud_scheduler_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudSchedulerClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudSchedulerClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
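+ # Here the client must target DEFAULT_MTLS_ENDPOINT even though no client
+ # certificate was configured.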
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class(transport=transport_name)
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class(transport=transport_name)
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience=None,
+ )
+ # Check the case api_audience is provided
+ options = client_options.ClientOptions(
+ api_audience="https://language.googleapis.com"
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ api_audience="https://language.googleapis.com",
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport, "grpc", "true"),
+ (
+ CloudSchedulerAsyncClient,
+ transports.CloudSchedulerGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport, "grpc", "false"),
+ (
+ CloudSchedulerAsyncClient,
+ transports.CloudSchedulerGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest", "true"),
+ (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest", "false"),
+ ],
+)
+@mock.patch.object(
+ CloudSchedulerClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(CloudSchedulerClient),
+)
+@mock.patch.object(
+ CloudSchedulerAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(CloudSchedulerAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_cloud_scheduler_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior.
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
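+ # With no certificate available, the client should fall back to the regular
+ # endpoint and plain TLS regardless of GOOGLE_API_USE_CLIENT_CERTIFICATE.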
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [CloudSchedulerClient, CloudSchedulerAsyncClient] +) +@mock.patch.object( + CloudSchedulerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerClient), +) +@mock.patch.object( + CloudSchedulerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudSchedulerAsyncClient), +) +def test_cloud_scheduler_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport, "grpc"), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest"), + ], +) +def test_cloud_scheduler_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CloudSchedulerClient, + transports.CloudSchedulerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CloudSchedulerClient, transports.CloudSchedulerRestTransport, "rest", None), + ], +) +def test_cloud_scheduler_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
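+ # (End users would hit this path with something like
+ # CloudSchedulerClient(client_options={"credentials_file": "creds.json"});
+ # the file path here is hypothetical.)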
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cloud_scheduler_client_client_options_from_dict(): + with mock.patch( + "google.cloud.scheduler_v1beta1.services.cloud_scheduler.transports.CloudSchedulerGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudSchedulerClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CloudSchedulerClient, + transports.CloudSchedulerGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CloudSchedulerAsyncClient, + transports.CloudSchedulerGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_scheduler_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
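+ # (The file-based credentials, not ADC, must reach
+ # grpc_helpers.create_channel; the assert_called_with below pins down the
+ # full channel configuration.)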
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudscheduler.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudscheduler.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.ListJobsRequest, + dict, + ], +) +def test_list_jobs(request_type, transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudscheduler.ListJobsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.ListJobsRequest() + + +@pytest.mark.asyncio +async def test_list_jobs_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.ListJobsRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudscheduler.ListJobsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_jobs_async_from_dict(): + await test_list_jobs_async(request_type=dict) + + +def test_list_jobs_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.ListJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = cloudscheduler.ListJobsResponse() + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.ListJobsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudscheduler.ListJobsResponse() + ) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_jobs_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudscheduler.ListJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_jobs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_jobs_flattened_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
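+ # (Mixing the two request styles would be ambiguous, so the client raises
+ # rather than attempting to merge them.)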
+ with pytest.raises(ValueError):
+ client.list_jobs(
+ cloudscheduler.ListJobsRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_async():
+ client = CloudSchedulerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cloudscheduler.ListJobsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_jobs(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_error_async():
+ client = CloudSchedulerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_jobs(
+ cloudscheduler.ListJobsRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_jobs_pager(transport_name: str = "grpc"):
+ client = CloudSchedulerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ job.Job(),
+ job.Job(),
+ ],
+ next_page_token="abc",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[],
+ next_page_token="def",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ ],
+ next_page_token="ghi",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ job.Job(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_jobs(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, job.Job) for i in results)
+
+
+def test_list_jobs_pages(transport_name: str = "grpc"):
+ client = CloudSchedulerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Set the response to a series of pages.
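+ # (Four pages of 3, 0, 1 and 2 jobs; the trailing RuntimeError only fires
+ # if the pager mistakenly requests a page past the empty token.)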
+ call.side_effect = (
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ job.Job(),
+ job.Job(),
+ ],
+ next_page_token="abc",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[],
+ next_page_token="def",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ ],
+ next_page_token="ghi",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ job.Job(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_jobs(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pager():
+ client = CloudSchedulerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ job.Job(),
+ job.Job(),
+ ],
+ next_page_token="abc",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[],
+ next_page_token="def",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ ],
+ next_page_token="ghi",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ job.Job(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_jobs(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, job.Job) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pages():
+ client = CloudSchedulerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ job.Job(),
+ job.Job(),
+ ],
+ next_page_token="abc",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[],
+ next_page_token="def",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ ],
+ next_page_token="ghi",
+ ),
+ cloudscheduler.ListJobsResponse(
+ jobs=[
+ job.Job(),
+ job.Job(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+ # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+ async for page_ in ( # pragma: no branch
+ await client.list_jobs(request={})
+ ).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ cloudscheduler.GetJobRequest,
+ dict,
+ ],
+)
+def test_get_job(request_type, transport: str = "grpc"):
+ client = CloudSchedulerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
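+ # (Only a few scalar fields are populated; unset proto3 fields simply keep
+ # their defaults, which keeps the fixture small.)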
+ call.return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + response = client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.GetJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_get_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + client.get_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.GetJobRequest() + + +@pytest.mark.asyncio +async def test_get_job_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.GetJobRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + ) + response = await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.GetJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +@pytest.mark.asyncio +async def test_get_job_async_from_dict(): + await test_get_job_async(request_type=dict) + + +def test_get_job_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.GetJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
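+ # (Patching __call__ on the multicallable's type intercepts the stub
+ # invocation itself, so nothing touches the network.)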
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ call.return_value = job.Job()
+ client.get_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_job_field_headers_async():
+ client = CloudSchedulerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = cloudscheduler.GetJobRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+ await client.get_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_get_job_flattened():
+ client = CloudSchedulerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = job.Job()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_job(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_get_job_flattened_error():
+ client = CloudSchedulerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_job(
+ cloudscheduler.GetJobRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_async():
+ client = CloudSchedulerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_job(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_job_flattened_error_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_job( + cloudscheduler.GetJobRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.CreateJobRequest, + dict, + ], +) +def test_create_job(request_type, transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=gcs_job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + response = client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == gcs_job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_create_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_job), "__call__") as call: + client.create_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.CreateJobRequest() + + +@pytest.mark.asyncio +async def test_create_job_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.CreateJobRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_job), "__call__") as call: + # Designate an appropriate return value for the call. 
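+ # (FakeUnaryUnaryCall wraps the response in an awaitable fake of the
+ # grpc.aio call object that the async client expects.)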
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcs_job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=gcs_job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + ) + response = await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == gcs_job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +@pytest.mark.asyncio +async def test_create_job_async_from_dict(): + await test_create_job_async(request_type=dict) + + +def test_create_job_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.CreateJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_job), "__call__") as call: + call.return_value = gcs_job.Job() + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_job_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.CreateJobRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_job.Job()) + await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_job_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_job.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job( + parent="parent_value", + job=gcs_job.Job(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
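+ # (The flattened kwargs are folded into a single CreateJobRequest before
+ # the stub is invoked, so the fields are read back off args[0].)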
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].job
+ mock_val = gcs_job.Job(name="name_value")
+ assert arg == mock_val
+
+
+def test_create_job_flattened_error():
+ client = CloudSchedulerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_job(
+ cloudscheduler.CreateJobRequest(),
+ parent="parent_value",
+ job=gcs_job.Job(name="name_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_job_flattened_async():
+ client = CloudSchedulerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_job.Job())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_job(
+ parent="parent_value",
+ job=gcs_job.Job(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].job
+ mock_val = gcs_job.Job(name="name_value")
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_job_flattened_error_async():
+ client = CloudSchedulerAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_job(
+ cloudscheduler.CreateJobRequest(),
+ parent="parent_value",
+ job=gcs_job.Job(name="name_value"),
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ cloudscheduler.UpdateJobRequest,
+ dict,
+ ],
+)
+def test_update_job(request_type, transport: str = "grpc"):
+ client = CloudSchedulerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = gcs_job.Job(
+ name="name_value",
+ description="description_value",
+ schedule="schedule_value",
+ time_zone="time_zone_value",
+ state=gcs_job.Job.State.ENABLED,
+ legacy_app_engine_cron=True,
+ )
+ response = client.update_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == cloudscheduler.UpdateJobRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, gcs_job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == gcs_job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_update_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + client.update_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.UpdateJobRequest() + + +@pytest.mark.asyncio +async def test_update_job_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.UpdateJobRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcs_job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=gcs_job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + ) + response = await client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.UpdateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == gcs_job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +@pytest.mark.asyncio +async def test_update_job_async_from_dict(): + await test_update_job_async(request_type=dict) + + +def test_update_job_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.UpdateJobRequest() + + request.job.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = gcs_job.Job() + client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
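+ # (For update RPCs the routing key is the nested "job.name" field, hence
+ # the "job.name=name_value" param below.)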
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "job.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_job_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.UpdateJobRequest() + + request.job.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_job.Job()) + await client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "job.name=name_value", + ) in kw["metadata"] + + +def test_update_job_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_job.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_job( + job=gcs_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].job + mock_val = gcs_job.Job(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_job_flattened_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job( + cloudscheduler.UpdateJobRequest(), + job=gcs_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_job_flattened_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcs_job.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcs_job.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_job( + job=gcs_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
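+ # NOTE: flattened keyword arguments are copied onto a fresh request
+ # message before the transport is invoked, which is why they can be read
+ # back off args[0] below; roughly:
+ #
+ #     request = cloudscheduler.UpdateJobRequest()
+ #     request.job = gcs_job.Job(name="name_value")
+ #     request.update_mask = field_mask_pb2.FieldMask(paths=["paths_value"])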
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].job + mock_val = gcs_job.Job(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_job_flattened_error_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_job( + cloudscheduler.UpdateJobRequest(), + job=gcs_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.DeleteJobRequest, + dict, + ], +) +def test_delete_job(request_type, transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.DeleteJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + client.delete_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.DeleteJobRequest() + + +@pytest.mark.asyncio +async def test_delete_job_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.DeleteJobRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.DeleteJobRequest() + + # Establish that the response is the type that we expect. 
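+ # NOTE: DeleteJob returns google.protobuf.Empty on the wire; the
+ # generated surface maps that to None, hence the bare identity check
+ # below rather than an isinstance test.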
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_async_from_dict(): + await test_delete_job_async(request_type=dict) + + +def test_delete_job_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.DeleteJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + call.return_value = None + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_job_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.DeleteJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_job_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_job_flattened_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job( + cloudscheduler.DeleteJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_job_flattened_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_job), "__call__") as call: + # Designate an appropriate return value for the call. 
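+ # NOTE: the first assignment below is immediately superseded -- the async
+ # client awaits the stub, so the value must be wrapped in an awaitable.
+ # A rough sketch of what grpc_helpers_async.FakeUnaryUnaryCall provides
+ # (an approximation, not the real implementation):
+ #
+ #     class FakeUnaryUnaryCall:
+ #         def __init__(self, response=None):
+ #             self._response = response
+ #         def __await__(self):
+ #             if False:
+ #                 yield  # makes __await__ a generator-iterator
+ #             return self._response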
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_job_flattened_error_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_job( + cloudscheduler.DeleteJobRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.PauseJobRequest, + dict, + ], +) +def test_pause_job(request_type, transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + response = client.pause_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.PauseJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_pause_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_job), "__call__") as call: + client.pause_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.PauseJobRequest() + + +@pytest.mark.asyncio +async def test_pause_job_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.PauseJobRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
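+ # NOTE: patching "__call__" on type(client.transport.pause_job) works
+ # because the transport caches its stubs, so every property access
+ # returns the same multicallable; the idiom used throughout this file:
+ #
+ #     stub = client.transport.pause_job  # same object on each access
+ #     with mock.patch.object(type(stub), "__call__") as call:
+ #         ...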
+ with mock.patch.object(type(client.transport.pause_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + ) + response = await client.pause_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.PauseJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +@pytest.mark.asyncio +async def test_pause_job_async_from_dict(): + await test_pause_job_async(request_type=dict) + + +def test_pause_job_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.PauseJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_job), "__call__") as call: + call.return_value = job.Job() + client.pause_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_job_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.PauseJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) + await client.pause_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_pause_job_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = job.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.pause_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_pause_job_flattened_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_job( + cloudscheduler.PauseJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_pause_job_flattened_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = job.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_pause_job_flattened_error_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.pause_job( + cloudscheduler.PauseJobRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.ResumeJobRequest, + dict, + ], +) +def test_resume_job(request_type, transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + response = client.resume_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.ResumeJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_resume_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
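+ # NOTE: an "empty call" is simply client.resume_job() with no arguments;
+ # the client then builds a default cloudscheduler.ResumeJobRequest() and
+ # sends it, which the assertion on args[0] below verifies.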
+ client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_job), "__call__") as call: + client.resume_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.ResumeJobRequest() + + +@pytest.mark.asyncio +async def test_resume_job_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.ResumeJobRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + ) + response = await client.resume_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.ResumeJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +@pytest.mark.asyncio +async def test_resume_job_async_from_dict(): + await test_resume_job_async(request_type=dict) + + +def test_resume_job_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.ResumeJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_job), "__call__") as call: + call.return_value = job.Job() + client.resume_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_job_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.ResumeJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.resume_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) + await client.resume_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_resume_job_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = job.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_resume_job_flattened_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_job( + cloudscheduler.ResumeJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_resume_job_flattened_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = job.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_resume_job_flattened_error_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.resume_job( + cloudscheduler.ResumeJobRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.RunJobRequest, + dict, + ], +) +def test_run_job(request_type, transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + response = client.run_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.RunJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_run_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + client.run_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.RunJobRequest() + + +@pytest.mark.asyncio +async def test_run_job_async( + transport: str = "grpc_asyncio", request_type=cloudscheduler.RunJobRequest +): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + ) + response = await client.run_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudscheduler.RunJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +@pytest.mark.asyncio +async def test_run_job_async_from_dict(): + await test_run_job_async(request_type=dict) + + +def test_run_job_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.RunJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.run_job), "__call__") as call: + call.return_value = job.Job() + client.run_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_job_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudscheduler.RunJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) + await client.run_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_run_job_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = job.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_run_job_flattened_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_job( + cloudscheduler.RunJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_run_job_flattened_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = job.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_run_job_flattened_error_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.run_job( + cloudscheduler.RunJobRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.ListJobsRequest, + dict, + ], +) +def test_list_jobs_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudscheduler.ListJobsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudscheduler.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_jobs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_jobs_rest_required_fields(request_type=cloudscheduler.ListJobsRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "legacy_app_engine_cron", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudscheduler.ListJobsResponse() + # Mock the http request call within the method and fake a response. 
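+ # NOTE: in the block below, path_template.transcode is mocked out.
+ # transcode() is what maps a request message onto the method's http rule;
+ # its result is a dict shaped roughly like:
+ #
+ #     {"uri": ..., "method": "get", "query_params": ...}
+ #     # plus a "body" entry for methods that send one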
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudscheduler.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_jobs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_jobs_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "legacyAppEngineCron", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_jobs_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_list_jobs" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_list_jobs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.ListJobsRequest.pb(cloudscheduler.ListJobsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudscheduler.ListJobsResponse.to_json( + cloudscheduler.ListJobsResponse() + ) + + request = cloudscheduler.ListJobsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudscheduler.ListJobsResponse() + + client.list_jobs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_jobs_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.ListJobsRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
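+ # NOTE: a 400 status on the mocked response is enough here because
+ # google.api_core maps HTTP status codes onto exception classes, e.g.:
+ #
+ #     from google.api_core import exceptions as core_exceptions
+ #     exc = core_exceptions.from_http_status(400, "bad request")
+ #     assert isinstance(exc, core_exceptions.BadRequest)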
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_jobs(request) + + +def test_list_jobs_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudscheduler.ListJobsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudscheduler.ListJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_jobs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/jobs" % client.transport._host, + args[1], + ) + + +def test_list_jobs_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + cloudscheduler.ListJobsRequest(), + parent="parent_value", + ) + + +def test_list_jobs_rest_pager(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
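+ # NOTE: the pager test below queues two identical four-page series with
+ # 3, 0, 1 and 2 jobs per page. Each pass over the pager consumes one
+ # series (3 + 0 + 1 + 2 = 6 jobs), matching the len(results) == 6 and
+ # four-token page checks at the end; as a sketch:
+ #
+ #     assert sum(len(page.jobs) for page in pages) == 6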
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudscheduler.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + job.Job(), + ], + next_page_token="abc", + ), + cloudscheduler.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + cloudscheduler.ListJobsResponse( + jobs=[ + job.Job(), + ], + next_page_token="ghi", + ), + cloudscheduler.ListJobsResponse( + jobs=[ + job.Job(), + job.Job(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudscheduler.ListJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_jobs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, job.Job) for i in results) + + pages = list(client.list_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.GetJobRequest, + dict, + ], +) +def test_get_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_job(request) + + # Establish that the response is the type that we expect. 
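+ # NOTE: the REST transport reads req.return_value.content and parses the
+ # JSON back into a proto before the assertions below run; roughly, using
+ # this module's json_format import:
+ #
+ #     pb = job.Job.pb(job.Job())
+ #     json_format.Parse(response_value.content, pb)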
+ assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_get_job_rest_required_fields(request_type=cloudscheduler.GetJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_get_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_get_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.GetJobRequest.pb(cloudscheduler.GetJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = job.Job.to_json(job.Job()) + + request = cloudscheduler.GetJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = job.Job() + + client.get_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.GetJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job(request) + + +def test_get_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
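+ # NOTE: this flattened REST test finishes by checking the URL actually
+ # invoked against the v1beta1 http rule via path_template.validate; a
+ # standalone sketch with placeholder values:
+ #
+ #     from google.api_core import path_template
+ #     assert path_template.validate(
+ #         "https://example.com/v1beta1/{name=projects/*/locations/*/jobs/*}",
+ #         "https://example.com/v1beta1/projects/p/locations/l/jobs/j",
+ #     )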
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + args[1], + ) + + +def test_get_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + cloudscheduler.GetJobRequest(), + name="name_value", + ) + + +def test_get_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.CreateJobRequest, + dict, + ], +) +def test_create_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["job"] = { + "name": "name_value", + "description": "description_value", + "pubsub_target": { + "topic_name": "topic_name_value", + "data": b"data_blob", + "attributes": {}, + }, + "app_engine_http_target": { + "http_method": 1, + "app_engine_routing": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "relative_uri": "relative_uri_value", + "headers": {}, + "body": b"body_blob", + }, + "http_target": { + "uri": "uri_value", + "http_method": 1, + "headers": {}, + "body": b"body_blob", + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "schedule": "schedule_value", + "time_zone": "time_zone_value", + "user_update_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "schedule_time": {}, + "last_attempt_time": {}, + "retry_config": { + "retry_count": 1214, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff_duration": {}, + "max_backoff_duration": {}, + "max_doublings": 1388, + }, + "attempt_deadline": {}, + "legacy_app_engine_cron": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
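+ # NOTE: request_type(**request_init) above leans on proto-plus coercion:
+ # plain dicts are accepted wherever a message field is expected, e.g.
+ # (a hedged sketch):
+ #
+ #     cloudscheduler.CreateJobRequest(
+ #         parent="projects/sample1/locations/sample2",
+ #         job={"name": "name_value", "schedule": "schedule_value"},
+ #     )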
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=gcs_job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_job(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcs_job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == gcs_job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_create_job_rest_required_fields(request_type=cloudscheduler.CreateJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "job", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_create_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_create_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.CreateJobRequest.pb( + cloudscheduler.CreateJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcs_job.Job.to_json(gcs_job.Job()) + + request = cloudscheduler.CreateJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcs_job.Job() + + client.create_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.CreateJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["job"] = { + "name": "name_value", + "description": "description_value", + "pubsub_target": { + "topic_name": "topic_name_value", + "data": b"data_blob", + "attributes": {}, + }, + "app_engine_http_target": { + "http_method": 1, + "app_engine_routing": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "relative_uri": "relative_uri_value", + "headers": {}, + "body": b"body_blob", + }, + "http_target": { + "uri": "uri_value", + "http_method": 1, + "headers": {}, + "body": b"body_blob", + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": 
"scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "schedule": "schedule_value", + "time_zone": "time_zone_value", + "user_update_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "schedule_time": {}, + "last_attempt_time": {}, + "retry_config": { + "retry_count": 1214, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff_duration": {}, + "max_backoff_duration": {}, + "max_doublings": 1388, + }, + "attempt_deadline": {}, + "legacy_app_engine_cron": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job(request) + + +def test_create_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + job=gcs_job.Job(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/jobs" % client.transport._host, + args[1], + ) + + +def test_create_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_job( + cloudscheduler.CreateJobRequest(), + parent="parent_value", + job=gcs_job.Job(name="name_value"), + ) + + +def test_create_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.UpdateJobRequest, + dict, + ], +) +def test_update_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"job": {"name": "projects/sample1/locations/sample2/jobs/sample3"}} + request_init["job"] = { + "name": "projects/sample1/locations/sample2/jobs/sample3", + "description": "description_value", + "pubsub_target": { + "topic_name": "topic_name_value", + "data": b"data_blob", + "attributes": {}, + }, + "app_engine_http_target": { + "http_method": 1, + "app_engine_routing": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "relative_uri": "relative_uri_value", + "headers": {}, + "body": b"body_blob", + }, + "http_target": { + "uri": "uri_value", + "http_method": 1, + "headers": {}, + "body": b"body_blob", + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "schedule": "schedule_value", + "time_zone": "time_zone_value", + "user_update_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "schedule_time": {}, + "last_attempt_time": {}, + "retry_config": { + "retry_count": 1214, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff_duration": {}, + "max_backoff_duration": {}, + "max_doublings": 1388, + }, + "attempt_deadline": {}, + "legacy_app_engine_cron": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=gcs_job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_job(request) + + # Establish that the response is the type that we expect. 
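+    # The client deserializes the faked JSON body back into a proto-plus Job,
+    # so every field set on ``return_value`` above should round-trip intact.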
+ assert isinstance(response, gcs_job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == gcs_job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_update_job_rest_required_fields(request_type=cloudscheduler.UpdateJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
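+            # For UpdateJob, the job message forms the request body while
+            # update_mask travels as a query parameter, which is why it is the
+            # only field surfaced by the unset-required-fields check above.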
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("job",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_update_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_update_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.UpdateJobRequest.pb( + cloudscheduler.UpdateJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gcs_job.Job.to_json(gcs_job.Job()) + + request = cloudscheduler.UpdateJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcs_job.Job() + + client.update_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.UpdateJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"job": {"name": "projects/sample1/locations/sample2/jobs/sample3"}} + request_init["job"] = { + "name": "projects/sample1/locations/sample2/jobs/sample3", + "description": "description_value", + "pubsub_target": { + "topic_name": "topic_name_value", + "data": b"data_blob", + "attributes": {}, + }, + "app_engine_http_target": { + "http_method": 1, + "app_engine_routing": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "relative_uri": "relative_uri_value", + "headers": {}, + "body": b"body_blob", + }, + "http_target": { + "uri": "uri_value", + "http_method": 1, + "headers": {}, + "body": b"body_blob", + "oauth_token": { + "service_account_email": 
"service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "schedule": "schedule_value", + "time_zone": "time_zone_value", + "user_update_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "schedule_time": {}, + "last_attempt_time": {}, + "retry_config": { + "retry_count": 1214, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff_duration": {}, + "max_backoff_duration": {}, + "max_doublings": 1388, + }, + "attempt_deadline": {}, + "legacy_app_engine_cron": True, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job(request) + + +def test_update_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcs_job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = { + "job": {"name": "projects/sample1/locations/sample2/jobs/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + job=gcs_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gcs_job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{job.name=projects/*/locations/*/jobs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_job( + cloudscheduler.UpdateJobRequest(), + job=gcs_job.Job(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.DeleteJobRequest, + dict, + ], +) +def test_delete_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_job(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_rest_required_fields(request_type=cloudscheduler.DeleteJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("legacy_app_engine_cron",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
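+            # Unlike the create/update cases, DELETE carries no request body,
+            # so the transcode result below deliberately omits the "body" key.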
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(("legacyAppEngineCron",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_delete_job" + ) as pre: + pre.assert_not_called() + pb_message = cloudscheduler.DeleteJobRequest.pb( + cloudscheduler.DeleteJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudscheduler.DeleteJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.DeleteJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_job(request) + + +def test_delete_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
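+        # DeleteJob maps to google.protobuf.Empty over REST: the faked body is
+        # an empty string, and the client surfaces the result as ``None``.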
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + args[1], + ) + + +def test_delete_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job( + cloudscheduler.DeleteJobRequest(), + name="name_value", + ) + + +def test_delete_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.PauseJobRequest, + dict, + ], +) +def test_pause_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pause_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_pause_job_rest_required_fields(request_type=cloudscheduler.PauseJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.pause_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pause_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pause_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pause_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_pause_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_pause_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.PauseJobRequest.pb(cloudscheduler.PauseJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = job.Job.to_json(job.Job()) + + request = cloudscheduler.PauseJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = job.Job() + + client.pause_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_pause_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.PauseJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.pause_job(request) + + +def test_pause_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.pause_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/jobs/*}:pause" + % client.transport._host, + args[1], + ) + + +def test_pause_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_job( + cloudscheduler.PauseJobRequest(), + name="name_value", + ) + + +def test_pause_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.ResumeJobRequest, + dict, + ], +) +def test_resume_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resume_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_resume_job_rest_required_fields(request_type=cloudscheduler.ResumeJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resume_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resume_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.resume_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_resume_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_resume_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.ResumeJobRequest.pb( + cloudscheduler.ResumeJobRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = job.Job.to_json(job.Job()) + + request = cloudscheduler.ResumeJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = job.Job() + + client.resume_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.ResumeJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume_job(request) + + +def test_resume_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resume_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/jobs/*}:resume" + % client.transport._host, + args[1], + ) + + +def test_resume_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_job( + cloudscheduler.ResumeJobRequest(), + name="name_value", + ) + + +def test_resume_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudscheduler.RunJobRequest, + dict, + ], +) +def test_run_job_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job( + name="name_value", + description="description_value", + schedule="schedule_value", + time_zone="time_zone_value", + state=job.Job.State.ENABLED, + legacy_app_engine_cron=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_job(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, job.Job) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.schedule == "schedule_value" + assert response.time_zone == "time_zone_value" + assert response.state == job.Job.State.ENABLED + assert response.legacy_app_engine_cron is True + + +def test_run_job_rest_required_fields(request_type=cloudscheduler.RunJobRequest): + transport_class = transports.CloudSchedulerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = job.Job() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.run_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_job_rest_unset_required_fields(): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_job_rest_interceptors(null_interceptor): + transport = transports.CloudSchedulerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudSchedulerRestInterceptor(), + ) + client = CloudSchedulerClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "post_run_job" + ) as post, mock.patch.object( + transports.CloudSchedulerRestInterceptor, "pre_run_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudscheduler.RunJobRequest.pb(cloudscheduler.RunJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = job.Job.to_json(job.Job()) + + request = cloudscheduler.RunJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = job.Job() + + client.run_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_job_rest_bad_request( + transport: str = "rest", request_type=cloudscheduler.RunJobRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_job(request) + + +def test_run_job_rest_flattened(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/jobs/*}:run" + % client.transport._host, + args[1], + ) + + +def test_run_job_rest_flattened_error(transport: str = "rest"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_job( + cloudscheduler.RunJobRequest(), + name="name_value", + ) + + +def test_run_job_rest_error(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudSchedulerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudSchedulerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudSchedulerClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudSchedulerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
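+    # When a ready-made transport is injected, the client defers to it
+    # entirely; as test_credentials_transport_error shows above, combining a
+    # transport instance with credentials, scopes, or an api_key is rejected.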
+ transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudSchedulerClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudSchedulerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudSchedulerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + transports.CloudSchedulerRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudSchedulerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudSchedulerGrpcTransport, + ) + + +def test_cloud_scheduler_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudSchedulerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_scheduler_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.scheduler_v1beta1.services.cloud_scheduler.transports.CloudSchedulerTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudSchedulerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
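+    # The base transport is effectively an abstract interface: the concrete
+    # gRPC, gRPC-asyncio, and REST subclasses override each stub listed below.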
+ methods = ( + "list_jobs", + "get_job", + "create_job", + "update_job", + "delete_job", + "pause_job", + "resume_job", + "run_job", + "get_location", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_scheduler_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.scheduler_v1beta1.services.cloud_scheduler.transports.CloudSchedulerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudSchedulerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_scheduler_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.scheduler_v1beta1.services.cloud_scheduler.transports.CloudSchedulerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudSchedulerTransport() + adc.assert_called_once() + + +def test_cloud_scheduler_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudSchedulerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + ], +) +def test_cloud_scheduler_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
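+    # google.auth.default() is the Application Default Credentials entry point
+    # (env var, gcloud config, or metadata server); patching it keeps the test
+    # hermetic while still verifying scopes and quota project are forwarded.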
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + transports.CloudSchedulerRestTransport, + ], +) +def test_cloud_scheduler_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudSchedulerGrpcTransport, grpc_helpers), + (transports.CloudSchedulerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cloud_scheduler_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudscheduler.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudscheduler.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + ], +) +def test_cloud_scheduler_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
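+    # client_cert_source_callback (defined earlier in this module) returns a
+    # (certificate bytes, key bytes) pair; the transport should hand that pair
+    # to grpc.ssl_channel_credentials(), which the assertion below verifies.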
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cloud_scheduler_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudSchedulerRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cloud_scheduler_host_no_port(transport_name): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudscheduler.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudscheduler.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudscheduler.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cloud_scheduler_host_with_port(transport_name): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudscheduler.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudscheduler.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudscheduler.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cloud_scheduler_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudSchedulerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudSchedulerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_jobs._session + session2 = client2.transport.list_jobs._session + assert session1 != session2 + session1 = client1.transport.get_job._session + session2 = client2.transport.get_job._session + assert session1 != session2 + session1 = client1.transport.create_job._session + session2 = client2.transport.create_job._session + assert session1 != session2 + session1 = client1.transport.update_job._session + session2 = client2.transport.update_job._session + assert session1 != session2 + session1 = client1.transport.delete_job._session + session2 = client2.transport.delete_job._session + assert session1 != session2 + session1 = client1.transport.pause_job._session + session2 = client2.transport.pause_job._session + assert session1 != session2 + session1 = client1.transport.resume_job._session + session2 = client2.transport.resume_job._session + assert session1 != session2 + session1 = client1.transport.run_job._session + session2 = client2.transport.run_job._session + assert session1 != session2 + + +def test_cloud_scheduler_grpc_transport_channel(): + channel = 
grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudSchedulerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_scheduler_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudSchedulerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + ], +) +def test_cloud_scheduler_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
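+# As above, construction is wrapped in pytest.warns(DeprecationWarning), so the
+# test also verifies that the deprecation warning is still being emitted.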
+@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudSchedulerGrpcTransport, + transports.CloudSchedulerGrpcAsyncIOTransport, + ], +) +def test_cloud_scheduler_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_job_path(): + project = "squid" + location = "clam" + job = "whelk" + expected = "projects/{project}/locations/{location}/jobs/{job}".format( + project=project, + location=location, + job=job, + ) + actual = CloudSchedulerClient.job_path(project, location, job) + assert expected == actual + + +def test_parse_job_path(): + expected = { + "project": "octopus", + "location": "oyster", + "job": "nudibranch", + } + path = CloudSchedulerClient.job_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_job_path(path) + assert expected == actual + + +def test_topic_path(): + project = "cuttlefish" + topic = "mussel" + expected = "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + actual = CloudSchedulerClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "winkle", + "topic": "nautilus", + } + path = CloudSchedulerClient.topic_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_topic_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudSchedulerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = CloudSchedulerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CloudSchedulerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = CloudSchedulerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
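+    # e.g. common_folder_path("clam") == "folders/clam", and parsing that
+    # string yields {"folder": "clam"} again.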
+ actual = CloudSchedulerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CloudSchedulerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = CloudSchedulerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = CloudSchedulerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = CloudSchedulerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CloudSchedulerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = CloudSchedulerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudSchedulerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudSchedulerTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudSchedulerTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudSchedulerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
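+    # A 400 status on the underlying requests.Session response surfaces to
+    # the caller as google.api_core.exceptions.BadRequest.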
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations(transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
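+    # An empty request still exercises request serialization and the
+    # client-side plumbing, which is all these mocked-stub tests assert.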
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
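+    # x-goog-request-params is the routing header; servers use it to route
+    # requests based on fields (here `name`) of the request message.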
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudSchedulerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudSchedulerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudSchedulerClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
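+    # Over gRPC the field travels inside the request message, so this routing
+    # metadata is what carries the URI-style routing information to the server.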
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = CloudSchedulerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudSchedulerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = CloudSchedulerClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = CloudSchedulerClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
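+        # Exiting the `with client:` block should close the underlying
+        # transport, which the mocked close() below verifies.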
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CloudSchedulerClient, transports.CloudSchedulerGrpcTransport), + (CloudSchedulerAsyncClient, transports.CloudSchedulerGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-tasks/.OwlBot.yaml b/packages/google-cloud-tasks/.OwlBot.yaml new file mode 100644 index 000000000000..4785cd8b8642 --- /dev/null +++ b/packages/google-cloud-tasks/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/tasks/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-tasks/$1 + +begin-after-commit-hash: 130ce904e5d546c312943d10f48799590f9c0f66 + diff --git a/packages/google-cloud-tasks/.coveragerc b/packages/google-cloud-tasks/.coveragerc new file mode 100644 index 000000000000..239f73fed5a2 --- /dev/null +++ b/packages/google-cloud-tasks/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/tasks/__init__.py + google/cloud/tasks/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-tasks/.flake8 b/packages/google-cloud-tasks/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-tasks/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
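+# E203 and W503 conflict with Black's formatting, and line length is managed
+# by Black rather than flake8, hence E501 in the ignore list below.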
+[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-tasks/.gitignore b/packages/google-cloud-tasks/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-tasks/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-tasks/.repo-metadata.json b/packages/google-cloud-tasks/.repo-metadata.json new file mode 100644 index 000000000000..5aaed2e0e585 --- /dev/null +++ b/packages/google-cloud-tasks/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "cloudtasks", + "name_pretty": "Cloud Tasks", + "product_documentation": "https://cloud.google.com/tasks/docs/", + "client_documentation": "https://cloud.google.com/python/docs/reference/cloudtasks/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5433985", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-tasks", + "api_id": "cloudtasks.googleapis.com", + "requires_billing": true, + "default_version": "v2", + "codeowner_team": "@googleapis/aap-dpes", + "api_shortname": "cloudtasks", + "api_description": "a fully managed service that allows you to manage the execution, dispatch and delivery of a large number of distributed tasks. You can asynchronously perform work outside of a user request. Your tasks can be executed on App Engine or any arbitrary HTTP endpoint." 
+} diff --git a/packages/google-cloud-tasks/CHANGELOG.md b/packages/google-cloud-tasks/CHANGELOG.md new file mode 100644 index 000000000000..52bf4c64da5d --- /dev/null +++ b/packages/google-cloud-tasks/CHANGELOG.md @@ -0,0 +1,565 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-tasks/#history + +## [2.14.2](https://github.com/googleapis/python-tasks/compare/v2.14.1...v2.14.2) (2023-09-13) + + +### Documentation + +* Minor formatting ([4635f43](https://github.com/googleapis/python-tasks/commit/4635f43809f41e581e217f404815184d7e1a11dc)) + +## [2.14.1](https://github.com/googleapis/python-tasks/compare/v2.14.0...v2.14.1) (2023-08-02) + + +### Documentation + +* Minor formatting ([#366](https://github.com/googleapis/python-tasks/issues/366)) ([aebc917](https://github.com/googleapis/python-tasks/commit/aebc91785c9934866aa3f1b0e27e41fc0d51b773)) + +## [2.14.0](https://github.com/googleapis/python-tasks/compare/v2.13.2...v2.14.0) (2023-07-25) + + +### Features + +* **v2:** Add YAML config for GetLocation and ListLocation ([92c3ef2](https://github.com/googleapis/python-tasks/commit/92c3ef264deb779216c8808df3eec11a9206a8ed)) +* **v2beta2:** Add UploadQueueYaml, BufferTask RPC method for CloudTasks service ([92c3ef2](https://github.com/googleapis/python-tasks/commit/92c3ef264deb779216c8808df3eec11a9206a8ed)) +* **v2beta2:** Set deadline for GetLocation, ListLocations and UploadQueueYaml RPCs ([92c3ef2](https://github.com/googleapis/python-tasks/commit/92c3ef264deb779216c8808df3eec11a9206a8ed)) +* **v2beta3:** Add BufferTask RPC method for CloudTasks service ([92c3ef2](https://github.com/googleapis/python-tasks/commit/92c3ef264deb779216c8808df3eec11a9206a8ed)) +* **v2beta3:** Add YAML config for GetLocation and ListLocations ([92c3ef2](https://github.com/googleapis/python-tasks/commit/92c3ef264deb779216c8808df3eec11a9206a8ed)) +* **v2:** Increase timeout of RPC methods to 20s ([92c3ef2](https://github.com/googleapis/python-tasks/commit/92c3ef264deb779216c8808df3eec11a9206a8ed)) + +## [2.13.2](https://github.com/googleapis/python-tasks/compare/v2.13.1...v2.13.2) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#351](https://github.com/googleapis/python-tasks/issues/351)) ([ba48edc](https://github.com/googleapis/python-tasks/commit/ba48edc3c95ba025450db0f8ce9bb35cf4f1194c)) + +## [2.13.1](https://github.com/googleapis/python-tasks/compare/v2.13.0...v2.13.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#342](https://github.com/googleapis/python-tasks/issues/342)) ([85141f8](https://github.com/googleapis/python-tasks/commit/85141f82f6dabf02b39e34420a3bbcc754227040)) + +## [2.13.0](https://github.com/googleapis/python-tasks/compare/v2.12.1...v2.13.0) (2023-02-28) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([0fb917e](https://github.com/googleapis/python-tasks/commit/0fb917e507fdcc5f7f532f3d6fcaf6a13cf0620b)) + +## [2.12.1](https://github.com/googleapis/python-tasks/compare/v2.12.0...v2.12.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([248ab5b](https://github.com/googleapis/python-tasks/commit/248ab5b10b40c4fc1dbe846dd5788bce696b4dc5)) + + +### Documentation + +* Add documentation for enums 
([248ab5b](https://github.com/googleapis/python-tasks/commit/248ab5b10b40c4fc1dbe846dd5788bce696b4dc5)) + +## [2.12.0](https://github.com/googleapis/python-tasks/compare/v2.11.0...v2.12.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#326](https://github.com/googleapis/python-tasks/issues/326)) ([f931289](https://github.com/googleapis/python-tasks/commit/f9312894076c50b55b964b216f76f1b4d34e82b6)) + +## [2.11.0](https://github.com/googleapis/python-tasks/compare/v2.10.4...v2.11.0) (2022-12-15) + + +### Features + +* Add support for `google.cloud.tasks.__version__` ([d51539f](https://github.com/googleapis/python-tasks/commit/d51539fc4e6b7b5a3f6f34d014752f3a8989b016)) +* Add typing to proto.Message based class attributes ([d51539f](https://github.com/googleapis/python-tasks/commit/d51539fc4e6b7b5a3f6f34d014752f3a8989b016)) + + +### Bug Fixes + +* Add dict typing for client_options ([d51539f](https://github.com/googleapis/python-tasks/commit/d51539fc4e6b7b5a3f6f34d014752f3a8989b016)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([5752acb](https://github.com/googleapis/python-tasks/commit/5752acb09be8771f6695de4928444c47849fafc4)) +* Drop usage of pkg_resources ([5752acb](https://github.com/googleapis/python-tasks/commit/5752acb09be8771f6695de4928444c47849fafc4)) +* Fix timeout default values ([5752acb](https://github.com/googleapis/python-tasks/commit/5752acb09be8771f6695de4928444c47849fafc4)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([d51539f](https://github.com/googleapis/python-tasks/commit/d51539fc4e6b7b5a3f6f34d014752f3a8989b016)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([5752acb](https://github.com/googleapis/python-tasks/commit/5752acb09be8771f6695de4928444c47849fafc4)) + +## [2.10.4](https://github.com/googleapis/python-tasks/compare/v2.10.3...v2.10.4) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#309](https://github.com/googleapis/python-tasks/issues/309)) ([c96e91c](https://github.com/googleapis/python-tasks/commit/c96e91c82b46860dd435857f49dbc0458835324a)) + +## [2.10.3](https://github.com/googleapis/python-tasks/compare/v2.10.2...v2.10.3) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#306](https://github.com/googleapis/python-tasks/issues/306)) ([146ce62](https://github.com/googleapis/python-tasks/commit/146ce62f4a9a56cb396b2c8554680daec67457dc)) + +## [2.10.2](https://github.com/googleapis/python-tasks/compare/v2.10.1...v2.10.2) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#288](https://github.com/googleapis/python-tasks/issues/288)) ([98f46e9](https://github.com/googleapis/python-tasks/commit/98f46e97707972cf31c5d90a256ce01c65af01f2)) +* **deps:** require proto-plus >= 1.22.0 ([98f46e9](https://github.com/googleapis/python-tasks/commit/98f46e97707972cf31c5d90a256ce01c65af01f2)) + +## [2.10.1](https://github.com/googleapis/python-tasks/compare/v2.10.0...v2.10.1) (2022-08-09) + + +### Documentation + +* **sample:** update protobuf in create_http_task.py ([#283](https://github.com/googleapis/python-tasks/issues/283)) ([b685da5](https://github.com/googleapis/python-tasks/commit/b685da5c2e315965a6fb294d89ecf98a6d684162)) + +## 
[2.10.0](https://github.com/googleapis/python-tasks/compare/v2.9.1...v2.10.0) (2022-07-16) + + +### Features + +* add audience parameter ([ad01839](https://github.com/googleapis/python-tasks/commit/ad0183951c7f1a23738004a11144b3870a91842e)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#277](https://github.com/googleapis/python-tasks/issues/277)) ([239789d](https://github.com/googleapis/python-tasks/commit/239789da46254961a27a51837441ff2035423c14)) +* require python 3.7+ ([#275](https://github.com/googleapis/python-tasks/issues/275)) ([85fd179](https://github.com/googleapis/python-tasks/commit/85fd179fda7556e9a1568ff93a4b5dd22ec01036)) + +## [2.9.1](https://github.com/googleapis/python-tasks/compare/v2.9.0...v2.9.1) (2022-06-06) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#262](https://github.com/googleapis/python-tasks/issues/262)) ([db23558](https://github.com/googleapis/python-tasks/commit/db23558d053d56ff84a4447f7af2525ff4459309)) + + +### Documentation + +* fix changelog header to consistent size ([#263](https://github.com/googleapis/python-tasks/issues/263)) ([048d907](https://github.com/googleapis/python-tasks/commit/048d907b1929f2ced1dc1d1b3536f38265994330)) + +## [2.9.0](https://github.com/googleapis/python-tasks/compare/v2.8.1...v2.9.0) (2022-05-05) + + +### Features + +* AuditConfig for IAM v1 ([7b7a294](https://github.com/googleapis/python-tasks/commit/7b7a2946a8554a06d8fdc57b13c2726c5d8a443b)) + + +### Bug Fixes + +* **deps:** require grpc-google-iam-v1 >=0.12.4 ([7b7a294](https://github.com/googleapis/python-tasks/commit/7b7a2946a8554a06d8fdc57b13c2726c5d8a443b)) + + +### Documentation + +* fix type in docstring for map fields ([7b7a294](https://github.com/googleapis/python-tasks/commit/7b7a2946a8554a06d8fdc57b13c2726c5d8a443b)) + +## [2.8.1](https://github.com/googleapis/python-tasks/compare/v2.8.0...v2.8.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#232](https://github.com/googleapis/python-tasks/issues/232)) ([2b35888](https://github.com/googleapis/python-tasks/commit/2b3588834794ce7ac6d5c762f2d45849122ddc1b)) +* **deps:** require proto-plus>=1.15.0 ([2b35888](https://github.com/googleapis/python-tasks/commit/2b3588834794ce7ac6d5c762f2d45849122ddc1b)) + +## [2.8.0](https://github.com/googleapis/python-tasks/compare/v2.7.2...v2.8.0) (2022-02-14) + + +### Features + +* add api key support ([#214](https://github.com/googleapis/python-tasks/issues/214)) ([ce21598](https://github.com/googleapis/python-tasks/commit/ce215987f969cbc6347fb58cd2163394a6fc7f1c)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([8dd8aec](https://github.com/googleapis/python-tasks/commit/8dd8aec79d2302007e3f9511daeab817f05d2aa6)) + +## [2.7.2](https://www.github.com/googleapis/python-tasks/compare/v2.7.1...v2.7.2) (2022-01-08) + + +### Documentation + +* fix docstring formatting ([#196](https://www.github.com/googleapis/python-tasks/issues/196)) ([e7a3461](https://www.github.com/googleapis/python-tasks/commit/e7a3461a34229c210e63590370fa6eee4d06630a)) + +## [2.7.1](https://www.github.com/googleapis/python-tasks/compare/v2.7.0...v2.7.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency 
([72f150f](https://www.github.com/googleapis/python-tasks/commit/72f150fe39313173ac6c02616b6ca4466f5855fe)) +* **deps:** require google-api-core >= 1.28.0 ([72f150f](https://www.github.com/googleapis/python-tasks/commit/72f150fe39313173ac6c02616b6ca4466f5855fe)) + + +### Documentation + +* list oneofs in docstring ([72f150f](https://www.github.com/googleapis/python-tasks/commit/72f150fe39313173ac6c02616b6ca4466f5855fe)) + +## [2.7.0](https://www.github.com/googleapis/python-tasks/compare/v2.6.0...v2.7.0) (2021-10-15) + + +### Features + +* add support for python 3.10 ([#181](https://www.github.com/googleapis/python-tasks/issues/181)) ([0a40ab0](https://www.github.com/googleapis/python-tasks/commit/0a40ab01070018fc3ca32008f55c18e2b65aa23b)) + +## [2.6.0](https://www.github.com/googleapis/python-tasks/compare/v2.5.3...v2.6.0) (2021-10-08) + + +### Features + +* add context manager support in client ([#173](https://www.github.com/googleapis/python-tasks/issues/173)) ([ceec8f1](https://www.github.com/googleapis/python-tasks/commit/ceec8f173af696d26cf367af2d969bf98987df2a)) + +## [2.6.0](https://www.github.com/googleapis/python-tasks/compare/v2.5.3...v2.6.0) (2021-10-07) + + +### Features + +* add context manager support in client ([#173](https://www.github.com/googleapis/python-tasks/issues/173)) ([ceec8f1](https://www.github.com/googleapis/python-tasks/commit/ceec8f173af696d26cf367af2d969bf98987df2a)) + +## [2.5.3](https://www.github.com/googleapis/python-tasks/compare/v2.5.2...v2.5.3) (2021-09-30) + + +### Bug Fixes + +* improper types in pagers generation ([0feec1e](https://www.github.com/googleapis/python-tasks/commit/0feec1e0d1e4847e2722920c8afdc597ecd92e3f)) + +## [2.5.2](https://www.github.com/googleapis/python-tasks/compare/v2.5.1...v2.5.2) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([42d768b](https://www.github.com/googleapis/python-tasks/commit/42d768b9f302aef3258f4abc413199070bcd2a8d)) + +## [2.5.1](https://www.github.com/googleapis/python-tasks/compare/v2.5.0...v2.5.1) (2021-07-28) + + +### Bug Fixes + +* enable self signed jwt for grpc chore: use gapic-generator-python 0.50.5 ([#143](https://www.github.com/googleapis/python-tasks/issues/143)) ([b8ec21e](https://www.github.com/googleapis/python-tasks/commit/b8ec21e2d3bc173249a33f34b27373e0f6c08cd2)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#135](https://www.github.com/googleapis/python-tasks/issues/135)) ([ccfc32d](https://www.github.com/googleapis/python-tasks/commit/ccfc32d56c5d0750a8f14ce244e1bc40eb4e31bd)) + + +### Miscellaneous Chores + +* release as 2.5.1 ([#144](https://www.github.com/googleapis/python-tasks/issues/144)) ([28ffe6b](https://www.github.com/googleapis/python-tasks/commit/28ffe6b149fd1267c967a8432ef41042620c314e)) + +## [2.5.0](https://www.github.com/googleapis/python-tasks/compare/v2.4.0...v2.5.0) (2021-07-21) + + +### Features + +* Set `audience` field in authenticated HTTP task example ([#138](https://www.github.com/googleapis/python-tasks/issues/138)) ([7a5a0c6](https://www.github.com/googleapis/python-tasks/commit/7a5a0c6ca5372035521d5366373054a7ba95f2bd)) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#134](https://www.github.com/googleapis/python-tasks/issues/134)) ([fd3cb31](https://www.github.com/googleapis/python-tasks/commit/fd3cb31bc1d36e5b6373bfa3d3bb9bb65aeb3f90)) + +## [2.4.0](https://www.github.com/googleapis/python-tasks/compare/v2.3.0...v2.4.0) (2021-07-01) + + +### Features + 
+* add always_use_jwt_access ([#122](https://www.github.com/googleapis/python-tasks/issues/122)) ([87c9ccc](https://www.github.com/googleapis/python-tasks/commit/87c9cccb42237eb421c72411652985a7fbe1c16a)) + + +### Bug Fixes + +* disable always_use_jwt_access ([#126](https://www.github.com/googleapis/python-tasks/issues/126)) ([54d2286](https://www.github.com/googleapis/python-tasks/commit/54d2286b153c36b7a50b5a936517aa59e10ad27c)) +* update sample for task name ([#120](https://www.github.com/googleapis/python-tasks/issues/120)) ([b1be2de](https://www.github.com/googleapis/python-tasks/commit/b1be2de174fc37d0eb90bbf877851c11ddb14907)) + + +### Documentation + +* omit mention of Python 2.7 in CONTRIBUTING.rst ([#116](https://www.github.com/googleapis/python-tasks/issues/116)) ([0732ab7](https://www.github.com/googleapis/python-tasks/commit/0732ab7d726fdf564897fad009f8a5da45b5c017)), closes [#1126](https://www.github.com/googleapis/python-tasks/issues/1126) + +## [2.3.0](https://www.github.com/googleapis/python-tasks/compare/v2.2.0...v2.3.0) (2021-05-28) + + +### Features + +* add `from_service_account_info` ([#80](https://www.github.com/googleapis/python-tasks/issues/80)) ([2498225](https://www.github.com/googleapis/python-tasks/commit/2498225112ddb4b112b387dec71631c29a6db71e)) +* support self-signed JWT flow for service accounts ([1acf20c](https://www.github.com/googleapis/python-tasks/commit/1acf20ca440a5396ee03205b5c2301b84e368926)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([1acf20c](https://www.github.com/googleapis/python-tasks/commit/1acf20ca440a5396ee03205b5c2301b84e368926)) +* use correct retry deadlines ([2498225](https://www.github.com/googleapis/python-tasks/commit/2498225112ddb4b112b387dec71631c29a6db71e)) + + +### Documentation + +* fix grammar in documentation ([#112](https://www.github.com/googleapis/python-tasks/issues/112)) ([6f93a19](https://www.github.com/googleapis/python-tasks/commit/6f93a190311bd5468827496685072388a951e670)) + +## [2.2.0](https://www.github.com/googleapis/python-tasks/compare/v2.1.0...v2.2.0) (2021-02-24) + + +### Features + +* add from_service_account_info method to clients ([e1fdc76](https://www.github.com/googleapis/python-tasks/commit/e1fdc76f5369e53067a1748aecce9fa3940d9ee1)) +* **v2beta3, v2beta2:** introducing fields: ListQueuesRequest.read_mask, GetQueueRequest.read_mask, Queue.task_ttl, Queue.tombstone_ttl, Queue.stats and introducing messages: QueueStats ([e1fdc76](https://www.github.com/googleapis/python-tasks/commit/e1fdc76f5369e53067a1748aecce9fa3940d9ee1)) + + +### Bug Fixes + +* remove client recv msg limit fix: add enums to `types/__init__.py` ([#56](https://www.github.com/googleapis/python-tasks/issues/56)) ([6a5bfaf](https://www.github.com/googleapis/python-tasks/commit/6a5bfaf63b46567897c36907772b10ea4b0dff43)) +* Update sample comments ([#58](https://www.github.com/googleapis/python-tasks/issues/58)) ([3eb30b3](https://www.github.com/googleapis/python-tasks/commit/3eb30b349b9092a2a2fb08116855139418ebd371)) + + +### Documentation + +* fix type references in docstrings ([e1fdc76](https://www.github.com/googleapis/python-tasks/commit/e1fdc76f5369e53067a1748aecce9fa3940d9ee1)) +* **v2beta2:** updates to AppEngineHttpRequest description ([e1fdc76](https://www.github.com/googleapis/python-tasks/commit/e1fdc76f5369e53067a1748aecce9fa3940d9ee1)) +* **v2beta3:** updates to max burst size description ([e1fdc76](https://www.github.com/googleapis/python-tasks/commit/e1fdc76f5369e53067a1748aecce9fa3940d9ee1)) + +## 
[2.1.0](https://www.github.com/googleapis/python-tasks/compare/v2.0.0...v2.1.0) (2020-12-07) + + +### Features + +* add common resource helpers; expose client transport; add shebang to fixup scripts ([#34](https://www.github.com/googleapis/python-tasks/issues/34)) ([511e9f3](https://www.github.com/googleapis/python-tasks/commit/511e9f3d5da4c8b86adca8bddc65dc37a989edcf)) + +## [2.0.0](https://www.github.com/googleapis/python-tasks/compare/v1.5.0...v2.0.0) (2020-09-02) + + +### ⚠ BREAKING CHANGES + +* migrate to use microgen (#38) + +### Features + +* introduce field Queue.type; update default retry configs ([#29](https://www.github.com/googleapis/python-tasks/issues/29)) ([6b8ba85](https://www.github.com/googleapis/python-tasks/commit/6b8ba85de5998b0c2138bbf771fa16ba8f9bbf07)) +* migrate to use microgen ([#38](https://www.github.com/googleapis/python-tasks/issues/38)) ([18e146c](https://www.github.com/googleapis/python-tasks/commit/18e146cab5e2b669538ca6b1d58603e72d58ae88)) + + +### Documentation + +* add samples from python-docs-samples/tasks ([#36](https://www.github.com/googleapis/python-tasks/issues/36)) ([9d022f7](https://www.github.com/googleapis/python-tasks/commit/9d022f736912df8a0f4d13e2a98dd53cf506f2dc)), closes [#1068](https://www.github.com/googleapis/python-tasks/issues/1068) [#1116](https://www.github.com/googleapis/python-tasks/issues/1116) [#1133](https://www.github.com/googleapis/python-tasks/issues/1133) [#1186](https://www.github.com/googleapis/python-tasks/issues/1186) [#1217](https://www.github.com/googleapis/python-tasks/issues/1217) [#1254](https://www.github.com/googleapis/python-tasks/issues/1254) [#1271](https://www.github.com/googleapis/python-tasks/issues/1271) [#1288](https://www.github.com/googleapis/python-tasks/issues/1288) [#1309](https://www.github.com/googleapis/python-tasks/issues/1309) [#1311](https://www.github.com/googleapis/python-tasks/issues/1311) [#1329](https://www.github.com/googleapis/python-tasks/issues/1329) [#1320](https://www.github.com/googleapis/python-tasks/issues/1320) [#1355](https://www.github.com/googleapis/python-tasks/issues/1355) [#1359](https://www.github.com/googleapis/python-tasks/issues/1359) [#1529](https://www.github.com/googleapis/python-tasks/issues/1529) [#1532](https://www.github.com/googleapis/python-tasks/issues/1532) [#1541](https://www.github.com/googleapis/python-tasks/issues/1541) [#1563](https://www.github.com/googleapis/python-tasks/issues/1563) [#1552](https://www.github.com/googleapis/python-tasks/issues/1552) [#1566](https://www.github.com/googleapis/python-tasks/issues/1566) [#1698](https://www.github.com/googleapis/python-tasks/issues/1698) [#2114](https://www.github.com/googleapis/python-tasks/issues/2114) [#2113](https://www.github.com/googleapis/python-tasks/issues/2113) [#2156](https://www.github.com/googleapis/python-tasks/issues/2156) [#2208](https://www.github.com/googleapis/python-tasks/issues/2208) [#2250](https://www.github.com/googleapis/python-tasks/issues/2250) [#2316](https://www.github.com/googleapis/python-tasks/issues/2316) [#2187](https://www.github.com/googleapis/python-tasks/issues/2187) [#2439](https://www.github.com/googleapis/python-tasks/issues/2439) [#2516](https://www.github.com/googleapis/python-tasks/issues/2516) [#2543](https://www.github.com/googleapis/python-tasks/issues/2543) [#2700](https://www.github.com/googleapis/python-tasks/issues/2700) [#3168](https://www.github.com/googleapis/python-tasks/issues/3168) [#3171](https://www.github.com/googleapis/python-tasks/issues/3171) + 
+## [1.5.0](https://www.github.com/googleapis/python-tasks/compare/v1.4.0...v1.5.0) (2020-02-24) + + +### Features + +* **tasks:** add support for stackdriver logging config; update retry config (via synth) ([#8](https://www.github.com/googleapis/python-tasks/issues/8)) ([70b597a](https://www.github.com/googleapis/python-tasks/commit/70b597a615c75976a4993ab223328d7cba3bd139)) + +## [1.4.0](https://www.github.com/googleapis/python-tasks/compare/v1.3.0...v1.4.0) (2020-02-06) + + +### Features + +* **tasks:** undeprecate resource helper methods; add py2 deprecation warning; change default timeouts; add 3.8 tests; edit docstrings (via synth)([#10074](https://www.github.com/googleapis/python-tasks/issues/10074)) ([5577817](https://www.github.com/googleapis/python-tasks/commit/5577817fbe6435af03d862761fa08288b02cc69a)) + + +### Bug Fixes + +* **tasks:** change default timeout values; bump copyright year to 2020; change line breaks in docstrings (via synth) ([#10271](https://www.github.com/googleapis/python-tasks/issues/10271)) ([f68536d](https://www.github.com/googleapis/python-tasks/commit/f68536d95b6e320e4140d6720cc0c47c184dd694)) +* **tasks:** deprecate resource name helper methods (via synth) ([#9864](https://www.github.com/googleapis/python-tasks/issues/9864)) ([ccf2cab](https://www.github.com/googleapis/python-tasks/commit/ccf2cabbe32d91988bd9456dc777622182beb658)) + +## 1.3.0 + +11-04-2019 10:06 PST + +### Implementation Changes +- Add proto annotations (via synth) ([#9352](https://github.com/googleapis/google-cloud-python/pull/9352)) + +### New Features +- Add HTTP tasks, OAuth tokens, and OIDC tokens (via synth) ([#9588](https://github.com/googleapis/google-cloud-python/pull/9588)) + +### Documentation +- Tweak docstrings (via synth) ([#9433](https://github.com/googleapis/google-cloud-python/pull/9433)) +- Disambiguate client requests from cloud task requests ([#9398](https://github.com/googleapis/google-cloud-python/pull/9398)) +- Change requests intersphinx url (via synth) ([#9409](https://github.com/googleapis/google-cloud-python/pull/9409)) +- Update documentation (via synth) ([#9069](https://github.com/googleapis/google-cloud-python/pull/9069)) +- Remove compatibility badges from READMEs ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + +## 1.2.1 + +08-12-2019 13:50 PDT + +### Implementation Changes +- Remove send/recv msg size limit (via synth). ([#8971](https://github.com/googleapis/google-cloud-python/pull/8971)) + +### Documentation +- Fix links to googleapis.dev ([#8998](https://github.com/googleapis/google-cloud-python/pull/8998)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + +## 1.2.0 + +07-24-2019 17:41 PDT + + +### Implementation Changes +- Allow kwargs to be passed to create_channel (via synth). ([#8406](https://github.com/googleapis/google-cloud-python/pull/8406)) + +### New Features +- Add 'options_' argument to clients' 'get_iam_policy'; pin black version (via synth). ([#8660](https://github.com/googleapis/google-cloud-python/pull/8660)) +- Add 'client_options' support, update list method docstrings (via synth). 
([#8524](https://github.com/googleapis/google-cloud-python/pull/8524)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) +- Update pin for 'grpc-google-iam-v1' to 0.12.3+. ([#8647](https://github.com/googleapis/google-cloud-python/pull/8647)) + +### Documentation + +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) +- Fix typo in README. ([#8606](https://github.com/googleapis/google-cloud-python/pull/8606)) + +### Internal / Testing Changes +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Declare encoding as utf-8 in pb2 files (via synth). ([#8366](https://github.com/googleapis/google-cloud-python/pull/8366)) +- Add disclaimer to auto-generated template files (via synth). ([#8330](https://github.com/googleapis/google-cloud-python/pull/8330)) +- Suppress checking 'cov-fail-under' in nox default session (via synth). ([#8254](https://github.com/googleapis/google-cloud-python/pull/8254)) +- Fix coverage in 'types.py' (via synth). ([#8166](https://github.com/googleapis/google-cloud-python/pull/8166)) +- Blacken noxfile.py, setup.py (via synth). ([#8134](https://github.com/googleapis/google-cloud-python/pull/8134)) +- Add empty lines (via synth). ([#8074](https://github.com/googleapis/google-cloud-python/pull/8074)) + +## 1.1.0 + +05-14-2019 15:30 PDT + +### Implementation Changes +- Remove log_sampling_ratio, add stackdriver_logging_config (via synth). ([#7950](https://github.com/googleapis/google-cloud-python/pull/7950)) + +### Documentation +- Update docstrings (via synth). ([#7963](https://github.com/googleapis/google-cloud-python/pull/7963)) +- Update docstrings (via synth). ([#7940](https://github.com/googleapis/google-cloud-python/pull/7940)) + +### Internal / Testing Changes +- Add nox session `docs`, reorder methods (via synth). ([#7783](https://github.com/googleapis/google-cloud-python/pull/7783)) + +## 1.0.0 + +04-29-2019 16:35 PDT + +### Documentation +- Correct docs/index.rst. ([#7808](https://github.com/googleapis/google-cloud-python/pull/7808)) + +### Internal / Testing Changes +- Add smoke test. ([#7808](https://github.com/googleapis/google-cloud-python/pull/7808)) + +## 0.7.0 + +04-15-2019 10:21 PDT + + +### New Features +- Add auth and stackdriver logging configuration (via synth). ([#7666](https://github.com/googleapis/google-cloud-python/pull/7666)) + +### Documentation +- Tasks: Format docstrings for enums (via synth). ([#7601](https://github.com/googleapis/google-cloud-python/pull/7601)) + +## 0.6.0 + +03-26-2019 13:35 PDT + + +### Implementation Changes +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) + +### New Features +- Generate v2. ([#7547](https://github.com/googleapis/google-cloud-python/pull/7547)) + +## 0.5.0 + +03-06-2019 15:03 PST + + +### Implementation Changes +- Remove unused message exports (via synth). ([#7276](https://github.com/googleapis/google-cloud-python/pull/7276)) +- Protoc-generated serialization update. 
([#7096](https://github.com/googleapis/google-cloud-python/pull/7096)) + +### New Features +- Add 'Task.http_request' and associated message type (via synth). ([#7432](https://github.com/googleapis/google-cloud-python/pull/7432)) +- Add 'Task.dispatch_deadline' via synth. ([#7211](https://github.com/googleapis/google-cloud-python/pull/7211)) + +### Documentation +- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) +- Update copyright headers +- Restore expanded example from PR [#7025](https://github.com/googleapis/google-cloud-python/pull/7025) after synth. ([#7062](https://github.com/googleapis/google-cloud-python/pull/7062)) +- Add working example for 'create_queue'. ([#7025](https://github.com/googleapis/google-cloud-python/pull/7025)) +- Pick up stub docstring fix in GAPIC generator. ([#6983](https://github.com/googleapis/google-cloud-python/pull/6983)) + +### Internal / Testing Changes +- Copy lintified proto files (via synth). ([#7471](https://github.com/googleapis/google-cloud-python/pull/7471)) +- Add clarifying comment to blacken nox target. ([#7405](https://github.com/googleapis/google-cloud-python/pull/7405)) +- Copy proto files alongside protoc versions +- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) + +## 0.4.0 + +12-18-2018 09:50 PST + + +### Implementation Changes +- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) +- Pick up enum fixes in the GAPIC generator. ([#6616](https://github.com/googleapis/google-cloud-python/pull/6616)) +- Fix `client_info` bug, update docstrings and timeouts. ([#6422](https://github.com/googleapis/google-cloud-python/pull/6422)) +- Re-generate library using tasks/synth.py ([#5980](https://github.com/googleapis/google-cloud-python/pull/5980)) + +### New Features +- Pick up changes to GAPIC generator, drop 'Code' enum. ([#6509](https://github.com/googleapis/google-cloud-python/pull/6509)) + +### Dependencies +- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) +- Avoid broken 'google-common-apis 1.5.4' release. ([#6355](https://github.com/googleapis/google-cloud-python/pull/6355)) + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Separate / distinguish API docs for different API versions. ([#6306](https://github.com/googleapis/google-cloud-python/pull/6306)) +- Docstring tweaks from protos. ([#6261](https://github.com/googleapis/google-cloud-python/pull/6261)) +- Normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) +- Remove autosynth / tweaks for 'README.rst' / 'setup.py'. ([#5957](https://github.com/googleapis/google-cloud-python/pull/5957)) +- Replace links to `/stable/` with `/latest/`. ([#5901](https://github.com/googleapis/google-cloud-python/pull/5901)) + +### Internal / Testing Changes +- Add baseline for synth.metadata +- Update noxfile. 
+- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) +- Add 'mock' to unit test dependencies for autogen libs. ([#6402](https://github.com/googleapis/google-cloud-python/pull/6402)) +- Add / fix badges for PyPI / versions. ([#6158](https://github.com/googleapis/google-cloud-python/pull/6158)) +- Don't update nox in 'tasks/synth.py'. ([#6232](https://github.com/googleapis/google-cloud-python/pull/6232)) +- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) + +## 0.3.0 + +### Implementation Changes +- Regenerate tasks to fix API enablement URL (#5579) + +### New Features +- Tasks: Add v2beta3 endpoint (#5880) + +### Documentation +- update Task library doc link (#5708) +- tasks missing from docs (#5656) + +## 0.2.0 + +### Implementation Changes +- regenerate tasks v2beta2 (#5469) +- Avoid overwriting '__module__' of messages from shared modules. (#5364) + +## 0.1.0 + +### New Features +- Add v2beta2 endpoint for Tasks diff --git a/packages/google-cloud-tasks/CODE_OF_CONDUCT.md b/packages/google-cloud-tasks/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-tasks/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-tasks/CONTRIBUTING.rst b/packages/google-cloud-tasks/CONTRIBUTING.rst new file mode 100644 index 000000000000..cf7aa3b9193b --- /dev/null +++ b/packages/google-cloud-tasks/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. 
Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.11 -- -k <name of test> + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. _nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration.
+ If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit <https://pre-commit.com/>`__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.11 -- -k <name of test> + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via:: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` directory. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests.
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k <name of test> + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-tasks + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-tasks/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA <https://developers.google.com/open-source/cla/individual>`__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-tasks/LICENSE b/packages/google-cloud-tasks/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-tasks/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity.
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-tasks/MANIFEST.in b/packages/google-cloud-tasks/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-tasks/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-tasks/README.rst b/packages/google-cloud-tasks/README.rst new file mode 100644 index 000000000000..d2824908a018 --- /dev/null +++ b/packages/google-cloud-tasks/README.rst @@ -0,0 +1,108 @@ +Python Client for Cloud Tasks +============================= + +|stable| |pypi| |versions| + +`Cloud Tasks`_: a fully managed service that allows you to manage the execution, dispatch and delivery of a large number of distributed tasks. You can asynchronously perform work outside of a user request. Your tasks can be executed on App Engine or any arbitrary HTTP endpoint. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-tasks.svg + :target: https://pypi.org/project/google-cloud-tasks/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-tasks.svg + :target: https://pypi.org/project/google-cloud-tasks/ +.. _Cloud Tasks: https://cloud.google.com/tasks/docs/ +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/cloudtasks/latest +.. _Product Documentation: https://cloud.google.com/tasks/docs/ +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Cloud Tasks API.`_ +4. `Set up Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud Tasks API.: https://cloud.google.com/tasks/docs/ +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects.
+ +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-tasks + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-tasks + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud Tasks + to see other available methods on the client. +- Read the `Cloud Tasks Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Cloud Tasks Product documentation: https://cloud.google.com/tasks/docs/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-tasks/SECURITY.md b/packages/google-cloud-tasks/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-tasks/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
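To make the README's quick start concrete, the following is a minimal usage sketch (not part of this diff) showing how a task might be created with the ``tasks_v2`` client this change adds. The project ID, region, queue name, and handler URL are hypothetical placeholders; substitute your own.

.. code-block:: python

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()

    # Hypothetical identifiers -- replace with your own project, region, and queue.
    parent = client.queue_path("my-project", "us-central1", "my-queue")

    # Build a task that POSTs a payload to an arbitrary HTTP endpoint.
    task = tasks_v2.Task(
        http_request=tasks_v2.HttpRequest(
            http_method=tasks_v2.HttpMethod.POST,
            url="https://example.com/task_handler",  # hypothetical handler
            body=b"hello",
        )
    )

    # Enqueue the task; the response carries the server-assigned task name.
    response = client.create_task(parent=parent, task=task)
    print(f"Created task: {response.name}")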
diff --git a/packages/google-cloud-tasks/docs/CHANGELOG.md b/packages/google-cloud-tasks/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-tasks/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-tasks/docs/README.rst b/packages/google-cloud-tasks/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-tasks/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-tasks/docs/_static/custom.css b/packages/google-cloud-tasks/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-tasks/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-tasks/docs/_templates/layout.html b/packages/google-cloud-tasks/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-tasks/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-tasks/docs/conf.py b/packages/google-cloud-tasks/docs/conf.py new file mode 100644 index 000000000000..a9fed95332f0 --- /dev/null +++ b/packages/google-cloud-tasks/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-tasks documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-tasks" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-tasks", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-tasks-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-tasks.tex", + "google-cloud-tasks Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-tasks", + "google-cloud-tasks Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-tasks", + "google-cloud-tasks Documentation", + author, + "google-cloud-tasks", + "google-cloud-tasks Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-tasks/docs/index.rst b/packages/google-cloud-tasks/docs/index.rst new file mode 100644 index 000000000000..c95dbd921d76 --- /dev/null +++ b/packages/google-cloud-tasks/docs/index.rst @@ -0,0 +1,42 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of Cloud Tasks. +By default, you will get version ``tasks_v2``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + tasks_v2/services + tasks_v2/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + tasks_v2beta2/services + tasks_v2beta2/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + tasks_v2beta3/services + tasks_v2beta3/types + + +Changelog +--------- + +For a list of all ``google-cloud-tasks`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-tasks/docs/multiprocessing.rst b/packages/google-cloud-tasks/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-tasks/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-tasks/docs/tasks_v2/cloud_tasks.rst b/packages/google-cloud-tasks/docs/tasks_v2/cloud_tasks.rst new file mode 100644 index 000000000000..11481d7c02e9 --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2/cloud_tasks.rst @@ -0,0 +1,10 @@ +CloudTasks +---------------------------- + +.. automodule:: google.cloud.tasks_v2.services.cloud_tasks + :members: + :inherited-members: + +.. automodule:: google.cloud.tasks_v2.services.cloud_tasks.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-tasks/docs/tasks_v2/services.rst b/packages/google-cloud-tasks/docs/tasks_v2/services.rst new file mode 100644 index 000000000000..f24b73b1b5b3 --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2 API +====================================== +.. toctree:: + :maxdepth: 2 + + cloud_tasks diff --git a/packages/google-cloud-tasks/docs/tasks_v2/types.rst b/packages/google-cloud-tasks/docs/tasks_v2/types.rst new file mode 100644 index 000000000000..a43c04fdcb47 --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Tasks v2 API +=================================== + +.. automodule:: google.cloud.tasks_v2.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-tasks/docs/tasks_v2beta2/cloud_tasks.rst b/packages/google-cloud-tasks/docs/tasks_v2beta2/cloud_tasks.rst new file mode 100644 index 000000000000..eacf8fb7b385 --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2beta2/cloud_tasks.rst @@ -0,0 +1,10 @@ +CloudTasks +---------------------------- + +.. automodule:: google.cloud.tasks_v2beta2.services.cloud_tasks + :members: + :inherited-members: + +.. automodule:: google.cloud.tasks_v2beta2.services.cloud_tasks.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-tasks/docs/tasks_v2beta2/services.rst b/packages/google-cloud-tasks/docs/tasks_v2beta2/services.rst new file mode 100644 index 000000000000..4273c20e6a03 --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2beta2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2beta2 API +=========================================== +.. toctree:: + :maxdepth: 2 + + cloud_tasks diff --git a/packages/google-cloud-tasks/docs/tasks_v2beta2/types.rst b/packages/google-cloud-tasks/docs/tasks_v2beta2/types.rst new file mode 100644 index 000000000000..3a8eb123ac44 --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2beta2/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Tasks v2beta2 API +======================================== + +.. automodule:: google.cloud.tasks_v2beta2.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-tasks/docs/tasks_v2beta3/cloud_tasks.rst b/packages/google-cloud-tasks/docs/tasks_v2beta3/cloud_tasks.rst new file mode 100644 index 000000000000..ef422e09dc98 --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2beta3/cloud_tasks.rst @@ -0,0 +1,10 @@ +CloudTasks +---------------------------- + +.. automodule:: google.cloud.tasks_v2beta3.services.cloud_tasks + :members: + :inherited-members: + +.. 
automodule:: google.cloud.tasks_v2beta3.services.cloud_tasks.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-tasks/docs/tasks_v2beta3/services.rst b/packages/google-cloud-tasks/docs/tasks_v2beta3/services.rst new file mode 100644 index 000000000000..bd97721b2c5a --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2beta3/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Tasks v2beta3 API +=========================================== +.. toctree:: + :maxdepth: 2 + + cloud_tasks diff --git a/packages/google-cloud-tasks/docs/tasks_v2beta3/types.rst b/packages/google-cloud-tasks/docs/tasks_v2beta3/types.rst new file mode 100644 index 000000000000..37d181e6fd79 --- /dev/null +++ b/packages/google-cloud-tasks/docs/tasks_v2beta3/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Tasks v2beta3 API +======================================== + +.. automodule:: google.cloud.tasks_v2beta3.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-tasks/google/cloud/tasks/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks/__init__.py new file mode 100644 index 000000000000..286d221c8010 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.tasks import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.tasks_v2.services.cloud_tasks.async_client import ( + CloudTasksAsyncClient, +) +from google.cloud.tasks_v2.services.cloud_tasks.client import CloudTasksClient +from google.cloud.tasks_v2.types.cloudtasks import ( + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, +) +from google.cloud.tasks_v2.types.queue import ( + Queue, + RateLimits, + RetryConfig, + StackdriverLoggingConfig, +) +from google.cloud.tasks_v2.types.target import ( + AppEngineHttpRequest, + AppEngineRouting, + HttpMethod, + HttpRequest, + OAuthToken, + OidcToken, +) +from google.cloud.tasks_v2.types.task import Attempt, Task + +__all__ = ( + "CloudTasksClient", + "CloudTasksAsyncClient", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "PauseQueueRequest", + "PurgeQueueRequest", + "ResumeQueueRequest", + "RunTaskRequest", + "UpdateQueueRequest", + "Queue", + "RateLimits", + "RetryConfig", + "StackdriverLoggingConfig", + "AppEngineHttpRequest", + "AppEngineRouting", + "HttpRequest", + "OAuthToken", + "OidcToken", + "HttpMethod", + "Attempt", + "Task", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py new file mode 100644 index 000000000000..3344051a673d --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.14.2" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks/py.typed b/packages/google-cloud-tasks/google/cloud/tasks/py.typed new file mode 100644 index 000000000000..41f0b1b8d473 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/__init__.py new file mode 100644 index 000000000000..55764f444797 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.tasks_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_tasks import CloudTasksAsyncClient, CloudTasksClient +from .types.cloudtasks import ( + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, +) +from .types.queue import Queue, RateLimits, RetryConfig, StackdriverLoggingConfig +from .types.target import ( + AppEngineHttpRequest, + AppEngineRouting, + HttpMethod, + HttpRequest, + OAuthToken, + OidcToken, +) +from .types.task import Attempt, Task + +__all__ = ( + "CloudTasksAsyncClient", + "AppEngineHttpRequest", + "AppEngineRouting", + "Attempt", + "CloudTasksClient", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "HttpMethod", + "HttpRequest", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "OAuthToken", + "OidcToken", + "PauseQueueRequest", + "PurgeQueueRequest", + "Queue", + "RateLimits", + "ResumeQueueRequest", + "RetryConfig", + "RunTaskRequest", + "StackdriverLoggingConfig", + "Task", + "UpdateQueueRequest", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_metadata.json b/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_metadata.json new file mode 100644 index 000000000000..cf63592d655c --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_metadata.json @@ -0,0 +1,268 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.tasks_v2", + "protoPackage": "google.cloud.tasks.v2", + "schema": "1.0", + "services": { + "CloudTasks": { + "clients": { + "grpc": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudTasksAsyncClient", + "rpcs": { + "CreateQueue": { + "methods": [ + 
"create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + }, + "rest": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py new file mode 100644 index 000000000000..3344051a673d --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.14.2" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/py.typed b/packages/google-cloud-tasks/google/cloud/tasks_v2/py.typed new file mode 100644 index 000000000000..41f0b1b8d473 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. 
diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/__init__.py new file mode 100644 index 000000000000..eddc5977fd56 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CloudTasksAsyncClient +from .client import CloudTasksClient + +__all__ = ( + "CloudTasksClient", + "CloudTasksAsyncClient", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/async_client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/async_client.py new file mode 100644 index 000000000000..4056b22626f5 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/async_client.py @@ -0,0 +1,2382 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.tasks_v2.services.cloud_tasks import pagers +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task + +from .client import CloudTasksClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudTasksTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + _client: CloudTasksClient + + DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT + + queue_path = staticmethod(CloudTasksClient.queue_path) + parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path) + task_path = staticmethod(CloudTasksClient.task_path) + parse_task_path = staticmethod(CloudTasksClient.parse_task_path) + common_billing_account_path = staticmethod( + CloudTasksClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudTasksClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CloudTasksClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudTasksClient.common_organization_path) + parse_common_organization_path = staticmethod( + CloudTasksClient.parse_common_organization_path + ) + common_project_path = staticmethod(CloudTasksClient.common_project_path) + parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path) + common_location_path = staticmethod(CloudTasksClient.common_location_path) + parse_common_location_path = staticmethod( + CloudTasksClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksAsyncClient: The constructed client. 
+ """ + return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksAsyncClient: The constructed client. + """ + return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudTasksClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CloudTasksClient).get_transport_class, type(CloudTasksClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudTasksTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. 
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudTasksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_queues( + self, + request: Optional[Union[cloudtasks.ListQueuesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesAsyncPager: + r"""Lists queues. + + Queues are returned in lexicographical order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_list_queues(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.ListQueuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_queues(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.ListQueuesRequest, dict]]): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.services.cloud_tasks.pagers.ListQueuesAsyncPager: + Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request.
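+ # For example, list_queues(request=request, parent="projects/p/locations/l") + # mixes both calling styles and is rejected with the ValueError below.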
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQueuesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_queue( + self, + request: Optional[Union[cloudtasks.GetQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_get_queue(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.GetQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.get_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.GetQueueRequest, dict]]): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_queue( + self, + request: Optional[Union[cloudtasks.CreateQueueRequest, dict]] = None, + *, + parent: Optional[str] = None, + queue: Optional[gct_queue.Queue] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_create_queue(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.CreateQueueRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.CreateQueueRequest, dict]]): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`google.cloud.tasks_v2.types.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot + be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_queue( + self, + request: Optional[Union[cloudtasks.UpdateQueueRequest, dict]] = None, + *, + queue: Optional[gct_queue.Queue] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_update_queue(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.UpdateQueueRequest( + ) + + # Make the request + response = await client.update_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.UpdateQueueRequest, dict]]): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + queue (:class:`google.cloud.tasks_v2.types.Queue`): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2.Queue.name] cannot be + changed. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_queue( + self, + request: Optional[Union[cloudtasks.DeleteQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_delete_queue(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.DeleteQueueRequest( + name="name_value", + ) + + # Make the request + await client.delete_queue(request=request) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.DeleteQueueRequest, dict]]): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request.
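+ # DeleteQueue returns google.protobuf.Empty, so there is no response + # payload to hand back to the caller.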
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def purge_queue( + self, + request: Optional[Union[cloudtasks.PurgeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_purge_queue(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.PurgeQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.purge_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.PurgeQueueRequest, dict]]): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request.
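+ # As noted above, the purge can take up to one minute to take effect, + # and tasks may still be dispatched before it does.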
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def pause_queue( + self, + request: Optional[Union[cloudtasks.PauseQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_pause_queue(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.PauseQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.pause_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.PauseQueueRequest, dict]]): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here.
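+ # to_grpc_metadata() packs the queue name into the x-goog-request-params + # header, which the backend uses to route the request.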
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def resume_queue( + self, + request: Optional[Union[cloudtasks.ResumeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resumes a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks <https://cloud.google.com/tasks/docs/manage-cloud-task-scaling>`__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_resume_queue(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.ResumeQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.resume_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.ResumeQueueRequest, dict]]): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these.
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM <https://cloud.google.com/iam>`__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role.
+ + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    async def set_iam_policy(
+        self,
+        request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+        *,
+        resource: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the access control policy for a
+        [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing
+        policy.
+
+        Note: The Cloud Console does not check queue-level IAM
+        permissions yet. Project-level permissions are required to use
+        the Cloud Console.
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        -  ``cloudtasks.queues.setIamPolicy``
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            async def sample_set_iam_policy():
+                # Create a client
+                client = tasks_v2.CloudTasksAsyncClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.SetIamPolicyRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = await client.set_iam_policy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]):
+                The request object. Request message for ``SetIamPolicy`` method.
+            resource (:class:`str`):
+                REQUIRED: The resource for which the
+                policy is being specified. See the
+                operation documentation for the
+                appropriate value for this field.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.iam.v1.policy_pb2.Policy:
+                An Identity and Access Management (IAM) policy, which specifies access
+                controls for Google Cloud resources.
+
+                A Policy is a collection of bindings. A binding binds
+                one or more members, or principals, to a single role.
+                Principals can be user accounts, service accounts,
+                Google groups, and domains (such as G Suite). A role
+                is a named list of permissions; each role can be an
+                IAM predefined role or a user-created custom role.
+
+                For some types of Google Cloud resources, a binding
+                can also specify a condition, which is a logical
+                expression that allows access to a resource only if
+                the expression evaluates to true. A condition can add
+                constraints based on attributes of the request, the
+                resource, or both. To learn which resources support
+                conditions in their IAM policies, see the [IAM
+                documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies).
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            async def sample_test_iam_permissions():
+                # Create a client
+                client = tasks_v2.CloudTasksAsyncClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.TestIamPermissionsRequest(
+                    resource="resource_value",
+                    permissions=['permissions_value1', 'permissions_value2'],
+                )
+
+                # Make the request
+                response = await client.test_iam_permissions(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]):
+                The request object. Request message for ``TestIamPermissions`` method.
+            resource (:class:`str`):
+                REQUIRED: The resource for which the
+                policy detail is being requested. See
+                the operation documentation for the
+                appropriate value for this field.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            permissions (:class:`MutableSequence[str]`):
+                The set of permissions to check for the ``resource``.
+                Permissions with wildcards (such as '*' or 'storage.*')
+                are not allowed. For more information see `IAM
+                Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+                This corresponds to the ``permissions`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for TestIamPermissions method.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource, permissions])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+        elif not request:
+            request = iam_policy_pb2.TestIamPermissionsRequest(
+                resource=resource,
+                permissions=permissions,
+            )
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.test_iam_permissions,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=20.0,
+            ),
+            default_timeout=20.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
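+        # The response's `permissions` field will contain the subset of the
+        # requested permissions that the caller actually holds.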
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tasks( + self, + request: Optional[Union[cloudtasks.ListTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_list_tasks(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.ListTasksRequest, dict]]): + The request object. Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.services.cloud_tasks.pagers.ListTasksAsyncPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
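+        # In effect, the defaults below retry transient DeadlineExceeded and
+        # ServiceUnavailable errors with exponential backoff (0.1s initial,
+        # 1.3x multiplier, capped at 10s) until a 20s overall deadline.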
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task( + self, + request: Optional[Union[cloudtasks.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_get_task(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.GetTaskRequest, dict]]): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
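+        # Per-call `retry` and `timeout` arguments, when not left as DEFAULT,
+        # take precedence over the defaults configured in this wrapper.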
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_task( + self, + request: Optional[Union[cloudtasks.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[gct_task.Task] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_create_task(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.CreateTaskRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.CreateTaskRequest, dict]]): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`google.cloud.tasks_v2.types.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is + not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. 
If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_task( + self, + request: Optional[Union[cloudtasks.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_delete_task(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + await client.delete_task(request=request) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.DeleteTaskRequest, dict]]): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def run_task( + self, + request: Optional[Union[cloudtasks.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. 
That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + async def sample_run_task(): + # Create a client + client = tasks_v2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2.types.RunTaskRequest, dict]]): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
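+        # Note: per the docstring above, the returned Task reflects its state
+        # after dispatch but before the target has responded.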
+ return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "CloudTasksAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudTasksAsyncClient",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/client.py new file mode 100644 index 000000000000..38edb56aad09 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/client.py @@ -0,0 +1,2556 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.tasks_v2.services.cloud_tasks import pagers +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task + +from .transports.base import DEFAULT_CLIENT_INFO, CloudTasksTransport +from .transports.grpc import CloudTasksGrpcTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .transports.rest import CloudTasksRestTransport + + +class CloudTasksClientMeta(type): + """Metaclass for the CloudTasks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] + _transport_registry["grpc"] = CloudTasksGrpcTransport + _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + _transport_registry["rest"] = CloudTasksRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudTasksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudTasksClient(metaclass=CloudTasksClientMeta): + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def queue_path( + project: str, + location: str, + queue: str, + ) -> str: + """Returns a fully-qualified queue string.""" + return "projects/{project}/locations/{location}/queues/{queue}".format( + project=project, + location=location, + queue=queue, + ) + + @staticmethod + def parse_queue_path(path: str) -> Dict[str, str]: + """Parses a queue path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def task_path( + project: str, + location: str, + queue: str, + task: str, + ) -> str: + """Returns a fully-qualified task string.""" + return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format( + project=project, + location=location, + queue=queue, + task=task, + ) + + @staticmethod + def parse_task_path(path: str) -> Dict[str, str]: + """Parses a task path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/queues/(?P.+?)/tasks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, CloudTasksTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cloud tasks client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CloudTasksTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_queues( + self, + request: Optional[Union[cloudtasks.ListQueuesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + + Queues are returned in lexicographical order. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_list_queues(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.ListQueuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_queues(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.ListQueuesRequest, dict]): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.services.cloud_tasks.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
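+        # Iterating the pager transparently issues additional ListQueues RPCs
+        # (via the `rpc` and `request` captured above) as further pages are
+        # needed.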
+ return response + + def get_queue( + self, + request: Optional[Union[cloudtasks.GetQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_get_queue(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.GetQueueRequest( + name="name_value", + ) + + # Make the request + response = client.get_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.GetQueueRequest, dict]): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_queue( + self, + request: Optional[Union[cloudtasks.CreateQueueRequest, dict]] = None, + *, + parent: Optional[str] = None, + queue: Optional[gct_queue.Queue] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_create_queue(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.CreateQueueRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.CreateQueueRequest, dict]): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (google.cloud.tasks_v2.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot + be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateQueueRequest. 
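Before the ``create_queue`` body continues below, a minimal creation sketch; all names are hypothetical. The queue's ``name`` must be a child of ``parent`` and must not collide with an existing queue:

.. code-block:: python

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1"  # hypothetical

    queue = tasks_v2.Queue(name=f"{parent}/queues/my-queue")
    created = client.create_queue(parent=parent, queue=queue)
    print(created.state)  # typically Queue.State.RUNNING once created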
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateQueueRequest): + request = cloudtasks.CreateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_queue( + self, + request: Optional[Union[cloudtasks.UpdateQueueRequest, dict]] = None, + *, + queue: Optional[gct_queue.Queue] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_update_queue(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.UpdateQueueRequest( + ) + + # Make the request + response = client.update_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.UpdateQueueRequest, dict]): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + queue (google.cloud.tasks_v2.types.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2.Queue.name] cannot be + changed. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. 
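(The ``update_mask`` documentation continues below.) The mask is a standard protobuf ``FieldMask``; a sketch that updates only a queue's dispatch rate and leaves everything else untouched, using hypothetical names:

.. code-block:: python

    from google.cloud import tasks_v2
    from google.protobuf import field_mask_pb2

    client = tasks_v2.CloudTasksClient()

    queue = tasks_v2.Queue(
        name="projects/my-project/locations/us-central1/queues/my-queue",  # hypothetical
        rate_limits=tasks_v2.RateLimits(max_dispatches_per_second=5),
    )
    # Only the paths named in the mask are written; omitting the mask
    # entirely would update all fields, per the docstring above.
    mask = field_mask_pb2.FieldMask(paths=["rate_limits.max_dispatches_per_second"])
    updated = client.update_queue(queue=queue, update_mask=mask)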
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UpdateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UpdateQueueRequest): + request = cloudtasks.UpdateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_queue( + self, + request: Optional[Union[cloudtasks.DeleteQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_delete_queue(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.DeleteQueueRequest( + name="name_value", + ) + + # Make the request + client.delete_queue(request=request) + + Args: + request (Union[google.cloud.tasks_v2.types.DeleteQueueRequest, dict]): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def purge_queue( + self, + request: Optional[Union[cloudtasks.PurgeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_purge_queue(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.PurgeQueueRequest( + name="name_value", + ) + + # Make the request + response = client.purge_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.PurgeQueueRequest, dict]): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PurgeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PurgeQueueRequest): + request = cloudtasks.PurgeQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def pause_queue( + self, + request: Optional[Union[cloudtasks.PauseQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2.Queue.state] is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_pause_queue(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.PauseQueueRequest( + name="name_value", + ) + + # Make the request + response = client.pause_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.PauseQueueRequest, dict]): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PauseQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PauseQueueRequest): + request = cloudtasks.PauseQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def resume_queue( + self, + request: Optional[Union[cloudtasks.ResumeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. 
+ + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2.Queue.state]; after calling this + method it will be set to + [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_resume_queue(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.ResumeQueueRequest( + name="name_value", + ) + + # Make the request + response = client.resume_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.ResumeQueueRequest, dict]): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ResumeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ResumeQueueRequest): + request = cloudtasks.ResumeQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
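``pause_queue`` (above) and ``resume_queue`` flip the queue's ``state`` between ``PAUSED`` and ``RUNNING``; tasks can still be enqueued while paused, they just are not dispatched. A round-trip sketch with a hypothetical queue name (mind the 500/50/5 caution above when resuming many high-QPS queues):

.. code-block:: python

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    name = "projects/my-project/locations/us-central1/queues/my-queue"  # hypothetical

    paused = client.pause_queue(name=name)
    assert paused.state == tasks_v2.Queue.State.PAUSED  # dispatch stops

    resumed = client.resume_queue(name=name)
    assert resumed.state == tasks_v2.Queue.State.RUNNING  # dispatch restarts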
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if + the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. 
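``get_iam_policy`` and ``set_iam_policy`` together support the usual read-modify-write pattern: fetch the policy, adjust its bindings, and write it back. Reusing the fetched policy carries its ``etag``, so a concurrent edit makes the write fail rather than being silently overwritten. A sketch with hypothetical resource and member names:

.. code-block:: python

    from google.cloud import tasks_v2
    from google.iam.v1 import policy_pb2

    client = tasks_v2.CloudTasksClient()
    resource = "projects/my-project/locations/us-central1/queues/my-queue"  # hypothetical

    policy = client.get_iam_policy(resource=resource)
    policy.bindings.append(
        policy_pb2.Binding(
            role="roles/cloudtasks.enqueuer",
            members=["serviceAccount:enqueuer@my-project.iam.gserviceaccount.com"],  # hypothetical
        )
    )
    # Only `resource` has a flattened argument on set_iam_policy, so the
    # write passes a dict request (expanded via keyword expansion below).
    client.set_iam_policy(request={"resource": resource, "policy": policy})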
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2.Queue]. If the resource does not + exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + resource (str): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (MutableSequence[str]): + The set of permissions to check for the ``resource``. 
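(The ``permissions`` documentation continues below.) Because ``test_iam_permissions`` may "fail open", it suits permission-aware UIs rather than enforcement. A sketch with a hypothetical resource:

.. code-block:: python

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    resource = "projects/my-project/locations/us-central1/queues/my-queue"  # hypothetical

    response = client.test_iam_permissions(
        resource=resource,
        permissions=["cloudtasks.tasks.create", "cloudtasks.queues.purge"],
    )
    # The response echoes back only the subset the caller actually holds.
    print(list(response.permissions))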
+ Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + if resource is not None: + request.resource = resource + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tasks( + self, + request: Optional[Union[cloudtasks.ListTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_list_tasks(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.ListTasksRequest, dict]): + The request object. Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.services.cloud_tasks.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_task( + self, + request: Optional[Union[cloudtasks.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_get_task(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.GetTaskRequest, dict]): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_task( + self, + request: Optional[Union[cloudtasks.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[gct_task.Task] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_create_task(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.CreateTaskRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.CreateTaskRequest, dict]): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (google.cloud.tasks_v2.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is + not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. 
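Before the ``create_task`` body continues below, a creation sketch with an HTTP target. The task ``name`` is omitted so the service assigns a random unique id, avoiding the de-duplication lookup cost described above; the queue name and handler URL are hypothetical:

.. code-block:: python

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1/queues/my-queue"  # hypothetical

    task = tasks_v2.Task(
        http_request=tasks_v2.HttpRequest(
            http_method=tasks_v2.HttpMethod.POST,
            url="https://example.com/handler",  # hypothetical endpoint
            body=b'{"key": "value"}',
        ),
    )
    created = client.create_task(parent=parent, task=task)
    print(created.name)  # server-assigned task name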
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_task( + self, + request: Optional[Union[cloudtasks.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_delete_task(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + client.delete_task(request=request) + + Args: + request (Union[google.cloud.tasks_v2.types.DeleteTaskRequest, dict]): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
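``delete_task`` (above) removes a task that has not yet completed, while ``run_task`` (documented next) force-dispatches one for manual debugging. A ``run_task`` sketch with a hypothetical task name:

.. code-block:: python

    from google.api_core import exceptions
    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    task_name = (
        "projects/my-project/locations/us-central1"
        "/queues/my-queue/tasks/my-task"  # hypothetical
    )

    try:
        # Dispatch immediately, bypassing the schedule and the queue's
        # rate limits; intended for manual debugging only.
        dispatched = client.run_task(name=task_name)
        print(dispatched.dispatch_count)
    except exceptions.NotFound:
        # Raised for tasks that already succeeded or permanently failed.
        pass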
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def run_task( + self, + request: Optional[Union[cloudtasks.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can + be used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will + be reset to the time that + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called + plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2 + + def sample_run_task(): + # Create a client + client = tasks_v2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2.types.RunTaskRequest, dict]): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CloudTasksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudTasksClient",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/pagers.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/pagers.py new file mode 100644 index 000000000000..ea049a54dd0b --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.tasks_v2.types import cloudtasks, queue, task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2.types.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2.types.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
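+
+    A minimal usage sketch (illustrative only; assumes an existing
+    ``CloudTasksAsyncClient`` instance and a valid queue parent path):
+
+    .. code-block:: python
+
+        pager = await client.list_queues(parent="projects/my-project/locations/us-central1")
+        async for queue in pager:
+            print(queue.name)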
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2.types.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
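+
+            Note: this pager is not normally constructed by hand; it is
+            returned by ``CloudTasksClient.list_tasks``.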
+ """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2.types.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py new file mode 100644 index 000000000000..0a9dffaab4fe --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudTasksTransport +from .grpc import CloudTasksGrpcTransport +from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .rest import CloudTasksRestInterceptor, CloudTasksRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] +_transport_registry["grpc"] = CloudTasksGrpcTransport +_transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport +_transport_registry["rest"] = CloudTasksRestTransport + +__all__ = ( + "CloudTasksTransport", + "CloudTasksGrpcTransport", + "CloudTasksGrpcAsyncIOTransport", + "CloudTasksRestTransport", + "CloudTasksRestInterceptor", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py new file mode 100644 index 000000000000..b62af84ff1cf --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/base.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.tasks_v2 import gapic_version as package_version +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CloudTasksTransport(abc.ABC): + """Abstract transport class for CloudTasks.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudtasks.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
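+        # (Here the credentials came from ADC, so a GDCH audience may be
+        # stamped on them; an explicit ``api_audience`` takes precedence
+        # over the default of the API host.)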
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_queues: gapic_v1.method.wrap_method( + self.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_queue: gapic_v1.method.wrap_method( + self.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_queue: gapic_v1.method.wrap_method( + self.create_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.update_queue: gapic_v1.method.wrap_method( + self.update_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.delete_queue: gapic_v1.method.wrap_method( + self.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.purge_queue: gapic_v1.method.wrap_method( + self.purge_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.pause_queue: gapic_v1.method.wrap_method( + self.pause_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.resume_queue: gapic_v1.method.wrap_method( + self.resume_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=20.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), 
+ deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, + default_timeout=20.0, + client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, + default_timeout=20.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], + Union[cloudtasks.ListQueuesResponse, Awaitable[cloudtasks.ListQueuesResponse]], + ]: + raise NotImplementedError() + + @property + def get_queue( + self, + ) -> Callable[ + [cloudtasks.GetQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def create_queue( + self, + ) -> Callable[ + [cloudtasks.CreateQueueRequest], + Union[gct_queue.Queue, Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def update_queue( + self, + ) -> Callable[ + [cloudtasks.UpdateQueueRequest], + Union[gct_queue.Queue, Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def delete_queue( + self, + ) -> Callable[ + [cloudtasks.DeleteQueueRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def purge_queue( + self, + ) -> Callable[ + [cloudtasks.PurgeQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def pause_queue( + self, + ) -> Callable[ + [cloudtasks.PauseQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def resume_queue( + self, + ) -> Callable[ + [cloudtasks.ResumeQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_tasks( + self, + ) -> Callable[ + [cloudtasks.ListTasksRequest], + Union[cloudtasks.ListTasksResponse, Awaitable[cloudtasks.ListTasksResponse]], + ]: + raise NotImplementedError() + + 
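+    # Note for readers: each property in this class is an abstract accessor;
+    # concrete transports override it to return the real callable, along the
+    # lines of this illustrative sketch (not the generated implementation):
+    #
+    #   @property
+    #   def get_task(self):
+    #       return self._stubs["get_task"]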
@property + def get_task( + self, + ) -> Callable[[cloudtasks.GetTaskRequest], Union[task.Task, Awaitable[task.Task]]]: + raise NotImplementedError() + + @property + def create_task( + self, + ) -> Callable[ + [cloudtasks.CreateTaskRequest], Union[gct_task.Task, Awaitable[gct_task.Task]] + ]: + raise NotImplementedError() + + @property + def delete_task( + self, + ) -> Callable[ + [cloudtasks.DeleteTaskRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def run_task( + self, + ) -> Callable[[cloudtasks.RunTaskRequest], Union[task.Task, Awaitable[task.Task]]]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CloudTasksTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py new file mode 100644 index 000000000000..fe23dfc33336 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc.py @@ -0,0 +1,823 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task + +from .base import DEFAULT_CLIENT_INFO, CloudTasksTransport + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudtasks.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
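+
+        A minimal construction sketch (illustrative only; the generated
+        ``CloudTasksClient`` accepts a pre-built transport):
+
+        .. code-block:: python
+
+            transport = CloudTasksGrpcTransport(host="cloudtasks.googleapis.com")
+            client = CloudTasksClient(transport=transport)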
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "cloudtasks.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
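+
+        For example (illustrative sketch; credentials resolve via ADC):
+
+        .. code-block:: python
+
+            channel = CloudTasksGrpcTransport.create_channel()
+            transport = CloudTasksGrpcTransport(channel=channel)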
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def list_queues(
+        self,
+    ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]:
+        r"""Return a callable for the list queues method over gRPC.
+
+        Lists queues.
+
+        Queues are returned in lexicographical order.
+
+        Returns:
+            Callable[[~.ListQueuesRequest],
+                    ~.ListQueuesResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_queues" not in self._stubs:
+            self._stubs["list_queues"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/ListQueues",
+                request_serializer=cloudtasks.ListQueuesRequest.serialize,
+                response_deserializer=cloudtasks.ListQueuesResponse.deserialize,
+            )
+        return self._stubs["list_queues"]
+
+    @property
+    def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]:
+        r"""Return a callable for the get queue method over gRPC.
+
+        Gets a queue.
+
+        Returns:
+            Callable[[~.GetQueueRequest],
+                    ~.Queue]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_queue" not in self._stubs:
+            self._stubs["get_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/GetQueue",
+                request_serializer=cloudtasks.GetQueueRequest.serialize,
+                response_deserializer=queue.Queue.deserialize,
+            )
+        return self._stubs["get_queue"]
+
+    @property
+    def create_queue(
+        self,
+    ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]:
+        r"""Return a callable for the create queue method over gRPC.
+
+        Creates a queue.
+
+        Queues created with this method allow tasks to live for a
+        maximum of 31 days. After a task is 31 days old, the task will
+        be deleted regardless of whether it was dispatched or not.
+
+        WARNING: Using this method may have unintended side effects if
+        you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
+        to manage your queues. Read `Overview of Queue Management and
+        queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
+        before using this method.
+
+        Returns:
+            Callable[[~.CreateQueueRequest],
+                    ~.Queue]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_queue" not in self._stubs:
+            self._stubs["create_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/CreateQueue",
+                request_serializer=cloudtasks.CreateQueueRequest.serialize,
+                response_deserializer=gct_queue.Queue.deserialize,
+            )
+        return self._stubs["create_queue"]
+
+    @property
+    def update_queue(
+        self,
+    ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]:
+        r"""Return a callable for the update queue method over gRPC.
+
+        Updates a queue.
+
+        This method creates the queue if it does not exist and updates
+        the queue if it does exist.
+
+        Queues created with this method allow tasks to live for a
+        maximum of 31 days. After a task is 31 days old, the task will
+        be deleted regardless of whether it was dispatched or not.
+
+        WARNING: Using this method may have unintended side effects if
+        you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
+        to manage your queues. Read `Overview of Queue Management and
+        queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
+        before using this method.
+
+        Returns:
+            Callable[[~.UpdateQueueRequest],
+                    ~.Queue]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_queue" not in self._stubs:
+            self._stubs["update_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/UpdateQueue",
+                request_serializer=cloudtasks.UpdateQueueRequest.serialize,
+                response_deserializer=gct_queue.Queue.deserialize,
+            )
+        return self._stubs["update_queue"]
+
+    @property
+    def delete_queue(
+        self,
+    ) -> Callable[[cloudtasks.DeleteQueueRequest], empty_pb2.Empty]:
+        r"""Return a callable for the delete queue method over gRPC.
+
+        Deletes a queue.
+
+        This command will delete the queue even if it has tasks in it.
+
+        Note: If you delete a queue, a queue with the same name can't be
+        created for 7 days.
+
+        WARNING: Using this method may have unintended side effects if
+        you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
+        to manage your queues. Read `Overview of Queue Management and
+        queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
+        before using this method.
+
+        Returns:
+            Callable[[~.DeleteQueueRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_queue" not in self._stubs:
+            self._stubs["delete_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/DeleteQueue",
+                request_serializer=cloudtasks.DeleteQueueRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_queue"]
+
+    @property
+    def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]:
+        r"""Return a callable for the purge queue method over gRPC.
+
+        Purges a queue by deleting all of its tasks.
+
+        All tasks created before this method is called are
+        permanently deleted.
+
+        Purge operations can take up to one minute to take
+        effect. Tasks might be dispatched before the purge takes
+        effect. A purge is irreversible.
+
+        Returns:
+            Callable[[~.PurgeQueueRequest],
+                    ~.Queue]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
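+        # Stubs are memoized in self._stubs keyed by method name, so the
+        # unary-unary callable below is created at most once per channel.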
+        if "purge_queue" not in self._stubs:
+            self._stubs["purge_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/PurgeQueue",
+                request_serializer=cloudtasks.PurgeQueueRequest.serialize,
+                response_deserializer=queue.Queue.deserialize,
+            )
+        return self._stubs["purge_queue"]
+
+    @property
+    def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]:
+        r"""Return a callable for the pause queue method over gRPC.
+
+        Pauses the queue.
+
+        If a queue is paused then the system will stop dispatching tasks
+        until the queue is resumed via
+        [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue].
+        Tasks can still be added when the queue is paused. A queue is
+        paused if its [state][google.cloud.tasks.v2.Queue.state] is
+        [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED].
+
+        Returns:
+            Callable[[~.PauseQueueRequest],
+                    ~.Queue]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "pause_queue" not in self._stubs:
+            self._stubs["pause_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/PauseQueue",
+                request_serializer=cloudtasks.PauseQueueRequest.serialize,
+                response_deserializer=queue.Queue.deserialize,
+            )
+        return self._stubs["pause_queue"]
+
+    @property
+    def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]:
+        r"""Return a callable for the resume queue method over gRPC.
+
+        Resume a queue.
+
+        This method resumes a queue after it has been
+        [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or
+        [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The
+        state of a queue is stored in the queue's
+        [state][google.cloud.tasks.v2.Queue.state]; after calling this
+        method it will be set to
+        [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING].
+
+        WARNING: Resuming many high-QPS queues at the same time can lead
+        to target overloading. If you are resuming high-QPS queues,
+        follow the 500/50/5 pattern described in `Managing Cloud Tasks
+        Scaling Risks <https://cloud.google.com/tasks/docs/manage-cloud-task-scaling>`__.
+
+        Returns:
+            Callable[[~.ResumeQueueRequest],
+                    ~.Queue]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "resume_queue" not in self._stubs:
+            self._stubs["resume_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/ResumeQueue",
+                request_serializer=cloudtasks.ResumeQueueRequest.serialize,
+                response_deserializer=queue.Queue.deserialize,
+            )
+        return self._stubs["resume_queue"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the access control policy for a
+        [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if
+        the resource exists and does not have a policy set.
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        - ``cloudtasks.queues.getIamPolicy``
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
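+
+        A hedged usage sketch (the queue resource path is illustrative):
+
+        .. code-block:: python
+
+            from google.iam.v1 import iam_policy_pb2
+
+            request = iam_policy_pb2.GetIamPolicyRequest(
+                resource="projects/my-project/locations/us-central1/queues/my-queue"
+            )
+            policy = transport.get_iam_policy(request)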
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the access control policy for a
+        [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing
+        policy.
+
+        Note: The Cloud Console does not check queue-level IAM
+        permissions yet. Project-level permissions are required to use
+        the Cloud Console.
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        - ``cloudtasks.queues.setIamPolicy``
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Returns permissions that a caller has on a
+        [Queue][google.cloud.tasks.v2.Queue]. If the resource does not
+        exist, this will return an empty set of permissions, not a
+        [NOT_FOUND][google.rpc.Code.NOT_FOUND] error.
+
+        Note: This operation is designed to be used for building
+        permission-aware UIs and command-line tools, not for
+        authorization checking. This operation may "fail open" without
+        warning.
+
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "test_iam_permissions" not in self._stubs:
+            self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions",
+                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+            )
+        return self._stubs["test_iam_permissions"]
+
+    @property
+    def list_tasks(
+        self,
+    ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]:
+        r"""Return a callable for the list tasks method over gRPC.
+
+        Lists the tasks in a queue.
+ + By default, only the + [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty_pb2.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if "delete_task" not in self._stubs:
+            self._stubs["delete_task"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/DeleteTask",
+                request_serializer=cloudtasks.DeleteTaskRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_task"]
+
+    @property
+    def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]:
+        r"""Return a callable for the run task method over gRPC.
+
+        Forces a task to run now.
+
+        When this method is called, Cloud Tasks will dispatch the task,
+        even if the task is already running, the queue has reached its
+        [RateLimits][google.cloud.tasks.v2.RateLimits] or is
+        [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED].
+
+        This command is meant to be used for manual debugging. For
+        example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can
+        be used to retry a failed task after a fix has been made or to
+        manually force a task to be dispatched now.
+
+        The dispatched task is returned. That is, the task that is
+        returned contains the [status][Task.status] after the task is
+        dispatched but before the task is received by its target.
+
+        If Cloud Tasks receives a successful response from the task's
+        target, then the task will be deleted; otherwise the task's
+        [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will
+        be reset to the time that
+        [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called
+        plus the retry delay specified in the queue's
+        [RetryConfig][google.cloud.tasks.v2.RetryConfig].
+
+        [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns
+        [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a
+        task that has already succeeded or permanently failed.
+
+        Returns:
+            Callable[[~.RunTaskRequest],
+                    ~.Task]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "run_task" not in self._stubs:
+            self._stubs["run_task"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/RunTask",
+                request_serializer=cloudtasks.RunTaskRequest.serialize,
+                response_deserializer=task.Task.deserialize,
+            )
+        return self._stubs["run_task"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
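+        # Note: the Locations mixin uses the raw protobuf messages from
+        # locations_pb2, so serialization goes through SerializeToString /
+        # FromString rather than the proto-plus serialize / deserialize
+        # helpers used by the Cloud Tasks request messages above.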
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CloudTasksGrpcTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py new file mode 100644 index 000000000000..eaae6f36728a --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/grpc_asyncio.py @@ -0,0 +1,838 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task + +from .base import DEFAULT_CLIENT_INFO, CloudTasksTransport +from .grpc import CloudTasksGrpcTransport + + +class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport): + """gRPC AsyncIO backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudtasks.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[aio.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel.
It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], Awaitable[cloudtasks.ListQueuesResponse] + ]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + + Queues are returned in lexicographical order. 
+
+        Returns:
+            Callable[[~.ListQueuesRequest],
+                    Awaitable[~.ListQueuesResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_queues" not in self._stubs:
+            self._stubs["list_queues"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/ListQueues",
+                request_serializer=cloudtasks.ListQueuesRequest.serialize,
+                response_deserializer=cloudtasks.ListQueuesResponse.deserialize,
+            )
+        return self._stubs["list_queues"]
+
+    @property
+    def get_queue(
+        self,
+    ) -> Callable[[cloudtasks.GetQueueRequest], Awaitable[queue.Queue]]:
+        r"""Return a callable for the get queue method over gRPC.
+
+        Gets a queue.
+
+        Returns:
+            Callable[[~.GetQueueRequest],
+                    Awaitable[~.Queue]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_queue" not in self._stubs:
+            self._stubs["get_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/GetQueue",
+                request_serializer=cloudtasks.GetQueueRequest.serialize,
+                response_deserializer=queue.Queue.deserialize,
+            )
+        return self._stubs["get_queue"]
+
+    @property
+    def create_queue(
+        self,
+    ) -> Callable[[cloudtasks.CreateQueueRequest], Awaitable[gct_queue.Queue]]:
+        r"""Return a callable for the create queue method over gRPC.
+
+        Creates a queue.
+
+        Queues created with this method allow tasks to live for a
+        maximum of 31 days. After a task is 31 days old, the task will
+        be deleted regardless of whether it was dispatched or not.
+
+        WARNING: Using this method may have unintended side effects if
+        you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
+        to manage your queues. Read `Overview of Queue Management and
+        queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
+        before using this method.
+
+        Returns:
+            Callable[[~.CreateQueueRequest],
+                    Awaitable[~.Queue]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_queue" not in self._stubs:
+            self._stubs["create_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/CreateQueue",
+                request_serializer=cloudtasks.CreateQueueRequest.serialize,
+                response_deserializer=gct_queue.Queue.deserialize,
+            )
+        return self._stubs["create_queue"]
+
+    @property
+    def update_queue(
+        self,
+    ) -> Callable[[cloudtasks.UpdateQueueRequest], Awaitable[gct_queue.Queue]]:
+        r"""Return a callable for the update queue method over gRPC.
+
+        Updates a queue.
+
+        This method creates the queue if it does not exist and updates
+        the queue if it does exist.
+
+        Queues created with this method allow tasks to live for a
+        maximum of 31 days. After a task is 31 days old, the task will
+        be deleted regardless of whether it was dispatched or not.
+
+        WARNING: Using this method may have unintended side effects if
+        you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
+        to manage your queues. Read `Overview of Queue Management and
+        queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
+        before using this method.
+ + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue( + self, + ) -> Callable[[cloudtasks.PurgeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue( + self, + ) -> Callable[[cloudtasks.PauseQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. 
A queue is
+        paused if its [state][google.cloud.tasks.v2.Queue.state] is
+        [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED].
+
+        Returns:
+            Callable[[~.PauseQueueRequest],
+                    Awaitable[~.Queue]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "pause_queue" not in self._stubs:
+            self._stubs["pause_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/PauseQueue",
+                request_serializer=cloudtasks.PauseQueueRequest.serialize,
+                response_deserializer=queue.Queue.deserialize,
+            )
+        return self._stubs["pause_queue"]
+
+    @property
+    def resume_queue(
+        self,
+    ) -> Callable[[cloudtasks.ResumeQueueRequest], Awaitable[queue.Queue]]:
+        r"""Return a callable for the resume queue method over gRPC.
+
+        Resume a queue.
+
+        This method resumes a queue after it has been
+        [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or
+        [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The
+        state of a queue is stored in the queue's
+        [state][google.cloud.tasks.v2.Queue.state]; after calling this
+        method it will be set to
+        [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING].
+
+        WARNING: Resuming many high-QPS queues at the same time can lead
+        to target overloading. If you are resuming high-QPS queues,
+        follow the 500/50/5 pattern described in `Managing Cloud Tasks
+        Scaling
+        Risks <https://cloud.google.com/tasks/docs/manage-cloud-task-scaling>`__.
+
+        Returns:
+            Callable[[~.ResumeQueueRequest],
+                    Awaitable[~.Queue]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "resume_queue" not in self._stubs:
+            self._stubs["resume_queue"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/ResumeQueue",
+                request_serializer=cloudtasks.ResumeQueueRequest.serialize,
+                response_deserializer=queue.Queue.deserialize,
+            )
+        return self._stubs["resume_queue"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the access control policy for a
+        [Queue][google.cloud.tasks.v2.Queue]. Returns an empty policy if
+        the resource exists and does not have a policy set.
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        -  ``cloudtasks.queues.getIamPolicy``
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    Awaitable[~.Policy]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
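+        # Illustrative sketch (assumed caller code, not generated): on the
+        # AsyncIO transport the returned callable yields an awaitable, e.g.
+        #
+        #   policy = await transport.get_iam_policy(
+        #       iam_policy_pb2.GetIamPolicyRequest(resource=queue_name)
+        #   )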
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the access control policy for a
+        [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing
+        policy.
+
+        Note: The Cloud Console does not check queue-level IAM
+        permissions yet. Project-level permissions are required to use
+        the Cloud Console.
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        -  ``cloudtasks.queues.setIamPolicy``
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    Awaitable[~.Policy]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Returns permissions that a caller has on a
+        [Queue][google.cloud.tasks.v2.Queue]. If the resource does not
+        exist, this will return an empty set of permissions, not a
+        [NOT_FOUND][google.rpc.Code.NOT_FOUND] error.
+
+        Note: This operation is designed to be used for building
+        permission-aware UIs and command-line tools, not for
+        authorization checking. This operation may "fail open" without
+        warning.
+
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    Awaitable[~.TestIamPermissionsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "test_iam_permissions" not in self._stubs:
+            self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions",
+                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+            )
+        return self._stubs["test_iam_permissions"]
+
+    @property
+    def list_tasks(
+        self,
+    ) -> Callable[
+        [cloudtasks.ListTasksRequest], Awaitable[cloudtasks.ListTasksResponse]
+    ]:
+        r"""Return a callable for the list tasks method over gRPC.
+
+        Lists the tasks in a queue.
+
+        By default, only the
+        [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved
+        due to performance considerations;
+        [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view]
+        controls the subset of information which is returned.
+
+        The tasks may be returned in any order.
The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task( + self, + ) -> Callable[[cloudtasks.CreateTaskRequest], Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task( + self, + ) -> Callable[[cloudtasks.DeleteTaskRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
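+        # Illustrative note (assumption, mirroring the sync transport): the
+        # cached-stub pattern is identical here; only the annotated return
+        # type changes to an Awaitable, e.g. await transport.delete_task(request).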
+        if "delete_task" not in self._stubs:
+            self._stubs["delete_task"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/DeleteTask",
+                request_serializer=cloudtasks.DeleteTaskRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_task"]
+
+    @property
+    def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], Awaitable[task.Task]]:
+        r"""Return a callable for the run task method over gRPC.
+
+        Forces a task to run now.
+
+        When this method is called, Cloud Tasks will dispatch the task,
+        even if the task is already running, the queue has reached its
+        [RateLimits][google.cloud.tasks.v2.RateLimits] or is
+        [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED].
+
+        This command is meant to be used for manual debugging. For
+        example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can
+        be used to retry a failed task after a fix has been made or to
+        manually force a task to be dispatched now.
+
+        The dispatched task is returned. That is, the task that is
+        returned contains the [status][Task.status] after the task is
+        dispatched but before the task is received by its target.
+
+        If Cloud Tasks receives a successful response from the task's
+        target, then the task will be deleted; otherwise the task's
+        [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will
+        be reset to the time that
+        [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called
+        plus the retry delay specified in the queue's
+        [RetryConfig][google.cloud.tasks.v2.RetryConfig].
+
+        [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns
+        [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a
+        task that has already succeeded or permanently failed.
+
+        Returns:
+            Callable[[~.RunTaskRequest],
+                    Awaitable[~.Task]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "run_task" not in self._stubs:
+            self._stubs["run_task"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2.CloudTasks/RunTask",
+                request_serializer=cloudtasks.RunTaskRequest.serialize,
+                response_deserializer=task.Task.deserialize,
+            )
+        return self._stubs["run_task"]
+
+    def close(self):
+        return self.grpc_channel.close()
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
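+        # Note: as in the sync transport, the Locations mixin relies on the
+        # plain protobuf SerializeToString / FromString round-trip because
+        # locations_pb2 messages are not proto-plus wrappers.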
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("CloudTasksGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/rest.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/rest.py new file mode 100644 index 000000000000..15f113508f6f --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/services/cloud_tasks/transports/rest.py @@ -0,0 +1,2555 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task + +from .base import CloudTasksTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudTasksRestInterceptor: + """Interceptor for CloudTasks. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudTasksRestTransport. + + .. 
code-block:: python + class MyCustomCloudTasksInterceptor(CloudTasksRestInterceptor): + def pre_create_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_queues(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_queues(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tasks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tasks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_pause_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_pause_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_purge_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resume_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_resume_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_queue(self, response): + logging.log(f"Received response: 
{response}") + return response + + transport = CloudTasksRestTransport(interceptor=MyCustomCloudTasksInterceptor()) + client = CloudTasksClient(transport=transport) + + + """ + + def pre_create_queue( + self, + request: cloudtasks.CreateQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.CreateQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_create_queue(self, response: gct_queue.Queue) -> gct_queue.Queue: + """Post-rpc interceptor for create_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_create_task( + self, request: cloudtasks.CreateTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.CreateTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_create_task(self, response: gct_task.Task) -> gct_task.Task: + """Post-rpc interceptor for create_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_delete_queue( + self, + request: cloudtasks.DeleteQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.DeleteQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def pre_delete_task( + self, request: cloudtasks.DeleteTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.DeleteTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_get_queue( + self, request: cloudtasks.GetQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.GetQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for get_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. 
+ """ + return response + + def pre_get_task( + self, request: cloudtasks.GetTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.GetTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_task(self, response: task.Task) -> task.Task: + """Post-rpc interceptor for get_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_list_queues( + self, request: cloudtasks.ListQueuesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.ListQueuesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_queues + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_list_queues( + self, response: cloudtasks.ListQueuesResponse + ) -> cloudtasks.ListQueuesResponse: + """Post-rpc interceptor for list_queues + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_list_tasks( + self, request: cloudtasks.ListTasksRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.ListTasksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_tasks + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_list_tasks( + self, response: cloudtasks.ListTasksResponse + ) -> cloudtasks.ListTasksResponse: + """Post-rpc interceptor for list_tasks + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_pause_queue( + self, request: cloudtasks.PauseQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.PauseQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for pause_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_pause_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for pause_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_purge_queue( + self, request: cloudtasks.PurgeQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.PurgeQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_purge_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for purge_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. 
+ """ + return response + + def pre_resume_queue( + self, + request: cloudtasks.ResumeQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.ResumeQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resume_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_resume_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for resume_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_run_task( + self, request: cloudtasks.RunTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.RunTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_run_task(self, response: task.Task) -> task.Task: + """Post-rpc interceptor for run_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_update_queue( + self, + request: cloudtasks.UpdateQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.UpdateQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_update_queue(self, response: gct_queue.Queue) -> gct_queue.Queue: + """Post-rpc interceptor for update_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. 
+        """
+        return response
+
+    def pre_get_location(
+        self,
+        request: locations_pb2.GetLocationRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_location
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudTasks server.
+        """
+        return request, metadata
+
+    def post_get_location(
+        self, response: locations_pb2.Location
+    ) -> locations_pb2.Location:
+        """Post-rpc interceptor for get_location
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CloudTasks server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_list_locations(
+        self,
+        request: locations_pb2.ListLocationsRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_locations
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudTasks server.
+        """
+        return request, metadata
+
+    def post_list_locations(
+        self, response: locations_pb2.ListLocationsResponse
+    ) -> locations_pb2.ListLocationsResponse:
+        """Post-rpc interceptor for list_locations
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CloudTasks server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class CloudTasksRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: CloudTasksRestInterceptor
+
+
+class CloudTasksRestTransport(CloudTasksTransport):
+    """REST backend transport for CloudTasks.
+
+    Cloud Tasks allows developers to manage the execution of
+    background work in their applications.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudtasks.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[CloudTasksRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CloudTasksRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateQueue(CloudTasksRestStub):
+        def __hash__(self):
+            return hash("CreateQueue")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: cloudtasks.CreateQueueRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gct_queue.Queue:
+            r"""Call the create queue method over HTTP.
+
+            Args:
+                request (~.cloudtasks.CreateQueueRequest):
+                    The request object. Request message for
+                [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gct_queue.Queue:
+                    A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, queue types, and
+                others.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*}/queues", + "body": "queue", + }, + ] + request, metadata = self._interceptor.pre_create_queue(request, metadata) + pb_request = cloudtasks.CreateQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_queue.Queue() + pb_resp = gct_queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_queue(resp) + return resp + + class _CreateTask(CloudTasksRestStub): + def __hash__(self): + return hash("CreateTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.CreateTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Call the create task method over HTTP. + + Args: + request (~.cloudtasks.CreateTaskRequest): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{parent=projects/*/locations/*/queues/*}/tasks", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_task(request, metadata) + pb_request = cloudtasks.CreateTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_task.Task() + pb_resp = gct_task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_task(resp) + return resp + + class _DeleteQueue(CloudTasksRestStub): + def __hash__(self): + return hash("DeleteQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.DeleteQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete queue method over HTTP. + + Args: + request (~.cloudtasks.DeleteQueueRequest): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/queues/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_queue(request, metadata) + pb_request = cloudtasks.DeleteQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteTask(CloudTasksRestStub): + def __hash__(self): + return hash("DeleteTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.DeleteTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete task method over HTTP. + + Args: + request (~.cloudtasks.DeleteTaskRequest): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2/{name=projects/*/locations/*/queues/*/tasks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_task(request, metadata) + pb_request = cloudtasks.DeleteTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ class _GetIamPolicy(CloudTasksRestStub):
+ def __hash__(self):
+ return hash("GetIamPolicy")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ def __call__(
+ self,
+ request: iam_policy_pb2.GetIamPolicyRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Call the get iam policy method over HTTP.
+
+ Args:
+ request (~.iam_policy_pb2.GetIamPolicyRequest):
+ The request object. Request message for ``GetIamPolicy`` method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.policy_pb2.Policy:
+ An Identity and Access Management (IAM) policy, which
+ specifies access controls for Google Cloud resources.
+
+ A ``Policy`` is a collection of ``bindings``. A
+ ``binding`` binds one or more ``members``, or
+ principals, to a single ``role``. Principals can be user
+ accounts, service accounts, Google groups, and domains
+ (such as G Suite). A ``role`` is a named list of
+ permissions; each ``role`` can be an IAM predefined role
+ or a user-created custom role.
+
+ For some types of Google Cloud resources, a ``binding``
+ can also specify a ``condition``, which is a logical
+ expression that allows access to a resource only if the
+ expression evaluates to ``true``. A condition can add
+ constraints based on attributes of the request, the
+ resource, or both. To learn which resources support
+ conditions in their IAM policies, see the `IAM
+ documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+
+ **JSON example:**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": [
+ "user:eve@example.com"
+ ],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ],
+ "etag": "BwWWja0YfJA=",
+ "version": 3
+ }
+
+ **YAML example:**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+ etag: BwWWja0YfJA=
+ version: 3
+
+ For a description of IAM and its features, see the `IAM
+ documentation <https://cloud.google.com/iam/docs/>`__.
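To make the policy docstring concrete, a sketch of fetching and inspecting a queue's bindings through the client; resource names are placeholders:

```python
# Sketch: fetch a queue's IAM policy and walk its bindings.
from google.iam.v1 import iam_policy_pb2
from google.cloud import tasks_v2

client = tasks_v2.CloudTasksClient(transport="rest")
policy = client.get_iam_policy(
    request=iam_policy_pb2.GetIamPolicyRequest(
        resource="projects/my-project/locations/us-central1/queues/my-queue",
    )
)
for binding in policy.bindings:
    print(binding.role, list(binding.members))
```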
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _GetQueue(CloudTasksRestStub): + def __hash__(self): + return hash("GetQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.GetQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the get queue method over HTTP. + + Args: + request (~.cloudtasks.GetQueueRequest): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/queues/*}", + }, + ] + request, metadata = self._interceptor.pre_get_queue(request, metadata) + pb_request = cloudtasks.GetQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_queue(resp) + return resp + + class _GetTask(CloudTasksRestStub): + def __hash__(self): + return hash("GetTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.GetTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Call the get task method over HTTP. + + Args: + request (~.cloudtasks.GetTaskRequest): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*/queues/*/tasks/*}", + }, + ] + request, metadata = self._interceptor.pre_get_task(request, metadata) + pb_request = cloudtasks.GetTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = task.Task() + pb_resp = task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_task(resp) + return resp + + class _ListQueues(CloudTasksRestStub): + def __hash__(self): + return hash("ListQueues") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ListQueuesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.ListQueuesResponse: + r"""Call the list queues method over HTTP. + + Args: + request (~.cloudtasks.ListQueuesRequest): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudtasks.ListQueuesResponse: + Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*}/queues", + }, + ] + request, metadata = self._interceptor.pre_list_queues(request, metadata) + pb_request = cloudtasks.ListQueuesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.ListQueuesResponse() + pb_resp = cloudtasks.ListQueuesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_queues(resp) + return resp + + class _ListTasks(CloudTasksRestStub): + def __hash__(self): + return hash("ListTasks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ListTasksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.ListTasksResponse: + r"""Call the list tasks method over HTTP. + + Args: + request (~.cloudtasks.ListTasksRequest): + The request object. Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudtasks.ListTasksResponse: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{parent=projects/*/locations/*/queues/*}/tasks", + }, + ] + request, metadata = self._interceptor.pre_list_tasks(request, metadata) + pb_request = cloudtasks.ListTasksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.ListTasksResponse() + pb_resp = cloudtasks.ListTasksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tasks(resp) + return resp + + class _PauseQueue(CloudTasksRestStub): + def __hash__(self): + return hash("PauseQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.PauseQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the pause queue method over HTTP. + + Args: + request (~.cloudtasks.PauseQueueRequest): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/queues/*}:pause", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_pause_queue(request, metadata) + pb_request = cloudtasks.PauseQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pause_queue(resp) + return resp + + class _PurgeQueue(CloudTasksRestStub): + def __hash__(self): + return hash("PurgeQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.PurgeQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the purge queue method over HTTP. + + Args: + request (~.cloudtasks.PurgeQueueRequest): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/queues/*}:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_queue(request, metadata) + pb_request = cloudtasks.PurgeQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_queue(resp) + return resp + + class _ResumeQueue(CloudTasksRestStub): + def __hash__(self): + return hash("ResumeQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ResumeQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the resume queue method over HTTP. + + Args: + request (~.cloudtasks.ResumeQueueRequest): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/queues/*}:resume", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resume_queue(request, metadata) + pb_request = cloudtasks.ResumeQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume_queue(resp) + return resp + + class _RunTask(CloudTasksRestStub): + def __hash__(self): + return hash("RunTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.RunTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Call the run task method over HTTP. + + Args: + request (~.cloudtasks.RunTaskRequest): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_task(request, metadata) + pb_request = cloudtasks.RunTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = task.Task() + pb_resp = task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_task(resp) + return resp + + class _SetIamPolicy(CloudTasksRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support
+ conditions in their IAM policies, see the `IAM
+ documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+
+ **JSON example:**
+
+ ::
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": [
+ "user:eve@example.com"
+ ],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ],
+ "etag": "BwWWja0YfJA=",
+ "version": 3
+ }
+
+ **YAML example:**
+
+ ::
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+ etag: BwWWja0YfJA=
+ version: 3
+
+ For a description of IAM and its features, see the `IAM
+ documentation <https://cloud.google.com/iam/docs/>`__.
+
+ """
+
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy",
+ "body": "*",
+ },
+ ]
+ request, metadata = self._interceptor.pre_set_iam_policy(request, metadata)
+ pb_request = request
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(self._get_unset_required_fields(query_params))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(CloudTasksRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _UpdateQueue(CloudTasksRestStub): + def __hash__(self): + return hash("UpdateQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.UpdateQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Call the update queue method over HTTP. + + Args: + request (~.cloudtasks.UpdateQueueRequest): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2/{queue.name=projects/*/locations/*/queues/*}", + "body": "queue", + }, + ] + request, metadata = self._interceptor.pre_update_queue(request, metadata) + pb_request = cloudtasks.UpdateQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_queue.Queue() + pb_resp = gct_queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_queue(resp) + return resp + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_queues( + self, + ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListQueues(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tasks( + self, + ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTasks(self._session, self._host, self._interceptor) # type: ignore + + @property + def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PauseQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
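Each property instantiates its stub lazily, so the transport is directly callable per method. A sketch of bypassing the client wrapper, purely to make the property-to-stub wiring concrete (assumes Application Default Credentials are available):

```python
# Sketch: invoke a stub directly through the transport property.
# The client normally does this for you; shown only to illustrate the wiring.
from google.cloud.tasks_v2.services.cloud_tasks.transports.rest import (
    CloudTasksRestTransport,
)
from google.cloud.tasks_v2.types import cloudtasks

transport = CloudTasksRestTransport()  # default host + ADC credentials
queue = transport.get_queue(
    cloudtasks.GetQueueRequest(
        name="projects/my-project/locations/us-central1/queues/my-queue",
    )
)
print(queue.state)
```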
+ # In C++ this would require a dynamic_cast + return self._PurgeQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResumeQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudTasksRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudTasksRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudTasksRestTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/types/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/__init__.py new file mode 100644 index 000000000000..b4d16c1ea6da --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/__init__.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cloudtasks import ( + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, +) +from .queue import Queue, RateLimits, RetryConfig, StackdriverLoggingConfig +from .target import ( + AppEngineHttpRequest, + AppEngineRouting, + HttpMethod, + HttpRequest, + OAuthToken, + OidcToken, +) +from .task import Attempt, Task + +__all__ = ( + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "PauseQueueRequest", + "PurgeQueueRequest", + "ResumeQueueRequest", + "RunTaskRequest", + "UpdateQueueRequest", + "Queue", + "RateLimits", + "RetryConfig", + "StackdriverLoggingConfig", + "AppEngineHttpRequest", + "AppEngineRouting", + "HttpRequest", + "OAuthToken", + "OidcToken", + "HttpMethod", + "Attempt", + "Task", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/types/cloudtasks.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/cloudtasks.py new file mode 100644 index 000000000000..75d8cdc3790a --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/cloudtasks.py @@ -0,0 +1,561 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
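Because `types/__init__.py` re-exports everything, user code can build requests from `google.cloud.tasks_v2` alone. A sketch of a task with an HTTP target, using only names listed in the `__all__` above:

```python
# Sketch: build a task with an HTTP target from the re-exported types.
from google.cloud import tasks_v2

task = tasks_v2.Task(
    http_request=tasks_v2.HttpRequest(
        http_method=tasks_v2.HttpMethod.POST,
        url="https://example.com/handler",  # placeholder endpoint
        headers={"Content-Type": "application/json"},
        body=b'{"hello": "world"}',
    )
)
```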
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import task as gct_task + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2", + manifest={ + "ListQueuesRequest", + "ListQueuesResponse", + "GetQueueRequest", + "CreateQueueRequest", + "UpdateQueueRequest", + "DeleteQueueRequest", + "PurgeQueueRequest", + "PauseQueueRequest", + "ResumeQueueRequest", + "ListTasksRequest", + "ListTasksResponse", + "GetTaskRequest", + "CreateTaskRequest", + "DeleteTaskRequest", + "RunTaskRequest", + }, +) + + +class ListQueuesRequest(proto.Message): + r"""Request message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Attributes: + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + filter (str): + ``filter`` can be used to specify a subset of queues. Any + [Queue][google.cloud.tasks.v2.Queue] field can be used as a + filter and several operators are supported. For example: + ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as + described in `Stackdriver's Advanced Logs + Filters `__. + + Sample filter "state: PAUSED". + + Note that using filters might cause fewer queues than the + requested page_size to be returned. + page_size (int): + Requested page size. + + The maximum page size is 9800. If unspecified, the page size + will be the maximum. Fewer queues than requested might be + returned, even if more queues exist; use the + [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] + in the response to determine if more queues exist. + page_token (str): + A token identifying the page of results to return. + + To request the first page of results, page_token must be + empty. To request the next page of results, page_token must + be the value of + [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] + returned from the previous call to the + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] + method. It is an error to switch the value of the + [filter][google.cloud.tasks.v2.ListQueuesRequest.filter] + while iterating through pages. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListQueuesResponse(proto.Message): + r"""Response message for + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. + + Attributes: + queues (MutableSequence[google.cloud.tasks_v2.types.Queue]): + The list of queues. + next_page_token (str): + A token to retrieve the next page of results. + + To return the next page of results, call + [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] + with this value as the + [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token]. + + If the next_page_token is empty, there are no more results. + + The page token is valid for only 2 hours.
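A hedged sketch of how these pagination fields are consumed in practice; the generated client wraps ListQueues in a pager that follows ``next_page_token`` automatically, so the loop below never touches ``page_token`` by hand (project and location names are hypothetical):

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    request = tasks_v2.ListQueuesRequest(
        parent="projects/my-project/locations/us-central1",
        filter="state: PAUSED",
    )
    # Iterating the pager issues follow-up requests as needed.
    for queue in client.list_queues(request=request):
        print(queue.name)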
+ """ + + @property + def raw_page(self): + return self + + queues: MutableSequence[gct_queue.Queue] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetQueueRequest(proto.Message): + r"""Request message for + [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. + + Attributes: + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateQueueRequest(proto.Message): + r"""Request message for + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. + + Attributes: + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + queue (google.cloud.tasks_v2.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2.Queue.name] cannot be + the same as an existing queue. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + queue: gct_queue.Queue = proto.Field( + proto.MESSAGE, + number=2, + message=gct_queue.Queue, + ) + + +class UpdateQueueRequest(proto.Message): + r"""Request message for + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. + + Attributes: + queue (google.cloud.tasks_v2.types.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. Any + value specified for an output only field will be ignored. + The queue's [name][google.cloud.tasks.v2.Queue.name] cannot + be changed. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields of the + queue are being updated. + If empty, then all fields will be updated. + """ + + queue: gct_queue.Queue = proto.Field( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteQueueRequest(proto.Message): + r"""Request message for + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PurgeQueueRequest(proto.Message): + r"""Request message for + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class PauseQueueRequest(proto.Message): + r"""Request message for + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. + + Attributes: + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ResumeQueueRequest(proto.Message): + r"""Request message for + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. + + Attributes: + name (str): + Required. The queue name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTasksRequest(proto.Message): + r"""Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + response_view (google.cloud.tasks_v2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + page_size (int): + Maximum page size. + + Fewer tasks than requested might be returned, even if more + tasks exist; use + [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] + in the response to determine if more tasks exist. + + The maximum page size is 1000. If unspecified, the page size + will be the maximum. + page_token (str): + A token identifying the page of results to return. + + To request the first page of results, page_token must be + empty. To request the next page of results, page_token must + be the value of + [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] + returned from the previous call to the + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] + method. + + The page token is valid for only 2 hours. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListTasksResponse(proto.Message): + r"""Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. + + Attributes: + tasks (MutableSequence[google.cloud.tasks_v2.types.Task]): + The list of tasks. + next_page_token (str): + A token to retrieve the next page of results. + + To return the next page of results, call + [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] with + this value as the + [page_token][google.cloud.tasks.v2.ListTasksRequest.page_token]. + + If the next_page_token is empty, there are no more results. + """ + + @property + def raw_page(self): + return self + + tasks: MutableSequence[gct_task.Task] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_task.Task, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTaskRequest(proto.Message): + r"""Request message for getting a task using + [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned.
+ + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +class CreateTaskRequest(proto.Message): + r"""Request message for + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + task (google.cloud.tasks_v2.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2.Task.name]. If a name is not + specified then the system will generate a random unique task + id, which will be set in the task returned in the + [response][google.cloud.tasks.v2.Task.name]. + + If [schedule_time][google.cloud.tasks.v2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set it to + the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task de-duplication. + If a task's ID is identical to that of an existing task or a + task that was deleted or executed recently then the call + will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1 hour after + the original task was deleted or executed. If the task's + queue was created using queue.yaml or queue.xml, then + another task with the same name can't be created for ~9 days + after the original task was deleted or executed. + + Because there is an extra lookup cost to identify duplicate + task names, these + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id is + recommended. Choosing task ids that are sequential or have + sequential prefixes, for example using a timestamp, causes + an increase in latency and error rates in all task commands. + The infrastructure relies on an approximately uniform + distribution of task ids to store and serve tasks + efficiently. + response_view (google.cloud.tasks_v2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource.
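To illustrate the de-duplication guidance above, a sketch of deriving a stable, uniformly distributed task ID from a business key (all names are hypothetical):

    import hashlib

    from google.cloud import tasks_v2

    queue_path = "projects/my-project/locations/us-central1/queues/my-queue"
    # Hashing avoids the sequential-ID hotspots the docstring warns about
    # while keeping CreateTask idempotent for a given order ID.
    task_id = hashlib.sha256(b"order-12345").hexdigest()
    request = tasks_v2.CreateTaskRequest(
        parent=queue_path,
        task=tasks_v2.Task(
            name=f"{queue_path}/tasks/{task_id}",
            http_request=tasks_v2.HttpRequest(url="https://example.com/handler"),
        ),
    )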
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + task: gct_task.Task = proto.Field( + proto.MESSAGE, + number=2, + message=gct_task.Task, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=3, + enum=gct_task.Task.View, + ) + + +class DeleteTaskRequest(proto.Message): + r"""Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RunTaskRequest(proto.Message): + r"""Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2.Task] resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/types/queue.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/queue.py new file mode 100644 index 000000000000..5fa6d9ec8055 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/queue.py @@ -0,0 +1,474 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.tasks_v2.types import target + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2", + manifest={ + "Queue", + "RateLimits", + "RetryConfig", + "StackdriverLoggingConfig", + }, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + queue types, and others. 
+ + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. + + The queue name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the queue's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + app_engine_routing_override (google.cloud.tasks_v2.types.AppEngineRouting): + Overrides for [task-level + app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + These settings apply only to [App Engine + tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this + queue. [Http tasks][google.cloud.tasks.v2.HttpRequest] are + not affected. + + If set, ``app_engine_routing_override`` is used for all [App + Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in + the queue, no matter what the setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + rate_limits (google.cloud.tasks_v2.types.RateLimits): + Rate limits for task dispatches. + + [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] and + [retry_config][google.cloud.tasks.v2.Queue.retry_config] are + related because they both control task attempts. However + they control task attempts in different ways: + + - [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] + controls the total rate of dispatches from a queue (i.e. + all traffic dispatched from the queue, regardless of + whether the dispatch is from a first attempt or a retry). + - [retry_config][google.cloud.tasks.v2.Queue.retry_config] + controls what happens to a particular task after its + first attempt fails. That is, + [retry_config][google.cloud.tasks.v2.Queue.retry_config] + controls task retries (the second attempt, third attempt, + etc). + + The queue's actual dispatch rate is the result of: + + - Number of tasks in the queue + - User-specified throttling: + [rate_limits][google.cloud.tasks.v2.Queue.rate_limits], + [retry_config][google.cloud.tasks.v2.Queue.retry_config], + and the [queue's + state][google.cloud.tasks.v2.Queue.state]. + - System throttling due to ``429`` (Too Many Requests) or + ``503`` (Service Unavailable) responses from the worker, + high error rates, or to smooth sudden large traffic + spikes. + retry_config (google.cloud.tasks_v2.types.RetryConfig): + Settings that determine the retry behavior. + + - For tasks created using Cloud Tasks: the queue-level + retry settings apply to all tasks in the queue that were + created using Cloud Tasks. Retry settings cannot be set + on individual tasks. + - For tasks created using the App Engine SDK: the + queue-level retry settings apply to all tasks in the + queue which do not have retry settings explicitly set on + the task and were created by the App Engine SDK. See `App + Engine + documentation `__. + state (google.cloud.tasks_v2.types.Queue.State): + Output only. The state of the queue.
+ + ``state`` can only be changed by calling + [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue], + [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue], + or uploading + `queue.yaml/xml `__. + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] + cannot be used to change ``state``. + purge_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2.Task.create_time] before + this time were purged. + + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. + stackdriver_logging_config (google.cloud.tasks_v2.types.StackdriverLoggingConfig): + Configuration options for writing logs to `Stackdriver + Logging `__. If this + field is unset, then no logs are written. + """ + + class State(proto.Enum): + r"""State of the queue. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + RUNNING (1): + The queue is running. Tasks can be dispatched. + + If the queue was created using Cloud Tasks and the queue has + had no activity (method calls or task dispatches) for 30 + days, the queue may take a few minutes to re-activate. Some + method calls may return + [NOT_FOUND][google.rpc.Code.NOT_FOUND] and tasks may not be + dispatched for a few minutes until the queue has been + re-activated. + PAUSED (2): + Tasks are paused by the user. If the queue is + paused then Cloud Tasks will stop delivering + tasks from it, but more tasks can still be added + to it by the user. + DISABLED (3): + The queue is disabled. + + A queue becomes ``DISABLED`` when + `queue.yaml `__ + or + `queue.xml `__ + is uploaded which does not contain the queue. You cannot + directly disable a queue. + + When a queue is disabled, tasks can still be added to a + queue but the tasks are not dispatched. + + To permanently delete this queue and all of its tasks, call + [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + app_engine_routing_override: target.AppEngineRouting = proto.Field( + proto.MESSAGE, + number=2, + message=target.AppEngineRouting, + ) + rate_limits: "RateLimits" = proto.Field( + proto.MESSAGE, + number=3, + message="RateLimits", + ) + retry_config: "RetryConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="RetryConfig", + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + purge_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + stackdriver_logging_config: "StackdriverLoggingConfig" = proto.Field( + proto.MESSAGE, + number=9, + message="StackdriverLoggingConfig", + ) + + +class RateLimits(proto.Message): + r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask], will run a task + even if the queue has reached its + [RateLimits][google.cloud.tasks.v2.RateLimits]. + + Attributes: + max_dispatches_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. 
+ + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + - The maximum allowed value is 500. + + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + Output only. The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. + + Cloud Tasks will pick the value of ``max_burst_size`` based + on the value of + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. + + For queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + Since ``max_burst_size`` is output only, if + [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] + is called on a queue created by ``queue.yaml/xml``, + ``max_burst_size`` will be reset based on the value of + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second], + regardless of whether + [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second] + is updated. + max_concurrent_dispatches (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. + + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_dispatches_per_second: float = proto.Field( + proto.DOUBLE, + number=1, + ) + max_burst_size: int = proto.Field( + proto.INT32, + number=2, + ) + max_concurrent_dispatches: int = proto.Field( + proto.INT32, + number=3, + ) + + +class RetryConfig(proto.Message): + r"""Retry config. + + These settings determine when a failed task attempt is retried. + + Attributes: + max_attempts (int): + Number of attempts per task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be >= -1. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + -1 indicates unlimited attempts. + + This field has the same meaning as `task_retry_limit in + queue.yaml/xml `__. + max_retry_duration (google.protobuf.duration_pb2.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. Once ``max_retry_duration`` time has passed + *and* the task has been attempted + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times, no further attempts will be made and the task will be + deleted. + + If zero, then the task age is unlimited. 
+ + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_retry_duration`` will be truncated to the nearest + second. + + This field has the same meaning as `task_age_limit in + queue.yaml/xml `__. + min_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2.Task.schedule_time] for + retry between + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies + that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``min_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `min_backoff_seconds in + queue.yaml/xml `__. + max_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2.Task.schedule_time] for + retry between + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies + that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `max_backoff_seconds in + queue.yaml/xml `__. + max_doublings (int): + The time between retries will double ``max_doublings`` + times. + + A task's retry interval starts at + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff], + then doubles ``max_doublings`` times, then increases + linearly, and finally retries at intervals of + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + up to + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times. + + For example, if + [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] + is 10s, + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + is 300s, and ``max_doublings`` is 3, then a task will + first be retried in 10s. The retry interval will double + three times, and then increase linearly by 2^3 \* 10s. + Finally, the task will retry at intervals of + [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] + until the task has been attempted + [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + times. Thus, the requests will retry at 10s, 20s, 40s, 80s, + 160s, 240s, 300s, 300s, .... + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + This field has the same meaning as `max_doublings in + queue.yaml/xml `__. + """ + + max_attempts: int = proto.Field( + proto.INT32, + number=1, + ) + max_retry_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + min_backoff: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + max_backoff: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + max_doublings: int = proto.Field( + proto.INT32, + number=5, + ) + + +class StackdriverLoggingConfig(proto.Message): + r"""Configuration options for writing logs to `Stackdriver + Logging `__. + + Attributes: + sampling_ratio (float): + Specifies the fraction of operations to write to + `Stackdriver + Logging `__.
This + field may contain any value between 0.0 and 1.0, inclusive. + 0.0 is the default and means that no operations are logged. + """ + + sampling_ratio: float = proto.Field( + proto.DOUBLE, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/types/target.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/target.py new file mode 100644 index 000000000000..2ab2dc6a5b1a --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/target.py @@ -0,0 +1,582 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2", + manifest={ + "HttpMethod", + "HttpRequest", + "AppEngineHttpRequest", + "AppEngineRouting", + "OAuthToken", + "OidcToken", + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to deliver the task. + + Values: + HTTP_METHOD_UNSPECIFIED (0): + HTTP method unspecified + POST (1): + HTTP POST + GET (2): + HTTP GET + HEAD (3): + HTTP HEAD + PUT (4): + HTTP PUT + DELETE (5): + HTTP DELETE + PATCH (6): + HTTP PATCH + OPTIONS (7): + HTTP OPTIONS + """ + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + PATCH = 6 + OPTIONS = 7 + + +class HttpRequest(proto.Message): + r"""HTTP request. + + The task will be pushed to the worker as an HTTP request. If the + worker or the redirected worker acknowledges the task by returning a + successful HTTP response code ([``200`` - ``299``]), the task will + be removed from the queue. If any other HTTP response code is + returned or no response is received, the task will be retried + according to the following: + + - User-specified throttling: [retry + configuration][google.cloud.tasks.v2.Queue.retry_config], [rate + limits][google.cloud.tasks.v2.Queue.rate_limits], and the + [queue's state][google.cloud.tasks.v2.Queue.state]. + + - System throttling: To prevent the worker from overloading, Cloud + Tasks may temporarily reduce the queue's effective rate. + User-specified settings will not be changed. + + System throttling happens because: + + - Cloud Tasks backs off on all errors. Normally the backoff + specified in [rate + limits][google.cloud.tasks.v2.Queue.rate_limits] will be used. + But if the worker returns ``429`` (Too Many Requests), ``503`` + (Service Unavailable), or the rate of errors is high, Cloud Tasks + will use a higher backoff rate. The retry specified in the + ``Retry-After`` HTTP response header is considered. + + - To prevent traffic spikes and to smooth sudden increases in + traffic, dispatches ramp up slowly when the queue is newly + created or idle and if large numbers of tasks suddenly become + available to dispatch (due to spikes in create task rates, the + queue being unpaused, or many tasks that are scheduled at the + same time). 
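The doubling-then-linear retry schedule that ``RetryConfig`` describes (see ``max_doublings`` above) can be sketched numerically; this plain-Python helper reproduces the 10s/300s/``max_doublings=3`` worked example from the docstring:

    def backoff_schedule(min_backoff, max_backoff, max_doublings, attempts):
        """Illustrative only; Cloud Tasks computes the real schedule server-side."""
        intervals, interval, doublings = [], min_backoff, 0
        # After the doubling phase, the interval grows by min_backoff * 2**max_doublings.
        linear_step = min_backoff * (2 ** max_doublings)
        for _ in range(attempts):
            intervals.append(min(interval, max_backoff))
            if doublings < max_doublings:
                interval *= 2
                doublings += 1
            else:
                interval += linear_step
        return intervals

    print(backoff_schedule(10, 300, 3, 9))
    # -> [10, 20, 40, 80, 160, 240, 300, 300, 300]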
+ + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + url (str): + Required. The full url path that the request will be sent + to. + + This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode + some characters for safety and compatibility. The maximum + allowed URL length is 2083 characters after encoding. + + The ``Location`` header response from a redirect response + [``300`` - ``399``] may be followed. The redirect is not + counted as a separate attempt. + http_method (google.cloud.tasks_v2.types.HttpMethod): + The HTTP method to use for the request. The + default is POST. + headers (MutableMapping[str, str]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. + + A partial list of headers that will be ignored or replaced + is: + + - Host: This will be computed by Cloud Tasks and derived + from + [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. + - Content-Length: This will be computed by Cloud Tasks. + - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. + - ``X-Google-*``: Google use only. + - ``X-AppEngine-*``: Google use only. + + ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the + [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + + Headers which can have multiple values (according to + RFC2616) can be specified using comma-separated values. + + The size of the headers must be less than 80KB. + body (bytes): + HTTP request body. + + A request body is allowed only if the [HTTP + method][google.cloud.tasks.v2.HttpRequest.http_method] is + POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2.HttpMethod]. + oauth_token (google.cloud.tasks_v2.types.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + + This field is a member of `oneof`_ ``authorization_header``. + oidc_token (google.cloud.tasks_v2.types.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. + + This field is a member of `oneof`_ ``authorization_header``. 
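A hedged sketch of the ``authorization_header`` oneof in use, attaching an OIDC token to an HTTP task (URL and service account are hypothetical):

    from google.cloud import tasks_v2

    task = tasks_v2.Task(
        http_request=tasks_v2.HttpRequest(
            url="https://example.com/task-handler",
            http_method=tasks_v2.HttpMethod.POST,
            headers={"Content-Type": "application/json"},
            body=b'{"payload": "..."}',
            # Setting oidc_token selects the authorization_header oneof;
            # assigning oauth_token instead would clear it.
            oidc_token=tasks_v2.OidcToken(
                service_account_email="invoker@my-project.iam.gserviceaccount.com",
            ),
        ),
    )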
+ """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=2, + enum="HttpMethod", + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + body: bytes = proto.Field( + proto.BYTES, + number=4, + ) + oauth_token: "OAuthToken" = proto.Field( + proto.MESSAGE, + number=5, + oneof="authorization_header", + message="OAuthToken", + ) + oidc_token: "OidcToken" = proto.Field( + proto.MESSAGE, + number=6, + oneof="authorization_header", + message="OidcToken", + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. + + Using + [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. + + The [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] used + to construct the URL that the task is delivered to can be set at the + queue-level or task-level: + + - If [app_engine_routing_override is set on the + queue][google.cloud.tasks.v2.Queue.app_engine_routing_override], + this value is used for all tasks in the queue, no matter what the + setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` [host][google.cloud.tasks.v2.AppEngineRouting.host] + ``+`` + [relative_uri][google.cloud.tasks.v2.AppEngineHttpRequest.relative_uri] + + Tasks can be dispatched to secure app handlers, unsecure app + handlers, and URIs restricted with + ```login: admin`` `__. + Because tasks are not run as any user, they cannot be dispatched to + URIs restricted with + ```login: required`` `__ + Task dispatches also do not follow redirects. + + The task attempt has succeeded if the app's request handler returns + an HTTP response code in the range [``200`` - ``299``]. The task + attempt has failed if the app's handler returns a non-2xx response + code or Cloud Tasks does not receive response before the + [deadline][google.cloud.tasks.v2.Task.dispatch_deadline]. Failed + tasks will be retried according to the [retry + configuration][google.cloud.tasks.v2.Queue.retry_config]. ``503`` + (Service Unavailable) is considered an App Engine system error + instead of an application error and will cause Cloud Tasks' traffic + congestion control to temporarily throttle the queue's dispatches. + Unlike other types of task targets, a ``429`` (Too Many Requests) + response from an app handler does not cause traffic congestion + control to throttle the queue. + + Attributes: + http_method (google.cloud.tasks_v2.types.HttpMethod): + The HTTP method to use for the request. The default is POST. 
+ + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http_method, + otherwise the task attempt fails with error code 405 (Method + Not Allowed). See `Writing a push task request + handler `__ + and the App Engine documentation for your runtime on `How + Requests are + Handled `__. + app_engine_routing (google.cloud.tasks_v2.types.AppEngineRouting): + Task-level setting for App Engine routing. + + - If [app_engine_routing_override is set on the + queue][google.cloud.tasks.v2.Queue.app_engine_routing_override], + this value is used for all tasks in the queue, no matter + what the setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + relative_uri (str): + The relative URI. + + The relative URI must begin with "/" and must be + a valid HTTP relative URI. It can contain a path + and query string arguments. If the relative URI + is empty, then the root path "/" will be used. + No spaces are allowed, and the maximum length + allowed is 2083 characters. + headers (MutableMapping[str, str]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2.CloudTasks.CreateTask]. + Repeated headers are not supported but a header value can + contain commas. + + Cloud Tasks sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [body][google.cloud.tasks.v2.AppEngineHttpRequest.body], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. + + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-*`` + - ``X-AppEngine-*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] + documentation. + body (bytes): + HTTP request body. + + A request body is allowed only if the HTTP method is POST or + PUT. It is an error to set a body on a task with an + incompatible [HttpMethod][google.cloud.tasks.v2.HttpMethod]. 
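For comparison with the HTTP target above, a minimal App Engine task sketch (the handler path is hypothetical):

    from google.cloud import tasks_v2

    task = tasks_v2.Task(
        app_engine_http_request=tasks_v2.AppEngineHttpRequest(
            http_method=tasks_v2.HttpMethod.POST,
            relative_uri="/internal/handle-task",
            body=b"payload",  # allowed only for POST and PUT, per the body docs above
        ),
    )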
+ """ + + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=1, + enum="HttpMethod", + ) + app_engine_routing: "AppEngineRouting" = proto.Field( + proto.MESSAGE, + number=2, + message="AppEngineRouting", + ) + relative_uri: str = proto.Field( + proto.STRING, + number=3, + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + body: bytes = proto.Field( + proto.BYTES, + number=5, + ) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + Defines routing characteristics specific to App Engine - service, + version, and instance. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Using [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable into + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable, then + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance] + are the empty string. + version (str): + App version. + + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable into + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2.AppEngineRouting.host] is not + parsable, then + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. 
+ + The host is constructed from the domain name of the app + associated with the queue's project ID (for example + .appspot.com), and the + [service][google.cloud.tasks.v2.AppEngineRouting.service], + [version][google.cloud.tasks.v2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. + Tasks which were created using the App Engine SDK might have + a custom domain name. + + For more information, see `How Requests are + Routed `__. + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + instance: str = proto.Field( + proto.STRING, + number=3, + ) + host: str = proto.Field( + proto.STRING, + number=4, + ) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + scope: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. + """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2/types/task.py b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/task.py new file mode 100644 index 000000000000..bdb4726e288b --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2/types/task.py @@ -0,0 +1,300 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
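A short sketch tying the routing pieces together: pinning every App Engine task in a queue to a dedicated service via the queue-level override (names are illustrative):

    from google.cloud import tasks_v2

    queue = tasks_v2.Queue(
        name="projects/my-project/locations/us-central1/queues/worker-queue",
        # The queue-level override wins over any task-level app_engine_routing.
        app_engine_routing_override=tasks_v2.AppEngineRouting(
            service="worker",
            version="v1",
        ),
    )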
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.tasks_v2.types import target + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2", + manifest={ + "Task", + "Attempt", + }, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + + The task name. + + The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the task's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + app_engine_http_request (google.cloud.tasks_v2.types.AppEngineHttpRequest): + HTTP request that is sent to the App Engine app handler. + + An App Engine task is a task that has + [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] + set. + + This field is a member of `oneof`_ ``message_type``. + http_request (google.cloud.tasks_v2.types.HttpRequest): + HTTP request that is sent to the worker. + + An HTTP task is a task that has + [HttpRequest][google.cloud.tasks.v2.HttpRequest] set. + + This field is a member of `oneof`_ ``message_type``. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the task is scheduled to be attempted or + retried. + + ``schedule_time`` will be truncated to the nearest + microsecond. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the task was created. + + ``create_time`` will be truncated to the nearest second. + dispatch_deadline (google.protobuf.duration_pb2.Duration): + The deadline for requests sent to the worker. If the worker + does not respond by this deadline then the request is + cancelled and the attempt is marked as a + ``DEADLINE_EXCEEDED`` failure. Cloud Tasks will retry the + task according to the + [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + + Note that when the request is cancelled, Cloud Tasks will + stop listening for the response, but whether the worker + stops processing depends on the worker. For example, if the + worker is stuck, it may not react to cancelled requests. + + The default and maximum values depend on the type of + request: + + - For [HTTP tasks][google.cloud.tasks.v2.HttpRequest], the + default is 10 minutes. 
The deadline must be in the + interval [15 seconds, 30 minutes]. + + - For [App Engine + tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 + indicates that the request has the default deadline. The + default deadline depends on the `scaling + type `__ + of the service: 10 minutes for standard apps with + automatic scaling, 24 hours for standard apps with manual + and basic scaling, and 60 minutes for flex apps. If the + request deadline is set, it must be in the interval [15 + seconds, 24 hours 15 seconds]. Regardless of the task's + ``dispatch_deadline``, the app handler will not run for + longer than the service's timeout. We recommend + setting the ``dispatch_deadline`` to at most a few + seconds more than the app handler's timeout. For more + information, see + `Timeouts `__. + + ``dispatch_deadline`` will be truncated to the nearest + millisecond. The deadline is an approximate deadline. + dispatch_count (int): + Output only. The number of attempts + dispatched. + This count includes attempts which have been + dispatched but haven't received a response. + response_count (int): + Output only. The number of attempts which + have received a response. + first_attempt (google.cloud.tasks_v2.types.Attempt): + Output only. The status of the task's first attempt. + + Only + [dispatch_time][google.cloud.tasks.v2.Attempt.dispatch_time] + will be set. The other + [Attempt][google.cloud.tasks.v2.Attempt] information is not + retained by Cloud Tasks. + last_attempt (google.cloud.tasks_v2.types.Attempt): + Output only. The status of the task's last + attempt. + view (google.cloud.tasks_v2.types.Task.View): + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2.Task] has been returned. + """ + + class View(proto.Enum): + r"""The view specifies a subset of [Task][google.cloud.tasks.v2.Task] + data. + + When a task is returned in a response, not all information is + retrieved by default because some data, such as payloads, might be + desirable to return only when needed because of its large size or + because of the sensitivity of data that it contains. + + Values: + VIEW_UNSPECIFIED (0): + Unspecified. Defaults to BASIC. + BASIC (1): + The basic view omits fields which can be large or can + contain sensitive data. + + This view does not include the [body in + AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest.body]. + Bodies are desirable to return only when needed, because + they can be large and because of the sensitivity of the data + that you choose to store in it. + FULL (2): + All information is returned. + + Authorization for + [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Queue][google.cloud.tasks.v2.Queue] resource.
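A hedged sketch of requesting the FULL view, which as noted above requires the ``cloudtasks.tasks.fullView`` permission (the resource name is hypothetical):

    from google.cloud import tasks_v2

    client = tasks_v2.CloudTasksClient()
    task = client.get_task(
        request=tasks_v2.GetTaskRequest(
            name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task",
            response_view=tasks_v2.Task.View.FULL,  # includes the request body
        )
    )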
+ """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_request: target.AppEngineHttpRequest = proto.Field( + proto.MESSAGE, + number=2, + oneof="message_type", + message=target.AppEngineHttpRequest, + ) + http_request: target.HttpRequest = proto.Field( + proto.MESSAGE, + number=3, + oneof="message_type", + message=target.HttpRequest, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + dispatch_deadline: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + dispatch_count: int = proto.Field( + proto.INT32, + number=7, + ) + response_count: int = proto.Field( + proto.INT32, + number=8, + ) + first_attempt: "Attempt" = proto.Field( + proto.MESSAGE, + number=9, + message="Attempt", + ) + last_attempt: "Attempt" = proto.Field( + proto.MESSAGE, + number=10, + message="Attempt", + ) + view: View = proto.Field( + proto.ENUM, + number=11, + enum=View, + ) + + +class Attempt(proto.Message): + r"""The status of a task attempt. + + Attributes: + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was scheduled. + + ``schedule_time`` will be truncated to the nearest + microsecond. + dispatch_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was dispatched. + + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt response was + received. + + ``response_time`` will be truncated to the nearest + microsecond. + response_status (google.rpc.status_pb2.Status): + Output only. The response from the worker for this attempt. + + If ``response_time`` is unset, then the task has not been + attempted or is currently running and the + ``response_status`` field is meaningless. + """ + + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + dispatch_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + response_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + response_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/__init__.py new file mode 100644 index 000000000000..fe776d9cbfb8 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/__init__.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.tasks_v2beta2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_tasks import CloudTasksAsyncClient, CloudTasksClient +from .types.cloudtasks import ( + AcknowledgeTaskRequest, + BufferTaskRequest, + BufferTaskResponse, + CancelLeaseRequest, + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + LeaseTasksRequest, + LeaseTasksResponse, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + RenewLeaseRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, + UploadQueueYamlRequest, +) +from .types.queue import Queue, QueueStats, RateLimits, RetryConfig +from .types.target import ( + AppEngineHttpRequest, + AppEngineHttpTarget, + AppEngineRouting, + HttpMethod, + HttpRequest, + HttpTarget, + OAuthToken, + OidcToken, + PathOverride, + PullMessage, + PullTarget, + QueryOverride, + UriOverride, +) +from .types.task import AttemptStatus, Task, TaskStatus + +__all__ = ( + "CloudTasksAsyncClient", + "AcknowledgeTaskRequest", + "AppEngineHttpRequest", + "AppEngineHttpTarget", + "AppEngineRouting", + "AttemptStatus", + "BufferTaskRequest", + "BufferTaskResponse", + "CancelLeaseRequest", + "CloudTasksClient", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "HttpMethod", + "HttpRequest", + "HttpTarget", + "LeaseTasksRequest", + "LeaseTasksResponse", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "OAuthToken", + "OidcToken", + "PathOverride", + "PauseQueueRequest", + "PullMessage", + "PullTarget", + "PurgeQueueRequest", + "QueryOverride", + "Queue", + "QueueStats", + "RateLimits", + "RenewLeaseRequest", + "ResumeQueueRequest", + "RetryConfig", + "RunTaskRequest", + "Task", + "TaskStatus", + "UpdateQueueRequest", + "UploadQueueYamlRequest", + "UriOverride", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_metadata.json b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_metadata.json new file mode 100644 index 000000000000..7ea03b379c34 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_metadata.json @@ -0,0 +1,358 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.tasks_v2beta2", + "protoPackage": "google.cloud.tasks.v2beta2", + "schema": "1.0", + "services": { + "CloudTasks": { + "clients": { + "grpc": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "AcknowledgeTask": { + "methods": [ + "acknowledge_task" + ] + }, + "BufferTask": { + "methods": [ + "buffer_task" + ] + }, + "CancelLease": { + "methods": [ + "cancel_lease" + ] + }, + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "LeaseTasks": { + "methods": [ + "lease_tasks" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + 
"ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "RenewLease": { + "methods": [ + "renew_lease" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + }, + "UploadQueueYaml": { + "methods": [ + "upload_queue_yaml" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudTasksAsyncClient", + "rpcs": { + "AcknowledgeTask": { + "methods": [ + "acknowledge_task" + ] + }, + "BufferTask": { + "methods": [ + "buffer_task" + ] + }, + "CancelLease": { + "methods": [ + "cancel_lease" + ] + }, + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "LeaseTasks": { + "methods": [ + "lease_tasks" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "RenewLease": { + "methods": [ + "renew_lease" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + }, + "UploadQueueYaml": { + "methods": [ + "upload_queue_yaml" + ] + } + } + }, + "rest": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "AcknowledgeTask": { + "methods": [ + "acknowledge_task" + ] + }, + "BufferTask": { + "methods": [ + "buffer_task" + ] + }, + "CancelLease": { + "methods": [ + "cancel_lease" + ] + }, + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "LeaseTasks": { + "methods": [ + "lease_tasks" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "RenewLease": { + "methods": [ + "renew_lease" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + }, + "UploadQueueYaml": { + "methods": [ + "upload_queue_yaml" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py 
b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py new file mode 100644 index 000000000000..3344051a673d --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.14.2" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/py.typed b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/py.typed new file mode 100644 index 000000000000..41f0b1b8d473 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py new file mode 100644 index 000000000000..eddc5977fd56 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
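+#
+# A minimal import sketch (assumed: the google-cloud-tasks distribution
+# is installed; both client classes are re-exported just below):
+#
+#     from google.cloud.tasks_v2beta2.services.cloud_tasks import (
+#         CloudTasksAsyncClient,
+#         CloudTasksClient,
+#     )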
+# +from .async_client import CloudTasksAsyncClient +from .client import CloudTasksClient + +__all__ = ( + "CloudTasksClient", + "CloudTasksAsyncClient", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py new file mode 100644 index 000000000000..e824ec6c9072 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/async_client.py @@ -0,0 +1,3142 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import target +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task + +from .client import CloudTasksClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudTasksTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. 
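+
+    A minimal usage sketch (illustrative only; assumes Application
+    Default Credentials and an existing project/location)::
+
+        import asyncio
+
+        from google.cloud import tasks_v2beta2
+
+        async def main():
+            client = tasks_v2beta2.CloudTasksAsyncClient()
+            pager = await client.list_queues(
+                parent="projects/my-project/locations/us-central1"
+            )
+            async for queue in pager:
+                print(queue.name)
+
+        asyncio.run(main())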
+    """
+
+    _client: CloudTasksClient
+
+    DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT
+
+    queue_path = staticmethod(CloudTasksClient.queue_path)
+    parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path)
+    task_path = staticmethod(CloudTasksClient.task_path)
+    parse_task_path = staticmethod(CloudTasksClient.parse_task_path)
+    common_billing_account_path = staticmethod(
+        CloudTasksClient.common_billing_account_path
+    )
+    parse_common_billing_account_path = staticmethod(
+        CloudTasksClient.parse_common_billing_account_path
+    )
+    common_folder_path = staticmethod(CloudTasksClient.common_folder_path)
+    parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path)
+    common_organization_path = staticmethod(CloudTasksClient.common_organization_path)
+    parse_common_organization_path = staticmethod(
+        CloudTasksClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(CloudTasksClient.common_project_path)
+    parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path)
+    common_location_path = staticmethod(CloudTasksClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        CloudTasksClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudTasksAsyncClient: The constructed client.
+        """
+        return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudTasksAsyncClient: The constructed client.
+        """
+        return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
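+
+        For example, a minimal sketch (the environment value shown is an
+        assumption for illustration, not a requirement)::
+
+            import os
+
+            os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"
+            endpoint, cert_source = (
+                CloudTasksAsyncClient.get_mtls_endpoint_and_cert_source()
+            )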
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return CloudTasksClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> CloudTasksTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CloudTasksTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(CloudTasksClient).get_transport_class, type(CloudTasksClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, CloudTasksTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cloud tasks client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.CloudTasksTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = CloudTasksClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def list_queues(
+        self,
+        request: Optional[Union[cloudtasks.ListQueuesRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListQueuesAsyncPager:
+        r"""Lists queues.
+
+        Queues are returned in lexicographical order.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_list_queues(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.ListQueuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_queues(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.ListQueuesRequest, dict]]): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + parent (:class:`str`): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListQueuesAsyncPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListQueuesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListQueuesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
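+        # (The returned pager fetches additional pages lazily as the
+        # caller iterates it with ``async for``.)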
+ return response + + async def get_queue( + self, + request: Optional[Union[cloudtasks.GetQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_get_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.GetQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.get_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.GetQueueRequest, dict]]): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_queue( + self, + request: Optional[Union[cloudtasks.CreateQueueRequest, dict]] = None, + *, + parent: Optional[str] = None, + queue: Optional[gct_queue.Queue] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_create_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.CreateQueueRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.CreateQueueRequest, dict]]): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`google.cloud.tasks_v2beta2.types.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta2.Queue.name] + cannot be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_queue( + self, + request: Optional[Union[cloudtasks.UpdateQueueRequest, dict]] = None, + *, + queue: Optional[gct_queue.Queue] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_update_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.UpdateQueueRequest( + ) + + # Make the request + response = await client.update_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.UpdateQueueRequest, dict]]): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. + queue (:class:`google.cloud.tasks_v2beta2.types.Queue`): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta2.Queue.name] cannot be + changed. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_queue( + self, + request: Optional[Union[cloudtasks.DeleteQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_delete_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.DeleteQueueRequest( + name="name_value", + ) + + # Make the request + await client.delete_queue(request=request) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.DeleteQueueRequest, dict]]): + The request object. 
Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def purge_queue( + self, + request: Optional[Union[cloudtasks.PurgeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_purge_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.PurgeQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.purge_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.PurgeQueueRequest, dict]]): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. 
+            name (:class:`str`):
+                Required. The queue name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta2.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, target types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = cloudtasks.PurgeQueueRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.purge_queue,
+            default_timeout=20.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def pause_queue(
+        self,
+        request: Optional[Union[cloudtasks.PauseQueueRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> queue.Queue:
+        r"""Pauses the queue.
+
+        If a queue is paused then the system will stop dispatching tasks
+        until the queue is resumed via
+        [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue].
+        Tasks can still be added when the queue is paused. A queue is
+        paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is
+        [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta2
+
+            async def sample_pause_queue():
+                # Create a client
+                client = tasks_v2beta2.CloudTasksAsyncClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta2.PauseQueueRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.pause_queue(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.tasks_v2beta2.types.PauseQueueRequest, dict]]):
+                The request object. Request message for
+                [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue].
+            name (:class:`str`):
+                Required. The queue name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta2.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, target types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = cloudtasks.PauseQueueRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.pause_queue,
+            default_timeout=20.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def resume_queue(
+        self,
+        request: Optional[Union[cloudtasks.ResumeQueueRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> queue.Queue:
+        r"""Resumes a queue.
+
+        This method resumes a queue after it has been
+        [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or
+        [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The
+        state of a queue is stored in the queue's
+        [state][google.cloud.tasks.v2beta2.Queue.state]; after calling
+        this method it will be set to
+        [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING].
+
+        WARNING: Resuming many high-QPS queues at the same time can lead
+        to target overloading.
If you are resuming high-QPS queues,
+        follow the 500/50/5 pattern described in `Managing Cloud Tasks
+        Scaling
+        Risks `__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta2
+
+            async def sample_resume_queue():
+                # Create a client
+                client = tasks_v2beta2.CloudTasksAsyncClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta2.ResumeQueueRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.resume_queue(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.tasks_v2beta2.types.ResumeQueueRequest, dict]]):
+                The request object. Request message for
+                [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue].
+            name (:class:`str`):
+                Required. The queue name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta2.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, target types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = cloudtasks.ResumeQueueRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.resume_queue,
+            default_timeout=20.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def upload_queue_yaml(
+        self,
+        request: Optional[Union[cloudtasks.UploadQueueYamlRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Updates the queue list by uploading a queue.yaml file.
+
+        The queue.yaml file is supplied in the request body as a
+        YAML encoded string.
This method was added to support + gcloud clients versions before 322.0.0. New clients + should use CreateQueue instead of this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_upload_queue_yaml(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.UploadQueueYamlRequest( + app_id="app_id_value", + ) + + # Make the request + await client.upload_queue_yaml(request=request) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.UploadQueueYamlRequest, dict]]): + The request object. Request message for + [UploadQueueYaml][google.cloud.tasks.v2beta2.CloudTasks.UploadQueueYaml]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = cloudtasks.UploadQueueYamlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.upload_queue_yaml, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. 
+ resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
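+ # The default retry below implements exponential backoff: the first
+ # delay is 0.1s, each subsequent delay multiplies by 1.3 (capped at
+ # 10s per attempt), and no further attempt starts once the overall
+ # 20s deadline elapses. Only DeadlineExceeded and ServiceUnavailable
+ # errors are retried; everything else surfaces to the caller.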
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_iam_policy,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=10.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=20.0,
+ ),
+ default_timeout=20.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def set_iam_policy(
+ self,
+ request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Sets the access control policy for a
+ [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing
+ policy.
+
+ Note: The Cloud Console does not check queue-level IAM
+ permissions yet. Project-level permissions are required to use
+ the Cloud Console.
+
+ Authorization requires the following `Google
+ IAM <https://cloud.google.com/iam>`__ permission on the
+ specified resource parent:
+
+ - ``cloudtasks.queues.setIamPolicy``
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import tasks_v2beta2
+ from google.iam.v1 import iam_policy_pb2 # type: ignore
+
+ async def sample_set_iam_policy():
+ # Create a client
+ client = tasks_v2beta2.CloudTasksAsyncClient()
+
+ # Initialize request argument(s)
+ request = iam_policy_pb2.SetIamPolicyRequest(
+ resource="resource_value",
+ )
+
+ # Make the request
+ response = await client.set_iam_policy(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]):
+ The request object. Request message for ``SetIamPolicy`` method.
+ resource (:class:`str`):
+ REQUIRED: The resource for which the
+ policy is being specified. See the
+ operation documentation for the
+ appropriate value for this field.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.iam.v1.policy_pb2.Policy:
+ An Identity and Access Management (IAM) policy, which specifies access
+ controls for Google Cloud resources.
+
+ A Policy is a collection of bindings. A binding binds
+ one or more members, or principals, to a single role.
+ Principals can be user accounts, service accounts,
+ Google groups, and domains (such as G Suite).
A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response
+
+ async def test_iam_permissions(
+ self,
+ request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ permissions: Optional[MutableSequence[str]] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
+ r"""Returns permissions that a caller has on a
+ [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does
+ not exist, this will return an empty set of permissions, not a
+ [NOT_FOUND][google.rpc.Code.NOT_FOUND] error.
+
+ Note: This operation is designed to be used for building
+ permission-aware UIs and command-line tools, not for
+ authorization checking. This operation may "fail open" without
+ warning.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import tasks_v2beta2
+ from google.iam.v1 import iam_policy_pb2 # type: ignore
+
+ async def sample_test_iam_permissions():
+ # Create a client
+ client = tasks_v2beta2.CloudTasksAsyncClient()
+
+ # Initialize request argument(s)
+ request = iam_policy_pb2.TestIamPermissionsRequest(
+ resource="resource_value",
+ permissions=['permissions_value1', 'permissions_value2'],
+ )
+
+ # Make the request
+ response = await client.test_iam_permissions(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]):
+ The request object. Request message for ``TestIamPermissions`` method.
+ resource (:class:`str`):
+ REQUIRED: The resource for which the
+ policy detail is being requested. See
+ the operation documentation for the
+ appropriate value for this field.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ permissions (:class:`MutableSequence[str]`):
+ The set of permissions to check for the ``resource``.
+ Permissions with wildcards (such as '*' or 'storage.*')
+ are not allowed. For more information see `IAM
+ Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+ This corresponds to the ``permissions`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([resource, permissions])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
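+ # Note: unlike the proto-plus request types above (for example
+ # cloudtasks.ResumeQueueRequest, whose constructor accepts another
+ # message instance directly), the raw protobuf iam_policy_pb2 types
+ # only accept keyword arguments, so a dict is expanded with ** and
+ # the flattened fields are supplied at construction time below.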
+ if isinstance(request, dict):
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+ elif not request:
+ request = iam_policy_pb2.TestIamPermissionsRequest(
+ resource=resource,
+ permissions=permissions,
+ )
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.test_iam_permissions,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=10.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=20.0,
+ ),
+ default_timeout=20.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_tasks(
+ self,
+ request: Optional[Union[cloudtasks.ListTasksRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListTasksAsyncPager:
+ r"""Lists the tasks in a queue.
+
+ By default, only the
+ [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is
+ retrieved due to performance considerations;
+ [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view]
+ controls the subset of information which is returned.
+
+ The tasks may be returned in any order. The ordering may change
+ at any time.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import tasks_v2beta2
+
+ async def sample_list_tasks():
+ # Create a client
+ client = tasks_v2beta2.CloudTasksAsyncClient()
+
+ # Initialize request argument(s)
+ request = tasks_v2beta2.ListTasksRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = await client.list_tasks(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.tasks_v2beta2.types.ListTasksRequest, dict]]):
+ The request object. Request message for listing tasks using
+ [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].
+ parent (:class:`str`):
+ Required. The queue name. For example:
+ ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListTasksAsyncPager:
+ Response message for listing tasks using
+ [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task( + self, + request: Optional[Union[cloudtasks.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_get_task(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.GetTaskRequest, dict]]): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_task( + self, + request: Optional[Union[cloudtasks.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[gct_task.Task] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_create_task(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.CreateTaskRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.CreateTaskRequest, dict]]): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`google.cloud.tasks_v2beta2.types.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta2.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or completed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1 hour + after the original task was deleted or completed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9 days after the original task was deleted or + completed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
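+ # to_grpc_metadata() folds the ("parent", ...) pair into the
+ # URL-encoded "x-goog-request-params" request header, which lets the
+ # Cloud Tasks frontend route the call by resource name without
+ # inspecting the request payload.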
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_task( + self, + request: Optional[Union[cloudtasks.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_delete_task(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + await client.delete_task(request=request) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.DeleteTaskRequest, dict]]): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
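+ # The caller-supplied `retry` and `timeout` default to
+ # gapic_v1.method.DEFAULT sentinels; when left unset, the defaults
+ # configured in wrap_method() above apply, and explicit values
+ # override them for this call only.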
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def lease_tasks( + self, + request: Optional[Union[cloudtasks.LeaseTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + lease_duration: Optional[duration_pb2.Duration] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.LeaseTasksResponse: + r"""Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_lease_tasks(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.LeaseTasksRequest( + parent="parent_value", + ) + + # Make the request + response = await client.lease_tasks(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.LeaseTasksRequest, dict]]): + The request object. Request message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (:class:`google.protobuf.duration_pb2.Duration`): + Required. The duration of the lease. + + Each task returned in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] + will have its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + set to the current time plus the ``lease_duration``. 
The
+ task is leased until its
+ [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time];
+ thus, the task will not be returned to another
+ [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
+ call before its
+ [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time].
+
+ After the worker has successfully finished the work
+ associated with the task, the worker must acknowledge it via
+ [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]
+ before the
+ [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time].
+ Otherwise the task will be returned to a later
+ [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
+ call so that another worker can retry it.
+
+ The maximum lease duration is 1 week. ``lease_duration``
+ will be truncated to the nearest second.
+
+ This corresponds to the ``lease_duration`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.tasks_v2beta2.types.LeaseTasksResponse:
+ Response message for leasing tasks using
+ [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks].
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, lease_duration])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = cloudtasks.LeaseTasksRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if lease_duration is not None:
+ request.lease_duration = lease_duration
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.lease_tasks,
+ default_timeout=20.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def acknowledge_task(
+ self,
+ request: Optional[Union[cloudtasks.AcknowledgeTaskRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ schedule_time: Optional[timestamp_pb2.Timestamp] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Acknowledges a pull task.
+
+ The worker, that is, the entity that
+ [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this
+ task, must call this method to indicate that the work associated
+ with the task has finished.
+
+ The worker must acknowledge a task within the
+ [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]
+ or the lease will expire and the task will become available to
+ be leased again.
After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_acknowledge_task(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.AcknowledgeTaskRequest( + name="name_value", + ) + + # Make the request + await client.acknowledge_task(request=request) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.AcknowledgeTaskRequest, dict]]): + The request object. Request message for acknowledging a task using + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.AcknowledgeTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.acknowledge_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
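+ # AcknowledgeTask returns google.protobuf.Empty, so the RPC result
+ # is intentionally discarded and this method returns None.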
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def renew_lease( + self, + request: Optional[Union[cloudtasks.RenewLeaseRequest, dict]] = None, + *, + name: Optional[str] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + lease_duration: Optional[duration_pb2.Duration] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_renew_lease(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.RenewLeaseRequest( + name="name_value", + ) + + # Make the request + response = await client.renew_lease(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.RenewLeaseRequest, dict]]): + The request object. Request message for renewing a lease using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (:class:`google.protobuf.duration_pb2.Duration`): + Required. The desired new lease duration, starting from + now. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
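+ # For example (names here are illustrative), either calling
+ # convention works:
+ #   await client.renew_lease(request=renew_request)
+ #   await client.renew_lease(name=task_name, schedule_time=ts, lease_duration=lease)
+ # but combining `request=` with any flattened field raises the
+ # ValueError below.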
+ has_flattened_params = any([name, schedule_time, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.RenewLeaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.renew_lease, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_lease( + self, + request: Optional[Union[cloudtasks.CancelLeaseRequest, dict]] = None, + *, + name: Optional[str] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_cancel_lease(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.CancelLeaseRequest( + name="name_value", + ) + + # Make the request + response = await client.cancel_lease(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.CancelLeaseRequest, dict]]): + The request object. Request message for canceling a lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. 
The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CancelLeaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_lease, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def run_task( + self, + request: Optional[Union[cloudtasks.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. 
+ + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + async def sample_run_task(): + # Create a client + client = tasks_v2beta2.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta2.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta2.types.RunTaskRequest, dict]]): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response
+
+ async def buffer_task(
+ self,
+ request: Optional[Union[cloudtasks.BufferTaskRequest, dict]] = None,
+ *,
+ queue: Optional[str] = None,
+ task_id: Optional[str] = None,
+ body: Optional[httpbody_pb2.HttpBody] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> cloudtasks.BufferTaskResponse:
+ r"""Creates and buffers a new task without the need to explicitly
+ define a Task message. The queue must have [HTTP
+ target][google.cloud.tasks.v2beta2.HttpTarget]. To create the
+ task with a custom ID, use the following format and set TASK_ID
+ to your desired ID:
+ projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer
+ To create the task with an automatically generated ID, use the
+ following format:
+ projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer.
+ Note: This feature is in its experimental stage. You must
+ request access to the API through the `Cloud Tasks BufferTask
+ Experiment Signup form `__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import tasks_v2beta2
+
+ async def sample_buffer_task():
+ # Create a client
+ client = tasks_v2beta2.CloudTasksAsyncClient()
+
+ # Initialize request argument(s)
+ request = tasks_v2beta2.BufferTaskRequest(
+ queue="queue_value",
+ )
+
+ # Make the request
+ response = await client.buffer_task(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.tasks_v2beta2.types.BufferTaskRequest, dict]]):
+ The request object. Request message for
+ [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask].
+ queue (:class:`str`):
+ Required. The parent queue name. For example:
+ ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+ The queue must already exist.
+
+ This corresponds to the ``queue`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ task_id (:class:`str`):
+ Optional. Task ID for the task being
+ created. If not provided, a random task
+ ID is assigned to the task.
+
+ This corresponds to the ``task_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ body (:class:`google.api.httpbody_pb2.HttpBody`):
+ Optional. Body of the HTTP request.
+
+ The body can take any generic value. The value is
+ written to the [HttpRequest][payload] of the [Task].
+
+ This corresponds to the ``body`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.tasks_v2beta2.types.BufferTaskResponse:
+ Response message for
+ [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask].
+
+ """
+ # Create or coerce a protobuf request object.
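+ # Unlike the other methods in this client, buffer_task routes on two
+ # fields: both `queue` and `task_id` are folded into the
+ # x-goog-request-params routing header further down.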
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, task_id, body]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.BufferTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if task_id is not None: + request.task_id = task_id + if body is not None: + request.body = body + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.buffer_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("queue", request.queue), + ("task_id", request.task_id), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "CloudTasksAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudTasksAsyncClient",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py new file mode 100644 index 000000000000..3120b9e0aa65 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/client.py @@ -0,0 +1,3317 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.tasks_v2beta2.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import target +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task + +from .transports.base import DEFAULT_CLIENT_INFO, CloudTasksTransport +from .transports.grpc import CloudTasksGrpcTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .transports.rest import CloudTasksRestTransport + + +class CloudTasksClientMeta(type): + """Metaclass for the CloudTasks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] + _transport_registry["grpc"] = CloudTasksGrpcTransport + _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + _transport_registry["rest"] = CloudTasksRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudTasksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudTasksClient(metaclass=CloudTasksClientMeta): + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "cloudtasks.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudTasksClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudTasksClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> CloudTasksTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            CloudTasksTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def queue_path(
+        project: str,
+        location: str,
+        queue: str,
+    ) -> str:
+        """Returns a fully-qualified queue string."""
+        return "projects/{project}/locations/{location}/queues/{queue}".format(
+            project=project,
+            location=location,
+            queue=queue,
+        )
+
+    @staticmethod
+    def parse_queue_path(path: str) -> Dict[str, str]:
+        """Parses a queue path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def task_path(
+        project: str,
+        location: str,
+        queue: str,
+        task: str,
+    ) -> str:
+        """Returns a fully-qualified task string."""
+        return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(
+            project=project,
+            location=location,
+            queue=queue,
+            task=task,
+        )
+
+    @staticmethod
+    def parse_task_path(path: str) -> Dict[str, str]:
+        """Parses a task path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)/tasks/(?P<task>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, CloudTasksTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cloud tasks client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CloudTasksTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_queues( + self, + request: Optional[Union[cloudtasks.ListQueuesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + + Queues are returned in lexicographical order. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_list_queues(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.ListQueuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_queues(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.ListQueuesRequest, dict]): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
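+        # Usage sketch (illustrative; the parent path is a placeholder): the
+        # pager fetches pages lazily, so iterating over it may issue further
+        # ListQueues RPCs behind the scenes:
+        #
+        #     for queue in client.list_queues(parent="projects/my-project/locations/us-central1"):
+        #         print(queue.name)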
+ return response + + def get_queue( + self, + request: Optional[Union[cloudtasks.GetQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_get_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.GetQueueRequest( + name="name_value", + ) + + # Make the request + response = client.get_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.GetQueueRequest, dict]): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
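+        # Usage sketch (illustrative; `queue_name` is a placeholder): given the
+        # request coercion above, the three call styles are equivalent:
+        #
+        #     client.get_queue(request=tasks_v2beta2.GetQueueRequest(name=queue_name))
+        #     client.get_queue(request={"name": queue_name})
+        #     client.get_queue(name=queue_name)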
+        return response
+
+    def create_queue(
+        self,
+        request: Optional[Union[cloudtasks.CreateQueueRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        queue: Optional[gct_queue.Queue] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> gct_queue.Queue:
+        r"""Creates a queue.
+
+        Queues created with this method allow tasks to live for a
+        maximum of 31 days. After a task is 31 days old, the task will
+        be deleted regardless of whether it was dispatched or not.
+
+        WARNING: Using this method may have unintended side effects if
+        you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
+        to manage your queues. Read `Overview of Queue Management and
+        queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
+        before using this method.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta2
+
+            def sample_create_queue():
+                # Create a client
+                client = tasks_v2beta2.CloudTasksClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta2.CreateQueueRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                response = client.create_queue(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.tasks_v2beta2.types.CreateQueueRequest, dict]):
+                The request object. Request message for
+                [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue].
+            parent (str):
+                Required. The location name in which the queue will be
+                created. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID``
+
+                The list of allowed locations can be obtained by calling
+                Cloud Tasks' implementation of
+                [ListLocations][google.cloud.location.Locations.ListLocations].
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            queue (google.cloud.tasks_v2beta2.types.Queue):
+                Required. The queue to create.
+
+                [Queue's name][google.cloud.tasks.v2beta2.Queue.name]
+                cannot be the same as an existing queue.
+
+                This corresponds to the ``queue`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta2.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, target types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, queue])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudtasks.CreateQueueRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloudtasks.CreateQueueRequest):
+            request = cloudtasks.CreateQueueRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if queue is not None:
+                request.queue = queue
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_queue]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_queue(
+        self,
+        request: Optional[Union[cloudtasks.UpdateQueueRequest, dict]] = None,
+        *,
+        queue: Optional[gct_queue.Queue] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> gct_queue.Queue:
+        r"""Updates a queue.
+
+        This method creates the queue if it does not exist and updates
+        the queue if it does exist.
+
+        Queues created with this method allow tasks to live for a
+        maximum of 31 days. After a task is 31 days old, the task will
+        be deleted regardless of whether it was dispatched or not.
+
+        WARNING: Using this method may have unintended side effects if
+        you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
+        to manage your queues. Read `Overview of Queue Management and
+        queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
+        before using this method.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta2
+
+            def sample_update_queue():
+                # Create a client
+                client = tasks_v2beta2.CloudTasksClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta2.UpdateQueueRequest(
+                )
+
+                # Make the request
+                response = client.update_queue(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.tasks_v2beta2.types.UpdateQueueRequest, dict]):
+                The request object. Request message for
+                [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue].
+            queue (google.cloud.tasks_v2beta2.types.Queue):
+                Required. The queue to create or update.
+
+                The queue's
+                [name][google.cloud.tasks.v2beta2.Queue.name] must be
+                specified.
+
+                Output only fields cannot be modified using UpdateQueue.
+                Any value specified for an output only field will be
+                ignored. The queue's
+                [name][google.cloud.tasks.v2beta2.Queue.name] cannot be
+                changed.
+
+                This corresponds to the ``queue`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                A mask used to specify which fields
+                of the queue are being updated.
+                If empty, then all fields will be
+                updated.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta2.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, target types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([queue, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudtasks.UpdateQueueRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloudtasks.UpdateQueueRequest):
+            request = cloudtasks.UpdateQueueRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if queue is not None:
+                request.queue = queue
+            if update_mask is not None:
+                request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_queue]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("queue.name", request.queue.name),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_queue(
+        self,
+        request: Optional[Union[cloudtasks.DeleteQueueRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Deletes a queue.
+
+        This command will delete the queue even if it has tasks in it.
+
+        Note: If you delete a queue, a queue with the same name can't be
+        created for 7 days.
+
+        WARNING: Using this method may have unintended side effects if
+        you are using an App Engine ``queue.yaml`` or ``queue.xml`` file
+        to manage your queues. Read `Overview of Queue Management and
+        queue.yaml <https://cloud.google.com/tasks/docs/queue-yaml>`__
+        before using this method.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_delete_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.DeleteQueueRequest( + name="name_value", + ) + + # Make the request + client.delete_queue(request=request) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.DeleteQueueRequest, dict]): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def purge_queue( + self, + request: Optional[Union[cloudtasks.PurgeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_purge_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.PurgeQueueRequest( + name="name_value", + ) + + # Make the request + response = client.purge_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.PurgeQueueRequest, dict]): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PurgeQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PurgeQueueRequest): + request = cloudtasks.PurgeQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.purge_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def pause_queue( + self, + request: Optional[Union[cloudtasks.PauseQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. 
A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_pause_queue(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.PauseQueueRequest( + name="name_value", + ) + + # Make the request + response = client.pause_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.PauseQueueRequest, dict]): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.PauseQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.PauseQueueRequest): + request = cloudtasks.PauseQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
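+        # Usage sketch (illustrative; `queue_name` is a placeholder): the
+        # returned Queue reflects the new state, which can be verified before
+        # the queue is later resumed:
+        #
+        #     paused = client.pause_queue(name=queue_name)
+        #     assert paused.state == tasks_v2beta2.Queue.State.PAUSED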
+        return response
+
+    def resume_queue(
+        self,
+        request: Optional[Union[cloudtasks.ResumeQueueRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> queue.Queue:
+        r"""Resume a queue.
+
+        This method resumes a queue after it has been
+        [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or
+        [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The
+        state of a queue is stored in the queue's
+        [state][google.cloud.tasks.v2beta2.Queue.state]; after calling
+        this method it will be set to
+        [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING].
+
+        WARNING: Resuming many high-QPS queues at the same time can lead
+        to target overloading. If you are resuming high-QPS queues,
+        follow the 500/50/5 pattern described in `Managing Cloud Tasks
+        Scaling Risks <https://cloud.google.com/tasks/docs/manage-cloud-task-scaling>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta2
+
+            def sample_resume_queue():
+                # Create a client
+                client = tasks_v2beta2.CloudTasksClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta2.ResumeQueueRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.resume_queue(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.tasks_v2beta2.types.ResumeQueueRequest, dict]):
+                The request object. Request message for
+                [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue].
+            name (str):
+                Required. The queue name. For example:
+                ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta2.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, target types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudtasks.ResumeQueueRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloudtasks.ResumeQueueRequest):
+            request = cloudtasks.ResumeQueueRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def upload_queue_yaml( + self, + request: Optional[Union[cloudtasks.UploadQueueYamlRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Update queue list by uploading a queue.yaml file. + + The queue.yaml file is supplied in the request body as a + YAML encoded string. This method was added to support + gcloud clients versions before 322.0.0. New clients + should use CreateQueue instead of this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_upload_queue_yaml(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.UploadQueueYamlRequest( + app_id="app_id_value", + ) + + # Make the request + client.upload_queue_yaml(request=request) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.UploadQueueYamlRequest, dict]): + The request object. Request message for + [UploadQueueYaml][google.cloud.tasks.v2beta2.CloudTasks.UploadQueueYaml]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UploadQueueYamlRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UploadQueueYamlRequest): + request = cloudtasks.UploadQueueYamlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.upload_queue_yaml] + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. 
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        -  ``cloudtasks.queues.getIamPolicy``
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta2
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            def sample_get_iam_policy():
+                # Create a client
+                client = tasks_v2beta2.CloudTasksClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.GetIamPolicyRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = client.get_iam_policy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]):
+                The request object. Request message for ``GetIamPolicy`` method.
+            resource (str):
+                REQUIRED: The resource for which the
+                policy is being requested. See the
+                operation documentation for the
+                appropriate value for this field.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.iam.v1.policy_pb2.Policy:
+                An Identity and Access Management (IAM) policy, which specifies access
+                controls for Google Cloud resources.
+
+                A Policy is a collection of bindings. A binding binds
+                one or more members, or principals, to a single role.
+                Principals can be user accounts, service accounts,
+                Google groups, and domains (such as G Suite). A role
+                is a named list of permissions; each role can be an
+                IAM predefined role or a user-created custom role.
+
+                For some types of Google Cloud resources, a binding
+                can also specify a condition, which is a logical
+                expression that allows access to a resource only if
+                the expression evaluates to true. A condition can add
+                constraints based on attributes of the request, the
+                resource, or both. To learn which resources support
+                conditions in their IAM policies, see the [IAM
+                documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies).
+
+                **JSON example:**
+
+                :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \`
+
+                **YAML example:**
+
+                :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \`
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](\ https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        if isinstance(request, dict):
+            # The request isn't a proto-plus wrapped type,
+            # so it must be constructed via keyword expansion.
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+        elif not request:
+            # Null request, just make one.
+            request = iam_policy_pb2.GetIamPolicyRequest()
+            if resource is not None:
+                request.resource = resource
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def set_iam_policy(
+        self,
+        request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+        *,
+        resource: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the access control policy for a
+        [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing
+        policy.
+
+        Note: The Cloud Console does not check queue-level IAM
+        permissions yet. Project-level permissions are required to use
+        the Cloud Console.
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        -  ``cloudtasks.queues.setIamPolicy``
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. 
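+        # (Editorial note: as in get_iam_policy above, a plain dict such as
+        # {"resource": "projects/p/locations/l/queues/q", "policy": policy}
+        # is expanded into an iam_policy_pb2.SetIamPolicyRequest below; the
+        # path shown is hypothetical.)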
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + resource (str): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+            permissions (MutableSequence[str]):
+                The set of permissions to check for the ``resource``.
+                Permissions with wildcards (such as '*' or 'storage.*')
+                are not allowed. For more information see `IAM
+                Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+                This corresponds to the ``permissions`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for TestIamPermissions method.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([resource, permissions])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        if isinstance(request, dict):
+            # The request isn't a proto-plus wrapped type,
+            # so it must be constructed via keyword expansion.
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+        elif not request:
+            # Null request, just make one.
+            request = iam_policy_pb2.TestIamPermissionsRequest()
+            if resource is not None:
+                request.resource = resource
+            if permissions:
+                request.permissions.extend(permissions)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_tasks(
+        self,
+        request: Optional[Union[cloudtasks.ListTasksRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListTasksPager:
+        r"""Lists the tasks in a queue.
+
+        By default, only the
+        [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is
+        retrieved due to performance considerations;
+        [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view]
+        controls the subset of information which is returned.
+
+        The tasks may be returned in any order. The ordering may change
+        at any time.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_list_tasks(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.ListTasksRequest, dict]): + The request object. Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.services.cloud_tasks.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_task( + self, + request: Optional[Union[cloudtasks.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_get_task(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.GetTaskRequest, dict]): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_task( + self, + request: Optional[Union[cloudtasks.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[gct_task.Task] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. 
+ - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_create_task(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.CreateTaskRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.CreateTaskRequest, dict]): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (google.cloud.tasks_v2beta2.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta2.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta2.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or completed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1 hour + after the original task was deleted or completed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9 days after the original task was deleted or + completed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_task( + self, + request: Optional[Union[cloudtasks.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_delete_task(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + client.delete_task(request=request) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.DeleteTaskRequest, dict]): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
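+
+        For example (an illustrative, editorial sketch; the task path is
+        hypothetical), the flattened ``name`` argument may be passed
+        directly:
+
+        .. code-block:: python
+
+            from google.cloud import tasks_v2beta2
+
+            client = tasks_v2beta2.CloudTasksClient()
+            client.delete_task(
+                name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task",
+            )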
+ """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def lease_tasks( + self, + request: Optional[Union[cloudtasks.LeaseTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + lease_duration: Optional[duration_pb2.Duration] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.LeaseTasksResponse: + r"""Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_lease_tasks(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.LeaseTasksRequest( + parent="parent_value", + ) + + # Make the request + response = client.lease_tasks(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.LeaseTasksRequest, dict]): + The request object. Request message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (google.protobuf.duration_pb2.Duration): + Required. The duration of the lease. + + Each task returned in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] + will have its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + set to the current time plus the ``lease_duration``. The + task is leased until its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]; + thus, the task will not be returned to another + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call before its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + After the worker has successfully finished the work + associated with the task, the worker must call via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + before the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + Otherwise the task will be returned to a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + call so that another worker can retry it. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.LeaseTasksResponse: + Response message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.LeaseTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.LeaseTasksRequest): + request = cloudtasks.LeaseTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
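+        # (Editorial note: for example, a caller might pass
+        # parent="projects/p/locations/l/queues/q" together with
+        # lease_duration=duration_pb2.Duration(seconds=600); both values are
+        # copied onto the request just below. The path is hypothetical.)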
+ if parent is not None: + request.parent = parent + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lease_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def acknowledge_task( + self, + request: Optional[Union[cloudtasks.AcknowledgeTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_acknowledge_task(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.AcknowledgeTaskRequest( + name="name_value", + ) + + # Make the request + client.acknowledge_task(request=request) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.AcknowledgeTaskRequest, dict]): + The request object. Request message for acknowledging a task using + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.AcknowledgeTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.AcknowledgeTaskRequest): + request = cloudtasks.AcknowledgeTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.acknowledge_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def renew_lease( + self, + request: Optional[Union[cloudtasks.RenewLeaseRequest, dict]] = None, + *, + name: Optional[str] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + lease_duration: Optional[duration_pb2.Duration] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_renew_lease(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.RenewLeaseRequest( + name="name_value", + ) + + # Make the request + response = client.renew_lease(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.RenewLeaseRequest, dict]): + The request object. Request message for renewing a lease using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + name (str): + Required. The task name. 
For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lease_duration (google.protobuf.duration_pb2.Duration): + Required. The desired new lease duration, starting from + now. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + + This corresponds to the ``lease_duration`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time, lease_duration]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RenewLeaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RenewLeaseRequest): + request = cloudtasks.RenewLeaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + if lease_duration is not None: + request.lease_duration = lease_duration + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.renew_lease] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_lease( + self, + request: Optional[Union[cloudtasks.CancelLeaseRequest, dict]] = None, + *, + name: Optional[str] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Cancel a pull task's lease. 
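+
+        As an illustrative sketch (an editorial addition, not generated
+        code; the queue path is hypothetical), a worker that decides not
+        to process a leased task can hand it back early:
+
+        .. code-block:: python
+
+            from google.cloud import tasks_v2beta2
+
+            client = tasks_v2beta2.CloudTasksClient()
+            lease = client.lease_tasks(
+                request={
+                    "parent": "projects/p/locations/l/queues/q",
+                    "lease_duration": {"seconds": 600},
+                    "max_tasks": 1,
+                },
+            )
+            for leased_task in lease.tasks:
+                client.cancel_lease(
+                    name=leased_task.name,
+                    schedule_time=leased_task.schedule_time,
+                )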
+ + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_cancel_lease(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.CancelLeaseRequest( + name="name_value", + ) + + # Make the request + response = client.cancel_lease(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.CancelLeaseRequest, dict]): + The request object. Request message for canceling a lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in + the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CancelLeaseRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CancelLeaseRequest): + request = cloudtasks.CancelLeaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
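+        # (Editorial note: when `retry` and `timeout` are left as
+        # gapic_v1.method.DEFAULT, the wrapped method substitutes the default
+        # retry policy and timeout configured for this API.)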
+ rpc = self._transport._wrapped_methods[self._transport.cancel_lease] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def run_task( + self, + request: Optional[Union[cloudtasks.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_run_task(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.RunTaskRequest, dict]): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def buffer_task( + self, + request: Optional[Union[cloudtasks.BufferTaskRequest, dict]] = None, + *, + queue: Optional[str] = None, + task_id: Optional[str] = None, + body: Optional[httpbody_pb2.HttpBody] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.BufferTaskResponse: + r"""Creates and buffers a new task without the need to explicitly + define a Task message. The queue must have [HTTP + target][google.cloud.tasks.v2beta2.HttpTarget]. To create the + task with a custom ID, use the following format and set TASK_ID + to your desired ID: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer + To create the task with an automatically generated ID, use the + following format: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. + Note: This feature is in its experimental stage. You must + request access to the API through the `Cloud Tasks BufferTask + Experiment Signup form `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta2 + + def sample_buffer_task(): + # Create a client + client = tasks_v2beta2.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta2.BufferTaskRequest( + queue="queue_value", + ) + + # Make the request + response = client.buffer_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta2.types.BufferTaskRequest, dict]): + The request object. LINT.IfChange Request message for + [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. + queue (str): + Required. The parent queue name. For example: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` + + The queue must already exist. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task_id (str): + Optional. Task ID for the task being + created. If not provided, a random task + ID is assigned to the task. + + This corresponds to the ``task_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + body (google.api.httpbody_pb2.HttpBody): + Optional. Body of the HTTP request. + + The body can take any generic value. The value is + written to the [HttpRequest][payload] of the [Task]. + + This corresponds to the ``body`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta2.types.BufferTaskResponse: + Response message for + [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, task_id, body]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.BufferTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.BufferTaskRequest): + request = cloudtasks.BufferTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if task_id is not None: + request.task_id = task_id + if body is not None: + request.body = body + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.buffer_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("queue", request.queue), + ("task_id", request.task_id), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
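+        # (Editorial note: the BufferTaskResponse carries the created task, so
+        # callers can read e.g. `response.task.name` to obtain the
+        # server-assigned task ID when no `task_id` was supplied.)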
+ return response + + def __enter__(self) -> "CloudTasksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudTasksClient",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py new file mode 100644 index 000000000000..4b0bcde2a228 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.tasks_v2beta2.types import cloudtasks, queue, task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta2.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2beta2.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
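+
+        A minimal consumption sketch (hedged: ``client`` is an assumed,
+        pre-configured ``CloudTasksClient``; ``list_queues`` returns this
+        pager, so it is rarely constructed by hand):
+
+        .. code-block:: python
+
+            # Iterating the pager transparently fetches further pages.
+            for queue in client.list_queues(parent="projects/p/locations/l"):
+                print(queue.name)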
+ """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta2.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta2.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2beta2.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta2.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2beta2.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta2.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta2.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2beta2.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
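+
+        A minimal async consumption sketch (hedged: ``client`` is an
+        assumed, pre-configured ``CloudTasksAsyncClient``; its
+        ``list_tasks`` call returns this pager):
+
+        .. code-block:: python
+
+            pager = await client.list_tasks(parent="projects/p/locations/l/queues/q")
+            async for task in pager:  # further pages are awaited on demand
+                print(task.name)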
+ """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py new file mode 100644 index 000000000000..0a9dffaab4fe --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudTasksTransport +from .grpc import CloudTasksGrpcTransport +from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .rest import CloudTasksRestInterceptor, CloudTasksRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] +_transport_registry["grpc"] = CloudTasksGrpcTransport +_transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport +_transport_registry["rest"] = CloudTasksRestTransport + +__all__ = ( + "CloudTasksTransport", + "CloudTasksGrpcTransport", + "CloudTasksGrpcAsyncIOTransport", + "CloudTasksRestTransport", + "CloudTasksRestInterceptor", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py new file mode 100644 index 000000000000..f1f8bd036c79 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/base.py @@ -0,0 +1,548 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.tasks_v2beta2 import gapic_version as package_version +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CloudTasksTransport(abc.ABC): + """Abstract transport class for CloudTasks.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudtasks.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
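+        # Exactly one credential source is used below: an explicit
+        # ``credentials_file``, an explicit ``credentials`` object, or
+        # Application Default Credentials via ``google.auth.default()``.
+        # Supplying both of the first two is ambiguous, so it is rejected
+        # up front with ``DuplicateCredentialArgs``.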
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_queues: gapic_v1.method.wrap_method( + self.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_queue: gapic_v1.method.wrap_method( + self.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_queue: gapic_v1.method.wrap_method( + self.create_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.update_queue: gapic_v1.method.wrap_method( + self.update_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.delete_queue: gapic_v1.method.wrap_method( + self.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.purge_queue: gapic_v1.method.wrap_method( + self.purge_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.pause_queue: gapic_v1.method.wrap_method( + self.pause_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.resume_queue: gapic_v1.method.wrap_method( + self.resume_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.upload_queue_yaml: gapic_v1.method.wrap_method( + self.upload_queue_yaml, + default_timeout=20.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=20.0, + 
client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, + default_timeout=20.0, + client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.lease_tasks: gapic_v1.method.wrap_method( + self.lease_tasks, + default_timeout=20.0, + client_info=client_info, + ), + self.acknowledge_task: gapic_v1.method.wrap_method( + self.acknowledge_task, + default_timeout=20.0, + client_info=client_info, + ), + self.renew_lease: gapic_v1.method.wrap_method( + self.renew_lease, + default_timeout=20.0, + client_info=client_info, + ), + self.cancel_lease: gapic_v1.method.wrap_method( + self.cancel_lease, + default_timeout=20.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, + default_timeout=20.0, + client_info=client_info, + ), + self.buffer_task: gapic_v1.method.wrap_method( + self.buffer_task, + default_timeout=20.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
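+
+        A sketch of the usual pattern that avoids calling ``close()``
+        directly (hedged: assumes the transport is owned by a single
+        client):
+
+        .. code-block:: python
+
+            with tasks_v2beta2.CloudTasksClient() as client:
+                client.list_queues(parent="projects/p/locations/l")
+            # leaving the block closes the client's transport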
+ """ + raise NotImplementedError() + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], + Union[cloudtasks.ListQueuesResponse, Awaitable[cloudtasks.ListQueuesResponse]], + ]: + raise NotImplementedError() + + @property + def get_queue( + self, + ) -> Callable[ + [cloudtasks.GetQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def create_queue( + self, + ) -> Callable[ + [cloudtasks.CreateQueueRequest], + Union[gct_queue.Queue, Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def update_queue( + self, + ) -> Callable[ + [cloudtasks.UpdateQueueRequest], + Union[gct_queue.Queue, Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def delete_queue( + self, + ) -> Callable[ + [cloudtasks.DeleteQueueRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def purge_queue( + self, + ) -> Callable[ + [cloudtasks.PurgeQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def pause_queue( + self, + ) -> Callable[ + [cloudtasks.PauseQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def resume_queue( + self, + ) -> Callable[ + [cloudtasks.ResumeQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def upload_queue_yaml( + self, + ) -> Callable[ + [cloudtasks.UploadQueueYamlRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_tasks( + self, + ) -> Callable[ + [cloudtasks.ListTasksRequest], + Union[cloudtasks.ListTasksResponse, Awaitable[cloudtasks.ListTasksResponse]], + ]: + raise NotImplementedError() + + @property + def get_task( + self, + ) -> Callable[[cloudtasks.GetTaskRequest], Union[task.Task, Awaitable[task.Task]]]: + raise NotImplementedError() + + @property + def create_task( + self, + ) -> Callable[ + [cloudtasks.CreateTaskRequest], Union[gct_task.Task, Awaitable[gct_task.Task]] + ]: + raise NotImplementedError() + + @property + def delete_task( + self, + ) -> Callable[ + [cloudtasks.DeleteTaskRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def lease_tasks( + self, + ) -> Callable[ + [cloudtasks.LeaseTasksRequest], + Union[cloudtasks.LeaseTasksResponse, Awaitable[cloudtasks.LeaseTasksResponse]], + ]: + raise NotImplementedError() + + @property + def acknowledge_task( + self, + ) -> Callable[ + [cloudtasks.AcknowledgeTaskRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def renew_lease( + self, + ) -> Callable[ + [cloudtasks.RenewLeaseRequest], Union[task.Task, 
Awaitable[task.Task]] + ]: + raise NotImplementedError() + + @property + def cancel_lease( + self, + ) -> Callable[ + [cloudtasks.CancelLeaseRequest], Union[task.Task, Awaitable[task.Task]] + ]: + raise NotImplementedError() + + @property + def run_task( + self, + ) -> Callable[[cloudtasks.RunTaskRequest], Union[task.Task, Awaitable[task.Task]]]: + raise NotImplementedError() + + @property + def buffer_task( + self, + ) -> Callable[ + [cloudtasks.BufferTaskRequest], + Union[cloudtasks.BufferTaskResponse, Awaitable[cloudtasks.BufferTaskResponse]], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CloudTasksTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py new file mode 100644 index 000000000000..781246213eb3 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc.py @@ -0,0 +1,1050 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task + +from .base import DEFAULT_CLIENT_INFO, CloudTasksTransport + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
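+
+    A short selection sketch (hedged: most callers simply let the client
+    choose a transport; passing the ``transport`` name forces this one):
+
+    .. code-block:: python
+
+        from google.cloud import tasks_v2beta2
+
+        client = tasks_v2beta2.CloudTasksClient(transport="grpc")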
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + ~.ListQueuesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], empty_pb2.Empty]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def upload_queue_yaml( + self, + ) -> Callable[[cloudtasks.UploadQueueYamlRequest], empty_pb2.Empty]: + r"""Return a callable for the upload queue yaml method over gRPC. + + Update queue list by uploading a queue.yaml file. + + The queue.yaml file is supplied in the request body as a + YAML encoded string. This method was added to support + gcloud clients versions before 322.0.0. 
+        should use CreateQueue instead of this method.
+
+        Returns:
+            Callable[[~.UploadQueueYamlRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "upload_queue_yaml" not in self._stubs:
+            self._stubs["upload_queue_yaml"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2beta2.CloudTasks/UploadQueueYaml",
+                request_serializer=cloudtasks.UploadQueueYamlRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["upload_queue_yaml"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the access control policy for a
+        [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty
+        policy if the resource exists and does not have a policy set.
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        - ``cloudtasks.queues.getIamPolicy``
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the access control policy for a
+        [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing
+        policy.
+
+        Note: The Cloud Console does not check queue-level IAM
+        permissions yet. Project-level permissions are required to use
+        the Cloud Console.
+
+        Authorization requires the following `Google
+        IAM <https://cloud.google.com/iam>`__ permission on the
+        specified resource parent:
+
+        - ``cloudtasks.queues.setIamPolicy``
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Returns permissions that a caller has on a
+        [Queue][google.cloud.tasks.v2beta2.Queue].
If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. 
+ - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty_pb2.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def lease_tasks( + self, + ) -> Callable[[cloudtasks.LeaseTasksRequest], cloudtasks.LeaseTasksResponse]: + r"""Return a callable for the lease tasks method over gRPC. + + Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Returns: + Callable[[~.LeaseTasksRequest], + ~.LeaseTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "lease_tasks" not in self._stubs: + self._stubs["lease_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks", + request_serializer=cloudtasks.LeaseTasksRequest.serialize, + response_deserializer=cloudtasks.LeaseTasksResponse.deserialize, + ) + return self._stubs["lease_tasks"] + + @property + def acknowledge_task( + self, + ) -> Callable[[cloudtasks.AcknowledgeTaskRequest], empty_pb2.Empty]: + r"""Return a callable for the acknowledge task method over gRPC. + + Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Returns: + Callable[[~.AcknowledgeTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge_task" not in self._stubs: + self._stubs["acknowledge_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask", + request_serializer=cloudtasks.AcknowledgeTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["acknowledge_task"] + + @property + def renew_lease(self) -> Callable[[cloudtasks.RenewLeaseRequest], task.Task]: + r"""Return a callable for the renew lease method over gRPC. + + Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Returns: + Callable[[~.RenewLeaseRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "renew_lease" not in self._stubs: + self._stubs["renew_lease"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/RenewLease", + request_serializer=cloudtasks.RenewLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["renew_lease"] + + @property + def cancel_lease(self) -> Callable[[cloudtasks.CancelLeaseRequest], task.Task]: + r"""Return a callable for the cancel lease method over gRPC. + + Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. 
+ + Returns: + Callable[[~.CancelLeaseRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_lease" not in self._stubs: + self._stubs["cancel_lease"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CancelLease", + request_serializer=cloudtasks.CancelLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["cancel_lease"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Returns: + Callable[[~.RunTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + @property + def buffer_task( + self, + ) -> Callable[[cloudtasks.BufferTaskRequest], cloudtasks.BufferTaskResponse]: + r"""Return a callable for the buffer task method over gRPC. + + Creates and buffers a new task without the need to explicitly + define a Task message. The queue must have [HTTP + target][google.cloud.tasks.v2beta2.HttpTarget]. 
+        To create the task with a custom ID, use the following format and
+        set TASK_ID to your desired ID:
+        projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer
+        To create the task with an automatically generated ID, use the
+        following format:
+        projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer.
+        Note: This feature is in its experimental stage. You must
+        request access to the API through the `Cloud Tasks BufferTask
+        Experiment Signup form `__.
+
+        Returns:
+            Callable[[~.BufferTaskRequest],
+                ~.BufferTaskResponse]:
+            A function that, when called, will call the underlying RPC
+            on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "buffer_task" not in self._stubs:
+            self._stubs["buffer_task"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2beta2.CloudTasks/BufferTask",
+                request_serializer=cloudtasks.BufferTaskRequest.serialize,
+                response_deserializer=cloudtasks.BufferTaskResponse.deserialize,
+            )
+        return self._stubs["buffer_task"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("CloudTasksGrpcTransport",)
diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..ca3d162f0b5f
--- /dev/null
+++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/grpc_asyncio.py
@@ -0,0 +1,1073 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.tasks_v2beta2.types import cloudtasks
+from google.cloud.tasks_v2beta2.types import queue
+from google.cloud.tasks_v2beta2.types import queue as gct_queue
+from google.cloud.tasks_v2beta2.types import task
+from google.cloud.tasks_v2beta2.types import task as gct_task
+
+from .base import DEFAULT_CLIENT_INFO, CloudTasksTransport
+from .grpc import CloudTasksGrpcTransport
+
+
+class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport):
+    """gRPC AsyncIO backend transport for CloudTasks.
+
+    Cloud Tasks allows developers to manage the execution of
+    background work in their applications.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "cloudtasks.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
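+
+        Editorial example (a hedged sketch, not generated text): creating
+        a channel explicitly and handing it to the transport:
+
+        .. code-block:: python
+
+            channel = CloudTasksGrpcAsyncIOTransport.create_channel()
+            transport = CloudTasksGrpcAsyncIOTransport(channel=channel)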
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], Awaitable[cloudtasks.ListQueuesResponse] + ]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + Awaitable[~.ListQueuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue( + self, + ) -> Callable[[cloudtasks.GetQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. 
+ + Returns: + Callable[[~.GetQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. 
+ + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue( + self, + ) -> Callable[[cloudtasks.PurgeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue( + self, + ) -> Callable[[cloudtasks.PauseQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta2.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue( + self, + ) -> Callable[[cloudtasks.ResumeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the resume queue method over gRPC. 
+ + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta2.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def upload_queue_yaml( + self, + ) -> Callable[[cloudtasks.UploadQueueYamlRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the upload queue yaml method over gRPC. + + Update queue list by uploading a queue.yaml file. + + The queue.yaml file is supplied in the request body as a + YAML encoded string. This method was added to support + gcloud clients versions before 322.0.0. New clients + should use CreateQueue instead of this method. + + Returns: + Callable[[~.UploadQueueYamlRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "upload_queue_yaml" not in self._stubs: + self._stubs["upload_queue_yaml"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/UploadQueueYaml", + request_serializer=cloudtasks.UploadQueueYamlRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["upload_queue_yaml"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
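+        # Editorial sketch (not generated code): inspecting a queue's
+        # policy via the public async client; the resource path is a
+        # placeholder.
+        #
+        #     policy = await client.get_iam_policy(
+        #         resource="projects/my-project/locations/us-central1/queues/q"
+        #     )
+        #     for binding in policy.bindings:
+        #         print(binding.role, list(binding.members))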
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[ + [cloudtasks.ListTasksRequest], Awaitable[cloudtasks.ListTasksResponse] + ]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + controls the subset of information which is returned. 
+ + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task( + self, + ) -> Callable[[cloudtasks.CreateTaskRequest], Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], the + maximum task size is 100KB. + - For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the + maximum task size is 1MB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task( + self, + ) -> Callable[[cloudtasks.DeleteTaskRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has completed + successfully or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
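+        # Editorial sketch (not generated code): deleting a task with the
+        # public async client; the task name is a placeholder, and the
+        # service rejects deletion of tasks that already completed.
+        #
+        #     await client.delete_task(
+        #         name="projects/my-project/locations/us-central1/queues/q/tasks/t"
+        #     )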
+ if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def lease_tasks( + self, + ) -> Callable[ + [cloudtasks.LeaseTasksRequest], Awaitable[cloudtasks.LeaseTasksResponse] + ]: + r"""Return a callable for the lease tasks method over gRPC. + + Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + after they have performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is + intended to store data that the worker needs to perform the work + associated with the task. To return the payloads in the + [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per queue. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is + returned when this limit is exceeded. + [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] is also + returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + + Returns: + Callable[[~.LeaseTasksRequest], + Awaitable[~.LeaseTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "lease_tasks" not in self._stubs: + self._stubs["lease_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks", + request_serializer=cloudtasks.LeaseTasksRequest.serialize, + response_deserializer=cloudtasks.LeaseTasksResponse.deserialize, + ) + return self._stubs["lease_tasks"] + + @property + def acknowledge_task( + self, + ) -> Callable[[cloudtasks.AcknowledgeTaskRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the acknowledge task method over gRPC. + + Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this + task must call this method to indicate that the work associated + with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + or the lease will expire and the task will become available to + be leased again. After the task is acknowledged, it will not be + returned by a later + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + Returns: + Callable[[~.AcknowledgeTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge_task" not in self._stubs: + self._stubs["acknowledge_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask", + request_serializer=cloudtasks.AcknowledgeTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["acknowledge_task"] + + @property + def renew_lease( + self, + ) -> Callable[[cloudtasks.RenewLeaseRequest], Awaitable[task.Task]]: + r"""Return a callable for the renew lease method over gRPC. + + Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be returned + in the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + + Returns: + Callable[[~.RenewLeaseRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "renew_lease" not in self._stubs: + self._stubs["renew_lease"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/RenewLease", + request_serializer=cloudtasks.RenewLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["renew_lease"] + + @property + def cancel_lease( + self, + ) -> Callable[[cloudtasks.CancelLeaseRequest], Awaitable[task.Task]]: + r"""Return a callable for the cancel lease method over gRPC. + + Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + to now. This will make the task available to be leased to the + next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Returns: + Callable[[~.CancelLeaseRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_lease" not in self._stubs: + self._stubs["cancel_lease"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/CancelLease", + request_serializer=cloudtasks.CancelLeaseRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["cancel_lease"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. 
That is, the task that is + returned contains the + [status][google.cloud.tasks.v2beta2.Task.status] after the task + is dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot + be called on a [pull + task][google.cloud.tasks.v2beta2.PullMessage]. + + Returns: + Callable[[~.RunTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + @property + def buffer_task( + self, + ) -> Callable[ + [cloudtasks.BufferTaskRequest], Awaitable[cloudtasks.BufferTaskResponse] + ]: + r"""Return a callable for the buffer task method over gRPC. + + Creates and buffers a new task without the need to explicitly + define a Task message. The queue must have [HTTP + target][google.cloud.tasks.v2beta2.HttpTarget]. To create the + task with a custom ID, use the following format and set TASK_ID + to your desired ID: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer + To create the task with an automatically generated ID, use the + following format: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. + Note: This feature is in its experimental stage. You must + request access to the API through the `Cloud Tasks BufferTask + Experiment Signup form `__. + + Returns: + Callable[[~.BufferTaskRequest], + Awaitable[~.BufferTaskResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "buffer_task" not in self._stubs: + self._stubs["buffer_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta2.CloudTasks/BufferTask", + request_serializer=cloudtasks.BufferTaskRequest.serialize, + response_deserializer=cloudtasks.BufferTaskResponse.deserialize, + ) + return self._stubs["buffer_task"] + + def close(self): + return self.grpc_channel.close() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
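+        # Editorial sketch (not generated code): calling this mixin RPC
+        # through the raw callable; the project name is a placeholder.
+        #
+        #     request = locations_pb2.ListLocationsRequest(name="projects/my-project")
+        #     response = await transport.list_locations(request)
+        #     for location in response.locations:
+        #         print(location.location_id)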
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("CloudTasksGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/rest.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/rest.py new file mode 100644 index 000000000000..be3b605b97f4 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/services/cloud_tasks/transports/rest.py @@ -0,0 +1,3223 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.tasks_v2beta2.types import cloudtasks +from google.cloud.tasks_v2beta2.types import queue +from google.cloud.tasks_v2beta2.types import queue as gct_queue +from google.cloud.tasks_v2beta2.types import task +from google.cloud.tasks_v2beta2.types import task as gct_task + +from .base import CloudTasksTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudTasksRestInterceptor: + """Interceptor for CloudTasks. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudTasksRestTransport. + + .. 
code-block:: python + class MyCustomCloudTasksInterceptor(CloudTasksRestInterceptor): + def pre_acknowledge_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_buffer_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_buffer_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_lease(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_lease(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_lease_tasks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_lease_tasks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_queues(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_queues(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tasks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tasks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_pause_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_pause_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_purge_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_renew_lease(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_renew_lease(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_resume_queue(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_resume_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_task(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_task(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_queue(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_queue(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_upload_queue_yaml(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + transport = CloudTasksRestTransport(interceptor=MyCustomCloudTasksInterceptor()) + client = CloudTasksClient(transport=transport) + + + """ + + def pre_acknowledge_task( + self, + request: cloudtasks.AcknowledgeTaskRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.AcknowledgeTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for acknowledge_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def pre_buffer_task( + self, request: cloudtasks.BufferTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.BufferTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for buffer_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_buffer_task( + self, response: cloudtasks.BufferTaskResponse + ) -> cloudtasks.BufferTaskResponse: + """Post-rpc interceptor for buffer_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_cancel_lease( + self, + request: cloudtasks.CancelLeaseRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.CancelLeaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_lease + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_cancel_lease(self, response: task.Task) -> task.Task: + """Post-rpc interceptor for cancel_lease + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_create_queue( + self, + request: cloudtasks.CreateQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.CreateQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. 
+ """ + return request, metadata + + def post_create_queue(self, response: gct_queue.Queue) -> gct_queue.Queue: + """Post-rpc interceptor for create_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_create_task( + self, request: cloudtasks.CreateTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.CreateTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_create_task(self, response: gct_task.Task) -> gct_task.Task: + """Post-rpc interceptor for create_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_delete_queue( + self, + request: cloudtasks.DeleteQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.DeleteQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def pre_delete_task( + self, request: cloudtasks.DeleteTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.DeleteTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_get_queue( + self, request: cloudtasks.GetQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.GetQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for get_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_get_task( + self, request: cloudtasks.GetTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.GetTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. 
+ """ + return request, metadata + + def post_get_task(self, response: task.Task) -> task.Task: + """Post-rpc interceptor for get_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_lease_tasks( + self, request: cloudtasks.LeaseTasksRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.LeaseTasksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for lease_tasks + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_lease_tasks( + self, response: cloudtasks.LeaseTasksResponse + ) -> cloudtasks.LeaseTasksResponse: + """Post-rpc interceptor for lease_tasks + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_list_queues( + self, request: cloudtasks.ListQueuesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.ListQueuesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_queues + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_list_queues( + self, response: cloudtasks.ListQueuesResponse + ) -> cloudtasks.ListQueuesResponse: + """Post-rpc interceptor for list_queues + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_list_tasks( + self, request: cloudtasks.ListTasksRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.ListTasksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_tasks + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_list_tasks( + self, response: cloudtasks.ListTasksResponse + ) -> cloudtasks.ListTasksResponse: + """Post-rpc interceptor for list_tasks + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_pause_queue( + self, request: cloudtasks.PauseQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.PauseQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for pause_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_pause_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for pause_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_purge_queue( + self, request: cloudtasks.PurgeQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.PurgeQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. 
+ """ + return request, metadata + + def post_purge_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for purge_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_renew_lease( + self, request: cloudtasks.RenewLeaseRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.RenewLeaseRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for renew_lease + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_renew_lease(self, response: task.Task) -> task.Task: + """Post-rpc interceptor for renew_lease + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_resume_queue( + self, + request: cloudtasks.ResumeQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.ResumeQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resume_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_resume_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for resume_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_run_task( + self, request: cloudtasks.RunTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.RunTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_run_task(self, response: task.Task) -> task.Task: + """Post-rpc interceptor for run_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. 
+ """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_update_queue( + self, + request: cloudtasks.UpdateQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.UpdateQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_update_queue(self, response: gct_queue.Queue) -> gct_queue.Queue: + """Post-rpc interceptor for update_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudTasksRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudTasksRestInterceptor + + +class CloudTasksRestTransport(CloudTasksTransport): + """REST backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudTasksRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
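+        # A hedged construction example (endpoint, credentials, and
+        # interceptor values below are illustrative, not defaults):
+        #
+        #     transport = CloudTasksRestTransport(
+        #         host="localhost:8123",
+        #         url_scheme="http",
+        #         credentials=google.auth.credentials.AnonymousCredentials(),
+        #         interceptor=MyCustomCloudTasksInterceptor(),
+        #     )
+        #     client = CloudTasksClient(transport=transport)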
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CloudTasksRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AcknowledgeTask(CloudTasksRestStub):
+        def __hash__(self):
+            return hash("AcknowledgeTask")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: cloudtasks.AcknowledgeTaskRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ):
+            r"""Call the acknowledge task method over HTTP.
+
+            Args:
+                request (~.cloudtasks.AcknowledgeTaskRequest):
+                    The request object. Request message for acknowledging a task using
+                    [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_acknowledge_task(
+                request, metadata
+            )
+            pb_request = cloudtasks.AcknowledgeTaskRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
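+            # A sketch of what this mapping means for callers, assuming
+            # google-api-core's standard HTTP-to-exception mapping (the
+            # handler below is illustrative):
+            #
+            #     try:
+            #         client.acknowledge_task(name=task_name, schedule_time=ts)
+            #     except core_exceptions.NotFound:      # HTTP 404
+            #         ...  # the task or queue no longer exists
+            #     except core_exceptions.GoogleAPICallError:
+            #         ...  # any other non-2xx status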
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _BufferTask(CloudTasksRestStub): + def __hash__(self): + return hash("BufferTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.BufferTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.BufferTaskResponse: + r"""Call the buffer task method over HTTP. + + Args: + request (~.cloudtasks.BufferTaskRequest): + The request object. LINT.IfChange Request message for + [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudtasks.BufferTaskResponse: + Response message for + [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{queue=projects/*/locations/*/queues/*}/tasks/{task_id}:buffer", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_buffer_task(request, metadata) + pb_request = cloudtasks.BufferTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
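+            # For orientation, a hedged illustration of the transcoding step
+            # above: given queue="projects/p/locations/l/queues/q" and
+            # task_id="t" (sample values), the http_options rule produces
+            # roughly:
+            #
+            #     method -> "post"
+            #     uri    -> "/v2beta2/projects/p/locations/l/queues/q/tasks/t:buffer"
+            #     body   -> the remaining request fields, serialized as JSON ("*")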
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.BufferTaskResponse() + pb_resp = cloudtasks.BufferTaskResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_buffer_task(resp) + return resp + + class _CancelLease(CloudTasksRestStub): + def __hash__(self): + return hash("CancelLease") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.CancelLeaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Call the cancel lease method over HTTP. + + Args: + request (~.cloudtasks.CancelLeaseRequest): + The request object. Request message for canceling a lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_lease(request, metadata) + pb_request = cloudtasks.CancelLeaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
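+            # Past this status check, the flow below builds an empty task.Task,
+            # parses the JSON payload into its underlying protobuf with
+            # json_format.Parse (unknown fields ignored), and hands the result
+            # to the post_cancel_lease interceptor before returning it.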
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = task.Task() + pb_resp = task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_lease(resp) + return resp + + class _CreateQueue(CloudTasksRestStub): + def __hash__(self): + return hash("CreateQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.CreateQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Call the create queue method over HTTP. + + Args: + request (~.cloudtasks.CreateQueueRequest): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{parent=projects/*/locations/*}/queues", + "body": "queue", + }, + ] + request, metadata = self._interceptor.pre_create_queue(request, metadata) + pb_request = cloudtasks.CreateQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
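+            # The _get_unset_required_fields step above is a no-op here (the
+            # defaults dict is empty), but the mechanism it implements looks
+            # like this sketch (the field name is hypothetical):
+            #
+            #     defaults = {"readMask": ""}
+            #     query_params.update(
+            #         {k: v for k, v in defaults.items() if k not in query_params}
+            #     )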
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_queue.Queue() + pb_resp = gct_queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_queue(resp) + return resp + + class _CreateTask(CloudTasksRestStub): + def __hash__(self): + return hash("CreateTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.CreateTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Call the create task method over HTTP. + + Args: + request (~.cloudtasks.CreateTaskRequest): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_task(request, metadata) + pb_request = cloudtasks.CreateTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
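+            # A hedged end-to-end example of the RPC this stub serves (paths
+            # and payload are illustrative):
+            #
+            #     client = CloudTasksClient(transport="rest")
+            #     task = client.create_task(
+            #         parent="projects/p/locations/l/queues/q",
+            #         task={"app_engine_http_request": {"relative_url": "/work"}},
+            #     )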
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_task.Task() + pb_resp = gct_task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_task(resp) + return resp + + class _DeleteQueue(CloudTasksRestStub): + def __hash__(self): + return hash("DeleteQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.DeleteQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete queue method over HTTP. + + Args: + request (~.cloudtasks.DeleteQueueRequest): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_queue(request, metadata) + pb_request = cloudtasks.DeleteQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteTask(CloudTasksRestStub): + def __hash__(self): + return hash("DeleteTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.DeleteTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete task method over HTTP. + + Args: + request (~.cloudtasks.DeleteTaskRequest): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_task(request, metadata) + pb_request = cloudtasks.DeleteTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetIamPolicy(CloudTasksRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _GetQueue(CloudTasksRestStub): + def __hash__(self): + return hash("GetQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.GetQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the get queue method over HTTP. + + Args: + request (~.cloudtasks.GetQueueRequest): + The request object. 
Request message for + [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*}", + }, + ] + request, metadata = self._interceptor.pre_get_queue(request, metadata) + pb_request = cloudtasks.GetQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_queue(resp) + return resp + + class _GetTask(CloudTasksRestStub): + def __hash__(self): + return hash("GetTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.GetTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Call the get task method over HTTP. + + Args: + request (~.cloudtasks.GetTaskRequest): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}", + }, + ] + request, metadata = self._interceptor.pre_get_task(request, metadata) + pb_request = cloudtasks.GetTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = task.Task() + pb_resp = task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_task(resp) + return resp + + class _LeaseTasks(CloudTasksRestStub): + def __hash__(self): + return hash("LeaseTasks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.LeaseTasksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.LeaseTasksResponse: + r"""Call the lease tasks method over HTTP. + + Args: + request (~.cloudtasks.LeaseTasksRequest): + The request object. Request message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudtasks.LeaseTasksResponse: + Response message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_lease_tasks(request, metadata) + pb_request = cloudtasks.LeaseTasksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.LeaseTasksResponse() + pb_resp = cloudtasks.LeaseTasksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_lease_tasks(resp) + return resp + + class _ListQueues(CloudTasksRestStub): + def __hash__(self): + return hash("ListQueues") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ListQueuesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.ListQueuesResponse: + r"""Call the list queues method over HTTP. + + Args: + request (~.cloudtasks.ListQueuesRequest): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudtasks.ListQueuesResponse: + Response message for + [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta2/{parent=projects/*/locations/*}/queues", + }, + ] + request, metadata = self._interceptor.pre_list_queues(request, metadata) + pb_request = cloudtasks.ListQueuesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.ListQueuesResponse() + pb_resp = cloudtasks.ListQueuesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_queues(resp) + return resp + + class _ListTasks(CloudTasksRestStub): + def __hash__(self): + return hash("ListTasks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ListTasksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.ListTasksResponse: + r"""Call the list tasks method over HTTP. + + Args: + request (~.cloudtasks.ListTasksRequest): + The request object. Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudtasks.ListTasksResponse: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks", + }, + ] + request, metadata = self._interceptor.pre_list_tasks(request, metadata) + pb_request = cloudtasks.ListTasksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.ListTasksResponse() + pb_resp = cloudtasks.ListTasksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tasks(resp) + return resp + + class _PauseQueue(CloudTasksRestStub): + def __hash__(self): + return hash("PauseQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.PauseQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the pause queue method over HTTP. + + Args: + request (~.cloudtasks.PauseQueueRequest): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*}:pause", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_pause_queue(request, metadata) + pb_request = cloudtasks.PauseQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pause_queue(resp) + return resp + + class _PurgeQueue(CloudTasksRestStub): + def __hash__(self): + return hash("PurgeQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.PurgeQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the purge queue method over HTTP. + + Args: + request (~.cloudtasks.PurgeQueueRequest): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*}:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_queue(request, metadata) + pb_request = cloudtasks.PurgeQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_queue(resp) + return resp + + class _RenewLease(CloudTasksRestStub): + def __hash__(self): + return hash("RenewLease") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.RenewLeaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Call the renew lease method over HTTP. + + Args: + request (~.cloudtasks.RenewLeaseRequest): + The request object. Request message for renewing a lease using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_renew_lease(request, metadata) + pb_request = cloudtasks.RenewLeaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = task.Task() + pb_resp = task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_renew_lease(resp) + return resp + + class _ResumeQueue(CloudTasksRestStub): + def __hash__(self): + return hash("ResumeQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ResumeQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the resume queue method over HTTP. + + Args: + request (~.cloudtasks.ResumeQueueRequest): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*}:resume", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resume_queue(request, metadata) + pb_request = cloudtasks.ResumeQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume_queue(resp) + return resp + + class _RunTask(CloudTasksRestStub): + def __hash__(self): + return hash("RunTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.RunTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Call the run task method over HTTP. + + Args: + request (~.cloudtasks.RunTaskRequest): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_task(request, metadata) + pb_request = cloudtasks.RunTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = task.Task() + pb_resp = task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_task(resp) + return resp + + class _SetIamPolicy(CloudTasksRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(CloudTasksRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _UpdateQueue(CloudTasksRestStub): + def __hash__(self): + return hash("UpdateQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.UpdateQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Call the update queue method over HTTP. + + Args: + request (~.cloudtasks.UpdateQueueRequest): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, target types, and + others. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2beta2/{queue.name=projects/*/locations/*/queues/*}", + "body": "queue", + }, + ] + request, metadata = self._interceptor.pre_update_queue(request, metadata) + pb_request = cloudtasks.UpdateQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_queue.Queue() + pb_resp = gct_queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_queue(resp) + return resp + + class _UploadQueueYaml(CloudTasksRestStub): + def __hash__(self): + return hash("UploadQueueYaml") + + def __call__( + self, + request: cloudtasks.UploadQueueYamlRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + raise NotImplementedError( + "Method UploadQueueYaml is not available over REST transport" + ) + + @property + def acknowledge_task( + self, + ) -> Callable[[cloudtasks.AcknowledgeTaskRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AcknowledgeTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def buffer_task( + self, + ) -> Callable[[cloudtasks.BufferTaskRequest], cloudtasks.BufferTaskResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BufferTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_lease(self) -> Callable[[cloudtasks.CancelLeaseRequest], task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelLease(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def lease_tasks( + self, + ) -> Callable[[cloudtasks.LeaseTasksRequest], cloudtasks.LeaseTasksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LeaseTasks(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_queues( + self, + ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListQueues(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tasks( + self, + ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTasks(self._session, self._host, self._interceptor) # type: ignore + + @property + def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PauseQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PurgeQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def renew_lease(self) -> Callable[[cloudtasks.RenewLeaseRequest], task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RenewLease(self._session, self._host, self._interceptor) # type: ignore + + @property + def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResumeQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def upload_queue_yaml( + self, + ) -> Callable[[cloudtasks.UploadQueueYamlRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UploadQueueYaml(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudTasksRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta2/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudTasksRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta2/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudTasksRestTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/__init__.py new file mode 100644 index 000000000000..1c68ddb34d67 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/__init__.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
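+#
+# This module re-exports the generated message types at the package level,
+# so callers can write, for example,
+#
+#     from google.cloud.tasks_v2beta2.types import CreateTaskRequest
+#
+# rather than importing from the individual generated submodules.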
+# +from .cloudtasks import ( + AcknowledgeTaskRequest, + BufferTaskRequest, + BufferTaskResponse, + CancelLeaseRequest, + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + LeaseTasksRequest, + LeaseTasksResponse, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + RenewLeaseRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, + UploadQueueYamlRequest, +) +from .queue import Queue, QueueStats, RateLimits, RetryConfig +from .target import ( + AppEngineHttpRequest, + AppEngineHttpTarget, + AppEngineRouting, + HttpMethod, + HttpRequest, + HttpTarget, + OAuthToken, + OidcToken, + PathOverride, + PullMessage, + PullTarget, + QueryOverride, + UriOverride, +) +from .task import AttemptStatus, Task, TaskStatus + +__all__ = ( + "AcknowledgeTaskRequest", + "BufferTaskRequest", + "BufferTaskResponse", + "CancelLeaseRequest", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "LeaseTasksRequest", + "LeaseTasksResponse", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "PauseQueueRequest", + "PurgeQueueRequest", + "RenewLeaseRequest", + "ResumeQueueRequest", + "RunTaskRequest", + "UpdateQueueRequest", + "UploadQueueYamlRequest", + "Queue", + "QueueStats", + "RateLimits", + "RetryConfig", + "AppEngineHttpRequest", + "AppEngineHttpTarget", + "AppEngineRouting", + "HttpRequest", + "HttpTarget", + "OAuthToken", + "OidcToken", + "PathOverride", + "PullMessage", + "PullTarget", + "QueryOverride", + "UriOverride", + "HttpMethod", + "AttemptStatus", + "Task", + "TaskStatus", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/cloudtasks.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/cloudtasks.py new file mode 100644 index 000000000000..8a2a69260a63 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/cloudtasks.py @@ -0,0 +1,961 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.api import httpbody_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.tasks_v2beta2.types import queue as gct_queue
+from google.cloud.tasks_v2beta2.types import task as gct_task
+
+__protobuf__ = proto.module(
+    package="google.cloud.tasks.v2beta2",
+    manifest={
+        "ListQueuesRequest",
+        "ListQueuesResponse",
+        "GetQueueRequest",
+        "CreateQueueRequest",
+        "UpdateQueueRequest",
+        "DeleteQueueRequest",
+        "PurgeQueueRequest",
+        "PauseQueueRequest",
+        "ResumeQueueRequest",
+        "UploadQueueYamlRequest",
+        "ListTasksRequest",
+        "ListTasksResponse",
+        "GetTaskRequest",
+        "CreateTaskRequest",
+        "DeleteTaskRequest",
+        "LeaseTasksRequest",
+        "LeaseTasksResponse",
+        "AcknowledgeTaskRequest",
+        "RenewLeaseRequest",
+        "CancelLeaseRequest",
+        "RunTaskRequest",
+        "BufferTaskRequest",
+        "BufferTaskResponse",
+    },
+)
+
+
+class ListQueuesRequest(proto.Message):
+    r"""Request message for
+    [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues].
+
+    Attributes:
+        parent (str):
+            Required. The location name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID``
+        filter (str):
+            ``filter`` can be used to specify a subset of queues. Any
+            [Queue][google.cloud.tasks.v2beta2.Queue] field can be used
+            as a filter and several operators are supported. For
+            example: ``<=, <, >=, >, !=, =, :``. The filter syntax is
+            the same as described in `Stackdriver's Advanced Logs
+            Filters `__.
+
+            Sample filter "app_engine_http_target: \*".
+
+            Note that using filters might cause fewer queues than the
+            requested page_size to be returned.
+        page_size (int):
+            Requested page size.
+
+            The maximum page size is 9800. If unspecified, the page size
+            will be the maximum. Fewer queues than requested might be
+            returned, even if more queues exist; use the
+            [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token]
+            in the response to determine if more queues exist.
+        page_token (str):
+            A token identifying the page of results to return.
+
+            To request the first page of results, page_token must be
+            empty. To request the next page of results, page_token must
+            be the value of
+            [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token]
+            returned from the previous call to the
+            [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]
+            method. It is an error to switch the value of the
+            [filter][google.cloud.tasks.v2beta2.ListQueuesRequest.filter]
+            while iterating through pages.
+        read_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Optional. Read mask is used for more granular control over
+            what the API returns. If the mask is not present, all fields
+            will be returned except [Queue.stats]. [Queue.stats] will be
+            returned only if it was explicitly specified in the mask.
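+
+    For illustration only (``my-project`` and ``us-central1`` are
+    placeholder values), a request for queues with an App Engine target
+    might be built as:
+
+    ::
+
+        ListQueuesRequest(
+            parent="projects/my-project/locations/us-central1",
+            filter="app_engine_http_target: *",
+            page_size=100,
+        )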
+
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    read_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class ListQueuesResponse(proto.Message):
+    r"""Response message for
+    [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues].
+
+    Attributes:
+        queues (MutableSequence[google.cloud.tasks_v2beta2.types.Queue]):
+            The list of queues.
+        next_page_token (str):
+            A token to retrieve the next page of results.
+
+            To return the next page of results, call
+            [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]
+            with this value as the
+            [page_token][google.cloud.tasks.v2beta2.ListQueuesRequest.page_token].
+
+            If the next_page_token is empty, there are no more results.
+
+            The page token is valid for only 2 hours.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    queues: MutableSequence[gct_queue.Queue] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gct_queue.Queue,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class GetQueueRequest(proto.Message):
+    r"""Request message for
+    [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue].
+
+    Attributes:
+        name (str):
+            Required. The resource name of the queue. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+        read_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Optional. Read mask is used for more granular control over
+            what the API returns. If the mask is not present, all fields
+            will be returned except [Queue.stats]. [Queue.stats] will be
+            returned only if it was explicitly specified in the mask.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    read_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class CreateQueueRequest(proto.Message):
+    r"""Request message for
+    [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue].
+
+    Attributes:
+        parent (str):
+            Required. The location name in which the queue will be
+            created. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID``
+
+            The list of allowed locations can be obtained by calling
+            Cloud Tasks' implementation of
+            [ListLocations][google.cloud.location.Locations.ListLocations].
+        queue (google.cloud.tasks_v2beta2.types.Queue):
+            Required. The queue to create.
+
+            [Queue's name][google.cloud.tasks.v2beta2.Queue.name] cannot
+            be the same as an existing queue.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    queue: gct_queue.Queue = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gct_queue.Queue,
+    )
+
+
+class UpdateQueueRequest(proto.Message):
+    r"""Request message for
+    [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue].
+
+    Attributes:
+        queue (google.cloud.tasks_v2beta2.types.Queue):
+            Required. The queue to create or update.
+
+            The queue's [name][google.cloud.tasks.v2beta2.Queue.name]
+            must be specified.
+
+            Output only fields cannot be modified using UpdateQueue. Any
+            value specified for an output only field will be ignored.
+            The queue's [name][google.cloud.tasks.v2beta2.Queue.name]
+            cannot be changed.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            A mask used to specify which fields of the
+            queue are being updated.
+            If empty, then all fields will be updated.
+    """
+
+    queue: gct_queue.Queue = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gct_queue.Queue,
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class DeleteQueueRequest(proto.Message):
+    r"""Request message for
+    [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue].
+
+    Attributes:
+        name (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class PurgeQueueRequest(proto.Message):
+    r"""Request message for
+    [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue].
+
+    Attributes:
+        name (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class PauseQueueRequest(proto.Message):
+    r"""Request message for
+    [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue].
+
+    Attributes:
+        name (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ResumeQueueRequest(proto.Message):
+    r"""Request message for
+    [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue].
+
+    Attributes:
+        name (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UploadQueueYamlRequest(proto.Message):
+    r"""Request message for
+    [UploadQueueYaml][google.cloud.tasks.v2beta2.CloudTasks.UploadQueueYaml].
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        app_id (str):
+            Required. The App ID is supplied as an HTTP
+            parameter. Unlike internal usage of App ID, it
+            does not include a region prefix. Rather, the
+            App ID represents the Project ID against which
+            to make the request.
+        http_body (google.api.httpbody_pb2.HttpBody):
+            The HTTP body contains the queue.yaml file,
+            which is used to update the queue list.
+
+            This field is a member of `oneof`_ ``_http_body``.
+    """
+
+    app_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    http_body: httpbody_pb2.HttpBody = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        optional=True,
+        message=httpbody_pb2.HttpBody,
+    )
+
+
+class ListTasksRequest(proto.Message):
+    r"""Request message for listing tasks using
+    [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].
+
+    Attributes:
+        parent (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+        response_view (google.cloud.tasks_v2beta2.types.Task.View):
+            The response_view specifies which subset of the
+            [Task][google.cloud.tasks.v2beta2.Task] will be returned.
+
+            By default response_view is
+            [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
+            information is retrieved by default because some data, such
+            as payloads, might be desirable to return only when needed
+            because of its large size or because of the sensitivity of
+            data that it contains.
+
+            Authorization for
+            [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
+            ``cloudtasks.tasks.fullView`` `Google
+            IAM `__ permission on the
+            [Task][google.cloud.tasks.v2beta2.Task] resource.
+        page_size (int):
+            Maximum page size.
+
+            Fewer tasks than requested might be returned, even if more
+            tasks exist; use
+            [next_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token]
+            in the response to determine if more tasks exist.
+
+            The maximum page size is 1000. If unspecified, the page size
+            will be the maximum.
+        page_token (str):
+            A token identifying the page of results to return.
+
+            To request the first page of results, page_token must be
+            empty. To request the next page of results, page_token must
+            be the value of
+            [next_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token]
+            returned from the previous call to the
+            [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]
+            method.
+
+            The page token is valid for only 2 hours.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    response_view: gct_task.Task.View = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=gct_task.Task.View,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=4,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListTasksResponse(proto.Message):
+    r"""Response message for listing tasks using
+    [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].
+
+    Attributes:
+        tasks (MutableSequence[google.cloud.tasks_v2beta2.types.Task]):
+            The list of tasks.
+        next_page_token (str):
+            A token to retrieve the next page of results.
+
+            To return the next page of results, call
+            [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]
+            with this value as the
+            [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token].
+
+            If the next_page_token is empty, there are no more results.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    tasks: MutableSequence[gct_task.Task] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=gct_task.Task,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class GetTaskRequest(proto.Message):
+    r"""Request message for getting a task using
+    [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask].
+
+    Attributes:
+        name (str):
+            Required. The task name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
+        response_view (google.cloud.tasks_v2beta2.types.Task.View):
+            The response_view specifies which subset of the
+            [Task][google.cloud.tasks.v2beta2.Task] will be returned.
+
+            By default response_view is
+            [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
+            information is retrieved by default because some data, such
+            as payloads, might be desirable to return only when needed
+            because of its large size or because of the sensitivity of
+            data that it contains.
+
+            Authorization for
+            [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
+            ``cloudtasks.tasks.fullView`` `Google
+            IAM `__ permission on the
+            [Task][google.cloud.tasks.v2beta2.Task] resource.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    response_view: gct_task.Task.View = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=gct_task.Task.View,
+    )
+
+
+class CreateTaskRequest(proto.Message):
+    r"""Request message for
+    [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask].
+
+    Attributes:
+        parent (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+            The queue must already exist.
+        task (google.cloud.tasks_v2beta2.types.Task):
+            Required. The task to add.
+
+            Task names have the following format:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``.
+            The user can optionally specify a task
+            [name][google.cloud.tasks.v2beta2.Task.name]. If a name is
+            not specified, then the system will generate a random unique
+            task id, which will be set in the task returned in the
+            [response][google.cloud.tasks.v2beta2.Task.name].
+
+            If
+            [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]
+            is not set or is in the past, then Cloud Tasks will set it
+            to the current time.
+
+            Task De-duplication:
+
+            Explicitly specifying a task ID enables task de-duplication.
+            If a task's ID is identical to that of an existing task or a
+            task that was deleted or completed recently, then the call
+            will fail with
+            [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the
+            task's queue was created using Cloud Tasks, then another
+            task with the same name can't be created for ~1 hour after
+            the original task was deleted or completed. If the task's
+            queue was created using queue.yaml or queue.xml, then
+            another task with the same name can't be created for ~9 days
+            after the original task was deleted or completed.
+
+            Because there is an extra lookup cost to identify duplicate
+            task names, these
+            [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]
+            calls have significantly increased latency. Using hashed
+            strings for the task id or for the prefix of the task id is
+            recommended. Choosing task ids that are sequential or have
+            sequential prefixes, for example using a timestamp, causes
+            an increase in latency and error rates in all task commands.
+            The infrastructure relies on an approximately uniform
+            distribution of task ids to store and serve tasks
+            efficiently.
+        response_view (google.cloud.tasks_v2beta2.types.Task.View):
+            The response_view specifies which subset of the
+            [Task][google.cloud.tasks.v2beta2.Task] will be returned.
+
+            By default response_view is
+            [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
+            information is retrieved by default because some data, such
+            as payloads, might be desirable to return only when needed
+            because of its large size or because of the sensitivity of
+            data that it contains.
+
+            Authorization for
+            [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
+            ``cloudtasks.tasks.fullView`` `Google
+            IAM `__ permission on the
+            [Task][google.cloud.tasks.v2beta2.Task] resource.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    task: gct_task.Task = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gct_task.Task,
+    )
+    response_view: gct_task.Task.View = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=gct_task.Task.View,
+    )
+
+
+class DeleteTaskRequest(proto.Message):
+    r"""Request message for deleting a task using
+    [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask].
+
+    Attributes:
+        name (str):
+            Required. The task name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class LeaseTasksRequest(proto.Message):
+    r"""Request message for leasing tasks using
+    [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks].
+
+    Attributes:
+        parent (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+        max_tasks (int):
+            The maximum number of tasks to lease.
+
+            The system will make a best effort to return as close to
+            ``max_tasks`` as possible.
+
+            The largest that ``max_tasks`` can be is 1000.
+
+            The maximum total size of a [lease tasks
+            response][google.cloud.tasks.v2beta2.LeaseTasksResponse] is
+            32 MB. If the sum of all task sizes requested reaches this
+            limit, fewer tasks than requested are returned.
+        lease_duration (google.protobuf.duration_pb2.Duration):
+            Required. The duration of the lease.
+
+            Each task returned in the
+            [response][google.cloud.tasks.v2beta2.LeaseTasksResponse]
+            will have its
+            [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]
+            set to the current time plus the ``lease_duration``. The
+            task is leased until its
+            [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time];
+            thus, the task will not be returned to another
+            [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
+            call before its
+            [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time].
+
+            After the worker has successfully finished the work
+            associated with the task, the worker must acknowledge the
+            task via
+            [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]
+            before the
+            [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time].
+            Otherwise, the task will be returned to a later
+            [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
+            call so that another worker can retry it.
+
+            The maximum lease duration is 1 week. ``lease_duration``
+            will be truncated to the nearest second.
+        response_view (google.cloud.tasks_v2beta2.types.Task.View):
+            The response_view specifies which subset of the
+            [Task][google.cloud.tasks.v2beta2.Task] will be returned.
+
+            By default response_view is
+            [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
+            information is retrieved by default because some data, such
+            as payloads, might be desirable to return only when needed
+            because of its large size or because of the sensitivity of
+            data that it contains.
+
+            Authorization for
+            [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
+            ``cloudtasks.tasks.fullView`` `Google
+            IAM `__ permission on the
+            [Task][google.cloud.tasks.v2beta2.Task] resource.
+        filter (str):
+            ``filter`` can be used to specify a subset of tasks to
+            lease.
+
+            When ``filter`` is set to ``tag=<my-tag>`` then the
+            [response][google.cloud.tasks.v2beta2.LeaseTasksResponse]
+            will contain only tasks whose
+            [tag][google.cloud.tasks.v2beta2.PullMessage.tag] is equal
+            to ``<my-tag>``. ``<my-tag>`` must be less than 500
+            characters.
+
+            When ``filter`` is set to ``tag_function=oldest_tag()``,
+            only tasks which have the same tag as the task with the
+            oldest
+            [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]
+            will be returned.
+
+            Grammar Syntax:
+
+            - ``filter = "tag=" tag | "tag_function=" function``
+
+            - ``tag = string``
+
+            - ``function = "oldest_tag()"``
+
+            The ``oldest_tag()`` function returns tasks which have the
+            same tag as the oldest task (ordered by schedule time).
+
+            SDK compatibility: Although the SDK allows tags to be either
+            string or
+            `bytes `__,
+            only UTF-8 encoded tags can be used in Cloud Tasks. Tags
+            that aren't UTF-8 encoded can't be used in the
+            [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]
+            and the task's
+            [tag][google.cloud.tasks.v2beta2.PullMessage.tag] will be
+            displayed as empty in Cloud Tasks.
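+
+            For illustration only (``queue_name`` stands for a full
+            queue resource name), requests using this grammar might be
+            built as:
+
+            ::
+
+                LeaseTasksRequest(parent=queue_name, filter="tag=my-tag")
+                LeaseTasksRequest(
+                    parent=queue_name,
+                    filter="tag_function=oldest_tag()",
+                )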
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + max_tasks: int = proto.Field( + proto.INT32, + number=2, + ) + lease_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=4, + enum=gct_task.Task.View, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class LeaseTasksResponse(proto.Message): + r"""Response message for leasing tasks using + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + + Attributes: + tasks (MutableSequence[google.cloud.tasks_v2beta2.types.Task]): + The leased tasks. + """ + + tasks: MutableSequence[gct_task.Task] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_task.Task, + ) + + +class AcknowledgeTaskRequest(proto.Message): + r"""Request message for acknowledging a task using + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class RenewLeaseRequest(proto.Message): + r"""Request message for renewing a lease using + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + lease_duration (google.protobuf.duration_pb2.Duration): + Required. The desired new lease duration, starting from now. + + The maximum lease duration is 1 week. ``lease_duration`` + will be truncated to the nearest second. + response_view (google.cloud.tasks_v2beta2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. 
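Together with ``CancelLease`` (defined just below), these messages cover the rest of a lease's lifecycle. A sketch of the worker side, where ``process`` and ``needs_more_time`` are hypothetical application helpers and ``task`` comes from an earlier ``LeaseTasks`` response::

    from google.cloud import tasks_v2beta2
    from google.protobuf import duration_pb2

    client = tasks_v2beta2.CloudTasksClient()

    def handle(task: tasks_v2beta2.Task) -> None:
        # needs_more_time() and process() are hypothetical app helpers.
        if needs_more_time(task):
            # Extend the lease; the returned Task carries the new schedule_time.
            task = client.renew_lease(
                name=task.name,
                schedule_time=task.schedule_time,
                lease_duration=duration_pb2.Duration(seconds=600),
            )
        try:
            process(task)
        except Exception:
            # Give the lease back so another worker can retry right away.
            client.cancel_lease(name=task.name, schedule_time=task.schedule_time)
            raise
        # Passing the leased schedule_time proves this worker still holds the lease.
        client.acknowledge_task(name=task.name, schedule_time=task.schedule_time)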
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + lease_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=4, + enum=gct_task.Task.View, + ) + + +class CancelLeaseRequest(proto.Message): + r"""Request message for canceling a lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The task's current schedule time, available in the + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + response_view (google.cloud.tasks_v2beta2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=3, + enum=gct_task.Task.View, + ) + + +class RunTaskRequest(proto.Message): + r"""Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2beta2.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta2.Task] resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +class BufferTaskRequest(proto.Message): + r"""LINT.IfChange Request message for + [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. + + Attributes: + queue (str): + Required. The parent queue name. 
For example: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` + + The queue must already exist. + task_id (str): + Optional. Task ID for the task being created. + If not provided, a random task ID is assigned to + the task. + body (google.api.httpbody_pb2.HttpBody): + Optional. Body of the HTTP request. + + The body can take any generic value. The value is written to + the [HttpRequest][payload] of the [Task]. + """ + + queue: str = proto.Field( + proto.STRING, + number=1, + ) + task_id: str = proto.Field( + proto.STRING, + number=2, + ) + body: httpbody_pb2.HttpBody = proto.Field( + proto.MESSAGE, + number=3, + message=httpbody_pb2.HttpBody, + ) + + +class BufferTaskResponse(proto.Message): + r"""Response message for + [BufferTask][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. + + Attributes: + task (google.cloud.tasks_v2beta2.types.Task): + The created task. + """ + + task: gct_task.Task = proto.Field( + proto.MESSAGE, + number=1, + message=gct_task.Task, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/old_target.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/old_target.py new file mode 100644 index 000000000000..38179d1a099f --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/old_target.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta2", + manifest={}, +) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/queue.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/queue.py new file mode 100644 index 000000000000..b6d8e556463f --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/queue.py @@ -0,0 +1,609 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
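Returning to the ``BufferTask`` RPC defined above: a sketch of buffering an HTTP task by posting only a body, assuming the matching generated client method and placeholder resource names::

    from google.api import httpbody_pb2
    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    queue = client.queue_path("my-project", "us-central1", "my-http-queue")

    response = client.buffer_task(
        request=tasks_v2beta2.BufferTaskRequest(
            queue=queue,
            # task_id omitted: the service assigns a random task ID.
            body=httpbody_pb2.HttpBody(
                content_type="application/json",
                data=b'{"event": "signup"}',
            ),
        )
    )
    print(response.task.name)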
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.tasks_v2beta2.types import target + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta2", + manifest={ + "Queue", + "RateLimits", + "RetryConfig", + "QueueStats", + }, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + target types, and others. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. + + The queue name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the queue's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + app_engine_http_target (google.cloud.tasks_v2beta2.types.AppEngineHttpTarget): + App Engine HTTP target. + + An App Engine queue is a queue that has an + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget]. + + This field is a member of `oneof`_ ``target_type``. + pull_target (google.cloud.tasks_v2beta2.types.PullTarget): + Pull target. + + A pull queue is a queue that has a + [PullTarget][google.cloud.tasks.v2beta2.PullTarget]. + + This field is a member of `oneof`_ ``target_type``. + http_target (google.cloud.tasks_v2beta2.types.HttpTarget): + An http_target is used to override the target values for + HTTP tasks. + + This field is a member of `oneof`_ ``target_type``. + rate_limits (google.cloud.tasks_v2beta2.types.RateLimits): + Rate limits for task dispatches. + + [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] + and + [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + are related because they both control task attempts however + they control how tasks are attempted in different ways: + + - [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] + controls the total rate of dispatches from a queue (i.e. + all traffic dispatched from the queue, regardless of + whether the dispatch is from a first attempt or a retry). + - [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + controls what happens to particular a task after its + first attempt fails. That is, + [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] + controls task retries (the second attempt, third attempt, + etc). 
+ retry_config (google.cloud.tasks_v2beta2.types.RetryConfig): + Settings that determine the retry behavior. + + - For tasks created using Cloud Tasks: the queue-level + retry settings apply to all tasks in the queue that were + created using Cloud Tasks. Retry settings cannot be set + on individual tasks. + - For tasks created using the App Engine SDK: the + queue-level retry settings apply to all tasks in the + queue which do not have retry settings explicitly set on + the task and were created by the App Engine SDK. See `App + Engine + documentation `__. + state (google.cloud.tasks_v2beta2.types.Queue.State): + Output only. The state of the queue. + + ``state`` can only be changed by called + [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue], + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue], + or uploading + `queue.yaml/xml `__. + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] + cannot be used to change ``state``. + purge_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2beta2.Task.create_time] + before this time were purged. + + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. + task_ttl (google.protobuf.duration_pb2.Duration): + The maximum amount of time that a task will be retained in + this queue. + + Queues created by Cloud Tasks have a default ``task_ttl`` of + 31 days. After a task has lived for ``task_ttl``, the task + will be deleted regardless of whether it was dispatched or + not. + + The ``task_ttl`` for queues created via queue.yaml/xml is + equal to the maximum duration because there is a `storage + quota `__ + for these queues. To view the maximum valid duration, see + the documentation for [Duration][google.protobuf.Duration]. + tombstone_ttl (google.protobuf.duration_pb2.Duration): + The task tombstone time to live (TTL). + + After a task is deleted or completed, the task's tombstone + is retained for the length of time specified by + ``tombstone_ttl``. The tombstone is used by task + de-duplication; another task with the same name can't be + created until the tombstone has expired. For more + information about task de-duplication, see the documentation + for + [CreateTaskRequest][google.cloud.tasks.v2beta2.CreateTaskRequest.task]. + + Queues created by Cloud Tasks have a default + ``tombstone_ttl`` of 1 hour. + stats (google.cloud.tasks_v2beta2.types.QueueStats): + Output only. The realtime, informational + statistics for a queue. In order to receive the + statistics the caller should include this field + in the FieldMask. + """ + + class State(proto.Enum): + r"""State of the queue. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + RUNNING (1): + The queue is running. Tasks can be dispatched. + + If the queue was created using Cloud Tasks and the queue has + had no activity (method calls or task dispatches) for 30 + days, the queue may take a few minutes to re-activate. Some + method calls may return + [NOT_FOUND][google.rpc.Code.NOT_FOUND] and tasks may not be + dispatched for a few minutes until the queue has been + re-activated. + PAUSED (2): + Tasks are paused by the user. 
If the queue is paused then + Cloud Tasks will stop delivering tasks from it, but more + tasks can still be added to it by the user. When a pull + queue is paused, all + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + calls will return a + [FAILED_PRECONDITION][google.rpc.Code.FAILED_PRECONDITION]. + DISABLED (3): + The queue is disabled. + + A queue becomes ``DISABLED`` when + `queue.yaml `__ + or + `queue.xml `__ + is uploaded which does not contain the queue. You cannot + directly disable a queue. + + When a queue is disabled, tasks can still be added to a + queue but the tasks are not dispatched and + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + calls return a ``FAILED_PRECONDITION`` error. + + To permanently delete this queue and all of its tasks, call + [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_target: target.AppEngineHttpTarget = proto.Field( + proto.MESSAGE, + number=3, + oneof="target_type", + message=target.AppEngineHttpTarget, + ) + pull_target: target.PullTarget = proto.Field( + proto.MESSAGE, + number=4, + oneof="target_type", + message=target.PullTarget, + ) + http_target: target.HttpTarget = proto.Field( + proto.MESSAGE, + number=17, + oneof="target_type", + message=target.HttpTarget, + ) + rate_limits: "RateLimits" = proto.Field( + proto.MESSAGE, + number=5, + message="RateLimits", + ) + retry_config: "RetryConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="RetryConfig", + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + purge_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + task_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=9, + message=duration_pb2.Duration, + ) + tombstone_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + message=duration_pb2.Duration, + ) + stats: "QueueStats" = proto.Field( + proto.MESSAGE, + number=16, + message="QueueStats", + ) + + +class RateLimits(proto.Message): + r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a + task even if the queue has reached its + [RateLimits][google.cloud.tasks.v2beta2.RateLimits]. + + Attributes: + max_tasks_dispatched_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + - For [App Engine + queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], + the maximum allowed value is 500. + - This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget]. In + addition to the ``max_tasks_dispatched_per_second`` + limit, a maximum of 10 QPS of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per pull queue. + + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. 
This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. + + The default value of ``max_burst_size`` is picked by Cloud + Tasks based on the value of + [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. + + The maximum value of ``max_burst_size`` is 500. + + For App Engine queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + If + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] + is called on a queue without explicitly setting a value for + ``max_burst_size``, ``max_burst_size`` value will get + updated if + [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] + is updating + [max_dispatches_per_second][RateLimits.max_dispatches_per_second]. + max_concurrent_tasks (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. + + This field is output only for [pull + queues][google.cloud.tasks.v2beta2.PullTarget] and always + -1, which indicates no limit. No other queue types can have + ``max_concurrent_tasks`` set to -1. + + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_tasks_dispatched_per_second: float = proto.Field( + proto.DOUBLE, + number=1, + ) + max_burst_size: int = proto.Field( + proto.INT32, + number=2, + ) + max_concurrent_tasks: int = proto.Field( + proto.INT32, + number=3, + ) + + +class RetryConfig(proto.Message): + r"""Retry config. + + These settings determine how a failed task attempt is retried. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + max_attempts (int): + The maximum number of attempts for a task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be > 0. + + This field is a member of `oneof`_ ``num_attempts``. + unlimited_attempts (bool): + If true, then the number of attempts is + unlimited. + + This field is a member of `oneof`_ ``num_attempts``. + max_retry_duration (google.protobuf.duration_pb2.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. 
Once ``max_retry_duration`` time has passed
+            *and* the task has been attempted
+            [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts]
+            times, no further attempts will be made and the task will be
+            deleted.
+
+            If zero, then the task age is unlimited.
+
+            If unspecified when the queue is created, Cloud Tasks will
+            pick the default.
+
+            This field is output only for [pull
+            queues][google.cloud.tasks.v2beta2.PullTarget].
+
+            ``max_retry_duration`` will be truncated to the nearest
+            second.
+
+            This field has the same meaning as `task_age_limit in
+            queue.yaml/xml `__.
+        min_backoff (google.protobuf.duration_pb2.Duration):
+            A task will be
+            [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time]
+            for retry between
+            [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff]
+            and
+            [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff]
+            duration after it fails, if the queue's
+            [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]
+            specifies that the task should be retried.
+
+            If unspecified when the queue is created, Cloud Tasks will
+            pick the default.
+
+            This field is output only for [pull
+            queues][google.cloud.tasks.v2beta2.PullTarget].
+
+            ``min_backoff`` will be truncated to the nearest second.
+
+            This field has the same meaning as `min_backoff_seconds in
+            queue.yaml/xml `__.
+        max_backoff (google.protobuf.duration_pb2.Duration):
+            A task will be
+            [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time]
+            for retry between
+            [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff]
+            and
+            [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff]
+            duration after it fails, if the queue's
+            [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]
+            specifies that the task should be retried.
+
+            If unspecified when the queue is created, Cloud Tasks will
+            pick the default.
+
+            This field is output only for [pull
+            queues][google.cloud.tasks.v2beta2.PullTarget].
+
+            ``max_backoff`` will be truncated to the nearest second.
+
+            This field has the same meaning as `max_backoff_seconds in
+            queue.yaml/xml `__.
+        max_doublings (int):
+            The time between retries will double ``max_doublings``
+            times.
+
+            A task's retry interval starts at
+            [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff],
+            then doubles ``max_doublings`` times, then increases
+            linearly, and finally retries at intervals of
+            [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff]
+            up to
+            [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts]
+            times.
+
+            For example, if
+            [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff]
+            is 10s,
+            [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff]
+            is 300s, and ``max_doublings`` is 3, then a task will
+            first be retried in 10s. The retry interval will double
+            three times, and then increase linearly by 2^3 \* 10s.
+            Finally, the task will retry at intervals of
+            [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff]
+            until the task has been attempted
+            [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts]
+            times. Thus, the requests will retry at 10s, 20s, 40s, 80s,
+            160s, 240s, 300s, 300s, ....
+
+            If unspecified when the queue is created, Cloud Tasks will
+            pick the default.
+
+            This field is output only for [pull
+            queues][google.cloud.tasks.v2beta2.PullTarget].
+
+            This field has the same meaning as `max_doublings in
+            queue.yaml/xml `__.
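The doubling-then-linear schedule described for ``max_doublings`` can be modeled in a few lines. This is a back-of-the-envelope reconstruction of the documented example, not the service's exact algorithm::

    def retry_intervals(min_backoff, max_backoff, max_doublings, attempts):
        """Model the documented retry schedule (seconds between attempts)."""
        intervals = []
        interval = min_backoff
        doublings = 0
        for _ in range(attempts):
            intervals.append(min(interval, max_backoff))
            if interval < max_backoff:
                if doublings < max_doublings:
                    # Doubling phase.
                    interval *= 2
                    doublings += 1
                else:
                    # Linear phase: grow by 2**max_doublings * min_backoff.
                    interval += (2 ** max_doublings) * min_backoff
        return intervals

    # Reproduces the example above: [10, 20, 40, 80, 160, 240, 300, 300]
    print(retry_intervals(10, 300, 3, 8))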
+ """ + + max_attempts: int = proto.Field( + proto.INT32, + number=1, + oneof="num_attempts", + ) + unlimited_attempts: bool = proto.Field( + proto.BOOL, + number=2, + oneof="num_attempts", + ) + max_retry_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + min_backoff: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + max_backoff: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=5, + message=duration_pb2.Duration, + ) + max_doublings: int = proto.Field( + proto.INT32, + number=6, + ) + + +class QueueStats(proto.Message): + r"""Statistics for a queue. + + Attributes: + tasks_count (int): + Output only. An estimation of the number of + tasks in the queue, that is, the tasks in the + queue that haven't been executed, the tasks in + the queue which the queue has dispatched but has + not yet received a reply for, and the failed + tasks that the queue is retrying. + oldest_estimated_arrival_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. An estimation of the nearest + time in the future where a task in the queue is + scheduled to be executed. + executed_last_minute_count (int): + Output only. The number of tasks that the + queue has dispatched and received a reply for + during the last minute. This variable counts + both successful and non-successful executions. + concurrent_dispatches_count (int): + Output only. The number of requests that the + queue has dispatched but has not received a + reply for yet. + effective_execution_rate (float): + Output only. The current maximum number of + tasks per second executed by the queue. The + maximum value of this variable is controlled by + the RateLimits of the Queue. However, this value + could be less to avoid overloading the endpoints + tasks in the queue are targeting. + """ + + tasks_count: int = proto.Field( + proto.INT64, + number=1, + ) + oldest_estimated_arrival_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + executed_last_minute_count: int = proto.Field( + proto.INT64, + number=3, + ) + concurrent_dispatches_count: int = proto.Field( + proto.INT64, + number=4, + ) + effective_execution_rate: float = proto.Field( + proto.DOUBLE, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/target.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/target.py new file mode 100644 index 000000000000..bbcc1de665f7 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/target.py @@ -0,0 +1,1030 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
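Putting ``Queue``, ``RateLimits``, and ``RetryConfig`` together: a sketch that creates an App Engine queue with explicit limits. Resource IDs are placeholders, and unset fields fall back to the service-side defaults described above::

    from google.cloud import tasks_v2beta2
    from google.protobuf import duration_pb2

    client = tasks_v2beta2.CloudTasksClient()
    parent = "projects/my-project/locations/us-central1"

    queue = tasks_v2beta2.Queue(
        name=f"{parent}/queues/my-queue",
        # target_type oneof: an App Engine queue in this sketch.
        app_engine_http_target=tasks_v2beta2.AppEngineHttpTarget(),
        rate_limits=tasks_v2beta2.RateLimits(
            max_tasks_dispatched_per_second=50,
            max_concurrent_tasks=100,
        ),
        retry_config=tasks_v2beta2.RetryConfig(
            max_attempts=5,
            min_backoff=duration_pb2.Duration(seconds=10),
            max_backoff=duration_pb2.Duration(seconds=300),
            max_doublings=3,
        ),
    )
    created = client.create_queue(parent=parent, queue=queue)
    print(created.state)  # Queue.State.RUNNING once the queue is live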
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta2", + manifest={ + "HttpMethod", + "PullTarget", + "PullMessage", + "AppEngineHttpTarget", + "AppEngineHttpRequest", + "AppEngineRouting", + "HttpRequest", + "PathOverride", + "QueryOverride", + "UriOverride", + "HttpTarget", + "OAuthToken", + "OidcToken", + }, +) + + +class HttpMethod(proto.Enum): + r"""The HTTP method used to execute the task. + + Values: + HTTP_METHOD_UNSPECIFIED (0): + HTTP method unspecified + POST (1): + HTTP POST + GET (2): + HTTP GET + HEAD (3): + HTTP HEAD + PUT (4): + HTTP PUT + DELETE (5): + HTTP DELETE + PATCH (6): + HTTP PATCH + OPTIONS (7): + HTTP OPTIONS + """ + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + PATCH = 6 + OPTIONS = 7 + + +class PullTarget(proto.Message): + r"""Pull target.""" + + +class PullMessage(proto.Message): + r"""The pull message contains data that can be used by the caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] to + process the task. + + This proto can only be used for tasks in a queue which has + [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] set. + + Attributes: + payload (bytes): + A data payload consumed by the worker to + execute the task. + tag (str): + The task's tag. + + Tags allow similar tasks to be processed in a batch. If you + label tasks with a tag, your worker can [lease + tasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + with the same tag using + [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]. + For example, if you want to aggregate the events associated + with a specific user once a day, you could tag tasks with + the user ID. + + The task's tag can only be set when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + + The tag must be less than 500 characters. + + SDK compatibility: Although the SDK allows tags to be either + string or + `bytes `__, + only UTF-8 encoded tags can be used in Cloud Tasks. If a tag + isn't UTF-8 encoded, the tag will be empty when the task is + returned by Cloud Tasks. + """ + + payload: bytes = proto.Field( + proto.BYTES, + number=1, + ) + tag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AppEngineHttpTarget(proto.Message): + r"""App Engine HTTP target. + + The task will be delivered to the App Engine application hostname + specified by its + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] + and + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest]. + The documentation for + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + explains how the task's host URL is constructed. + + Using + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + app_engine_routing_override (google.cloud.tasks_v2beta2.types.AppEngineRouting): + Overrides for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + + If set, ``app_engine_routing_override`` is used for all + tasks in the queue, no matter what the setting is for the + [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. 
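``PullTarget`` and ``PullMessage`` pair with the lease/acknowledge flow sketched earlier. A sketch that enqueues work on a pull queue, with placeholder queue name, payload, and tag::

    from google.cloud import tasks_v2beta2

    client = tasks_v2beta2.CloudTasksClient()
    parent = client.queue_path("my-project", "us-central1", "my-pull-queue")

    task = tasks_v2beta2.Task(
        # payload_type oneof: pull_message requires the queue to have
        # pull_target set.
        pull_message=tasks_v2beta2.PullMessage(
            payload=b'{"user_id": 1234}',
            # Tagging lets workers lease related tasks together via filter.
            tag="user-1234",
        )
    )
    created = client.create_task(parent=parent, task=task)
    print(created.name)  # server-assigned, since no task ID was specified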
+ """ + + app_engine_routing_override: "AppEngineRouting" = proto.Field( + proto.MESSAGE, + number=1, + message="AppEngineRouting", + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. + + This proto can only be used for tasks in a queue which has + [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] + set. + + Using + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. + + The [AppEngineRouting][google.cloud.tasks.v2beta2.AppEngineRouting] + used to construct the URL that the task is delivered to can be set + at the queue-level or task-level: + + - If set, + [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] + is used for all tasks in the queue, no matter what the setting is + for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] ``+`` + [relative_url][google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative_url] + + Tasks can be dispatched to secure app handlers, unsecure app + handlers, and URIs restricted with + ```login: admin`` `__. + Because tasks are not run as any user, they cannot be dispatched to + URIs restricted with + ```login: required`` `__ + Task dispatches also do not follow redirects. + + The task attempt has succeeded if the app's request handler returns + an HTTP response code in the range [``200`` - ``299``]. The task + attempt has failed if the app's handler returns a non-2xx response + code or Cloud Tasks does not receive response before the + [deadline][Task.dispatch_deadline]. Failed tasks will be retried + according to the [retry + configuration][google.cloud.tasks.v2beta2.Queue.retry_config]. + ``503`` (Service Unavailable) is considered an App Engine system + error instead of an application error and will cause Cloud Tasks' + traffic congestion control to temporarily throttle the queue's + dispatches. Unlike other types of task targets, a ``429`` (Too Many + Requests) response from an app handler does not cause traffic + congestion control to throttle the queue. + + Attributes: + http_method (google.cloud.tasks_v2beta2.types.HttpMethod): + The HTTP method to use for the request. The default is POST. + + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http_method, + otherwise the task attempt fails with error code 405 (Method + Not Allowed). See `Writing a push task request + handler `__ + and the App Engine documentation for your runtime on `How + Requests are + Handled `__. 
+ app_engine_routing (google.cloud.tasks_v2beta2.types.AppEngineRouting): + Task-level setting for App Engine routing. + + If set, + [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] + is used for all tasks in the queue, no matter what the + setting is for the [task-level + app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + relative_url (str): + The relative URL. + + The relative URL must begin with "/" and must be + a valid HTTP relative URL. It can contain a path + and query string arguments. If the relative URL + is empty, then the root path "/" will be used. + No spaces are allowed, and the maximum length + allowed is 2083 characters. + headers (MutableMapping[str, str]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + Repeated headers are not supported but a header value can + contain commas. + + Cloud Tasks sets some headers to default values: + + - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [payload][google.cloud.tasks.v2beta2.AppEngineHttpRequest.payload], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. + + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-*`` + - ``X-AppEngine-*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2beta2.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + documentation. + payload (bytes): + Payload. + + The payload will be sent as the HTTP message body. A message + body, and thus a payload, is allowed only if the HTTP method + is POST or PUT. It is an error to set a data payload on a + task with an incompatible + [HttpMethod][google.cloud.tasks.v2beta2.HttpMethod]. + """ + + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=1, + enum="HttpMethod", + ) + app_engine_routing: "AppEngineRouting" = proto.Field( + proto.MESSAGE, + number=2, + message="AppEngineRouting", + ) + relative_url: str = proto.Field( + proto.STRING, + number=3, + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + payload: bytes = proto.Field( + proto.BYTES, + number=5, + ) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. 
+ + Defines routing characteristics specific to App Engine - service, + version, and instance. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + are the empty string. + version (str): + App version. + + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. + + For more information, see `How Requests are + Routed `__. + + The host is constructed as: + + - ``host = [application_domain_name]``\ + ``| [service] + '.' + [application_domain_name]``\ + ``| [version] + '.' + [application_domain_name]``\ + ``| [version_dot_service]+ '.' + [application_domain_name]``\ + ``| [instance] + '.' + [application_domain_name]``\ + ``| [instance_dot_service] + '.' + [application_domain_name]``\ + ``| [instance_dot_version] + '.' + [application_domain_name]``\ + ``| [instance_dot_version_dot_service] + '.' + [application_domain_name]`` + + - ``application_domain_name`` = The domain name of the app, + for example .appspot.com, which is associated with the + queue's project ID. 
Some tasks which were created using + the App Engine SDK use a custom domain name. + + - ``service =`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``version =`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + + - ``version_dot_service =`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``instance =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + + - ``instance_dot_service =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + - ``instance_dot_version =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + + - ``instance_dot_version_dot_service =`` + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + + If + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + is empty, then the task will be sent to the service which is + the default service when the task is attempted. + + If + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + is empty, then the task will be sent to the version which is + the default version when the task is attempted. + + If + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + is empty, then the task will be sent to an instance which is + available when the task is attempted. + + If + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], + or + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + is invalid, then the task will be sent to the default + version of the default service when the task is attempted. + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + instance: str = proto.Field( + proto.STRING, + number=3, + ) + host: str = proto.Field( + proto.STRING, + number=4, + ) + + +class HttpRequest(proto.Message): + r"""HTTP request. + + The task will be pushed to the worker as an HTTP request. An + HTTP request embodies a url, an http method, headers, body and + authorization for the http task. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + url (str): + Required. The full url path that the request will be sent + to. + + This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode + some characters for safety and compatibility. The maximum + allowed URL length is 2083 characters after encoding. + + The ``Location`` header response from a redirect response + [``300`` - ``399``] may be followed. The redirect is not + counted as a separate attempt. + http_method (google.cloud.tasks_v2beta2.types.HttpMethod): + The HTTP method to use for the request. The + default is POST. + headers (MutableMapping[str, str]): + HTTP request headers. 
+ + This map contains the header field names and values. Headers + can be set when running the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + or [task is + created][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. + + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. + + A partial list of headers that will be ignored or replaced + is: + + - Any header that is prefixed with "X-CloudTasks-" will be + treated as service header. Service headers define + properties of the task and are predefined in CloudTask. + - Host: This will be computed by Cloud Tasks and derived + from + [HttpRequest.url][google.cloud.tasks.v2beta2.HttpRequest.url]. + - Content-Length: This will be computed by Cloud Tasks. + - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. + - ``X-Google-*``: Google use only. + - ``X-AppEngine-*``: Google use only. + + ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the + [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + + Headers which can have multiple values (according to + RFC2616) can be specified using comma-separated values. + + The size of the headers must be less than 80KB. + body (bytes): + HTTP request body. + + A request body is allowed only if the [HTTP + method][google.cloud.tasks.v2beta2.HttpRequest.http_method] + is POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2beta2.HttpMethod]. + oauth_token (google.cloud.tasks_v2beta2.types.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + + This field is a member of `oneof`_ ``authorization_header``. + oidc_token (google.cloud.tasks_v2beta2.types.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. + + This field is a member of `oneof`_ ``authorization_header``. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=2, + enum="HttpMethod", + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + body: bytes = proto.Field( + proto.BYTES, + number=4, + ) + oauth_token: "OAuthToken" = proto.Field( + proto.MESSAGE, + number=5, + oneof="authorization_header", + message="OAuthToken", + ) + oidc_token: "OidcToken" = proto.Field( + proto.MESSAGE, + number=6, + oneof="authorization_header", + message="OidcToken", + ) + + +class PathOverride(proto.Message): + r"""PathOverride. + + Path message defines path override for HTTP targets. + + Attributes: + path (str): + The URI path (e.g., /users/1234). Default is + an empty string. + """ + + path: str = proto.Field( + proto.STRING, + number=1, + ) + + +class QueryOverride(proto.Message): + r"""QueryOverride. + + Query message defines query override for HTTP targets. 
+ + Attributes: + query_params (str): + The query parameters (e.g., + qparam1=123&qparam2=456). Default is an empty + string. + """ + + query_params: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UriOverride(proto.Message): + r"""Uri Override. + + When specified, all the HTTP tasks inside the queue will be + partially or fully overridden depending on the configured + values. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + scheme (google.cloud.tasks_v2beta2.types.UriOverride.Scheme): + Scheme override. + + When specified, the task URI scheme is replaced + by the provided value (HTTP or HTTPS). + + This field is a member of `oneof`_ ``_scheme``. + host (str): + Host override. + + When specified, replaces the host part of the task URL. For + example, if the task URL is "https://www.google.com," and + host value is set to "example.net", the overridden URI will + be changed to "https://example.net." Host value cannot be an + empty string (INVALID_ARGUMENT). + + This field is a member of `oneof`_ ``_host``. + port (int): + Port override. + + When specified, replaces the port part of the + task URI. For instance, for a URI + http://www.google.com/foo and port=123, the + overridden URI becomes + http://www.google.com:123/foo. Note that the + port value must be a positive integer. Setting + the port to 0 (Zero) clears the URI port. + + This field is a member of `oneof`_ ``_port``. + path_override (google.cloud.tasks_v2beta2.types.PathOverride): + URI path. + + When specified, replaces the existing path of + the task URL. Setting the path value to an empty + string clears the URI path segment. + query_override (google.cloud.tasks_v2beta2.types.QueryOverride): + URI Query. + + When specified, replaces the query part of the + task URI. Setting the query value to an empty + string clears the URI query segment. + uri_override_enforce_mode (google.cloud.tasks_v2beta2.types.UriOverride.UriOverrideEnforceMode): + URI Override Enforce Mode + + When specified, determines the Target + UriOverride mode. If not specified, it defaults + to ALWAYS. + """ + + class Scheme(proto.Enum): + r"""The Scheme for an HTTP request. By default, it is HTTPS. + + Values: + SCHEME_UNSPECIFIED (0): + Scheme unspecified. Defaults to HTTPS. + HTTP (1): + Convert the scheme to HTTP, e.g., + https://www.google.ca will change to + http://www.google.ca. + HTTPS (2): + Convert the scheme to HTTPS, e.g., + http://www.google.ca will change to + https://www.google.ca. + """ + SCHEME_UNSPECIFIED = 0 + HTTP = 1 + HTTPS = 2 + + class UriOverrideEnforceMode(proto.Enum): + r"""UriOverrideEnforceMode mode is to define enforcing mode for + the override modes. + + Values: + URI_OVERRIDE_ENFORCE_MODE_UNSPECIFIED (0): + OverrideMode Unspecified. Defaults to ALWAYS. + IF_NOT_EXISTS (1): + In the IF_NOT_EXISTS mode, queue-level configuration is only + applied where task-level configuration does not exist. 
+ ALWAYS (2): + In the ALWAYS mode, queue-level configuration + overrides all task-level configuration + """ + URI_OVERRIDE_ENFORCE_MODE_UNSPECIFIED = 0 + IF_NOT_EXISTS = 1 + ALWAYS = 2 + + scheme: Scheme = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=Scheme, + ) + host: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + port: int = proto.Field( + proto.INT64, + number=3, + optional=True, + ) + path_override: "PathOverride" = proto.Field( + proto.MESSAGE, + number=4, + message="PathOverride", + ) + query_override: "QueryOverride" = proto.Field( + proto.MESSAGE, + number=5, + message="QueryOverride", + ) + uri_override_enforce_mode: UriOverrideEnforceMode = proto.Field( + proto.ENUM, + number=6, + enum=UriOverrideEnforceMode, + ) + + +class HttpTarget(proto.Message): + r"""HTTP target. + + When specified as a [Queue][target_type], all the tasks with + [HttpRequest] will be overridden according to the target. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri_override (google.cloud.tasks_v2beta2.types.UriOverride): + Uri override. + + When specified, overrides the execution Uri for + all the tasks in the queue. + http_method (google.cloud.tasks_v2beta2.types.HttpMethod): + The HTTP method to use for the request. + + When specified, it overrides + [HttpRequest][google.cloud.tasks.v2beta2.HttpTarget.http_method] + for the task. Note that if the value is set to + [HttpMethod][GET] the [HttpRequest][body] of the task will + be ignored at execution time. + header_overrides (MutableSequence[google.cloud.tasks_v2beta2.types.HttpTarget.HeaderOverride]): + HTTP target headers. + + This map contains the header field names and values. Headers + will be set when running the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + and/or [task is + created][google.cloud.tasks.v2beta2.CloudTasks.BufferTask]. + + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. + + A partial list of headers that will be ignored or replaced + is: + + - Any header that is prefixed with "X-CloudTasks-" will be + treated as service header. Service headers define + properties of the task and are predefined in CloudTask. + - Host: This will be computed by Cloud Tasks and derived + from + [HttpRequest.url][google.cloud.tasks.v2beta2.HttpRequest.url]. + - Content-Length: This will be computed by Cloud Tasks. + - User-Agent: This will be set to ``"Google-CloudTasks"``. + - ``X-Google-*``: Google use only. + - ``X-AppEngine-*``: Google use only. + + ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the + [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + + Headers which can have multiple values (according to + RFC2616) can be specified using comma-separated values. + + The size of the headers must be less than 80KB. Queue-level + headers to override headers of all the tasks in the queue. 
+ oauth_token (google.cloud.tasks_v2beta2.types.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + + This field is a member of `oneof`_ ``authorization_header``. + oidc_token (google.cloud.tasks_v2beta2.types.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. + + This field is a member of `oneof`_ ``authorization_header``. + """ + + class Header(proto.Message): + r"""Defines a header message. A header can have a key and a + value. + + Attributes: + key (str): + The key of the header. + value (str): + The value of the header. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + + class HeaderOverride(proto.Message): + r"""Wraps the Header object. + + Attributes: + header (google.cloud.tasks_v2beta2.types.HttpTarget.Header): + header embodying a key and a value. + """ + + header: "HttpTarget.Header" = proto.Field( + proto.MESSAGE, + number=1, + message="HttpTarget.Header", + ) + + uri_override: "UriOverride" = proto.Field( + proto.MESSAGE, + number=1, + message="UriOverride", + ) + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=2, + enum="HttpMethod", + ) + header_overrides: MutableSequence[HeaderOverride] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=HeaderOverride, + ) + oauth_token: "OAuthToken" = proto.Field( + proto.MESSAGE, + number=5, + oneof="authorization_header", + message="OAuthToken", + ) + oidc_token: "OidcToken" = proto.Field( + proto.MESSAGE, + number=6, + oneof="authorization_header", + message="OidcToken", + ) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + scope: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. 
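+
+    A minimal construction sketch (the service account and audience below
+    are hypothetical placeholder values):
+
+    .. code-block:: python
+
+        from google.cloud import tasks_v2beta2
+
+        token = tasks_v2beta2.OidcToken(
+            service_account_email="sa@my-project.iam.gserviceaccount.com",
+            audience="https://example.com/task-handler",
+        )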
+ """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/task.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/task.py new file mode 100644 index 000000000000..3fce5b7323a2 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta2/types/task.py @@ -0,0 +1,308 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.tasks_v2beta2.types import target + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta2", + manifest={ + "Task", + "TaskStatus", + "AttemptStatus", + }, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + + The task name. + + The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the task's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + app_engine_http_request (google.cloud.tasks_v2beta2.types.AppEngineHttpRequest): + App Engine HTTP request that is sent to the task's target. + Can be set only if + [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] + is set on the queue. + + An App Engine task is a task that has + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + set. + + This field is a member of `oneof`_ ``payload_type``. + pull_message (google.cloud.tasks_v2beta2.types.PullMessage): + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + to process the task. 
Can be set only if + [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] + is set on the queue. + + A pull task is a task that has + [PullMessage][google.cloud.tasks.v2beta2.PullMessage] set. + + This field is a member of `oneof`_ ``payload_type``. + http_request (google.cloud.tasks_v2beta2.types.HttpRequest): + HTTP request that is sent to the task's target. + + An HTTP task is a task that has + [HttpRequest][google.cloud.tasks.v2beta2.HttpRequest] set. + + This field is a member of `oneof`_ ``payload_type``. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the task is scheduled to be attempted. + + For App Engine queues, this is when the task will be + attempted or retried. + + For pull queues, this is the time when the task is available + to be leased; if a task is currently leased, this is the + time when the current lease expires, that is, the time that + the task was leased plus the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + ``schedule_time`` will be truncated to the nearest + microsecond. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that the task was created. + + ``create_time`` will be truncated to the nearest second. + status (google.cloud.tasks_v2beta2.types.TaskStatus): + Output only. The task status. + view (google.cloud.tasks_v2beta2.types.Task.View): + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] has been returned. + """ + + class View(proto.Enum): + r"""The view specifies a subset of + [Task][google.cloud.tasks.v2beta2.Task] data. + + When a task is returned in a response, not all information is + retrieved by default because some data, such as payloads, might be + desirable to return only when needed because of its large size or + because of the sensitivity of data that it contains. + + Values: + VIEW_UNSPECIFIED (0): + Unspecified. Defaults to BASIC. + BASIC (1): + The basic view omits fields which can be large or can + contain sensitive data. + + This view does not include the ([payload in + AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + and [payload in + PullMessage][google.cloud.tasks.v2beta2.PullMessage.payload]). + These payloads are desirable to return only when needed, + because they can be large and because of the sensitivity of + the data that you choose to store in it. + FULL (2): + All information is returned. + + Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Queue][google.cloud.tasks.v2beta2.Queue] resource. 
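+
+        A hedged request sketch (the task name below is a placeholder):
+
+        .. code-block:: python
+
+            from google.cloud import tasks_v2beta2
+
+            request = tasks_v2beta2.GetTaskRequest(
+                name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task",
+                response_view=tasks_v2beta2.Task.View.FULL,
+            )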
+ """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_request: target.AppEngineHttpRequest = proto.Field( + proto.MESSAGE, + number=3, + oneof="payload_type", + message=target.AppEngineHttpRequest, + ) + pull_message: target.PullMessage = proto.Field( + proto.MESSAGE, + number=4, + oneof="payload_type", + message=target.PullMessage, + ) + http_request: target.HttpRequest = proto.Field( + proto.MESSAGE, + number=13, + oneof="payload_type", + message=target.HttpRequest, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + status: "TaskStatus" = proto.Field( + proto.MESSAGE, + number=7, + message="TaskStatus", + ) + view: View = proto.Field( + proto.ENUM, + number=8, + enum=View, + ) + + +class TaskStatus(proto.Message): + r"""Status of the task. + + Attributes: + attempt_dispatch_count (int): + Output only. The number of attempts + dispatched. + This count includes attempts which have been + dispatched but haven't received a response. + attempt_response_count (int): + Output only. The number of attempts which have received a + response. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. + first_attempt_status (google.cloud.tasks_v2beta2.types.AttemptStatus): + Output only. The status of the task's first attempt. + + Only + [dispatch_time][google.cloud.tasks.v2beta2.AttemptStatus.dispatch_time] + will be set. The other + [AttemptStatus][google.cloud.tasks.v2beta2.AttemptStatus] + information is not retained by Cloud Tasks. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. + last_attempt_status (google.cloud.tasks_v2beta2.types.AttemptStatus): + Output only. The status of the task's last attempt. + + This field is not calculated for [pull + tasks][google.cloud.tasks.v2beta2.PullMessage]. + """ + + attempt_dispatch_count: int = proto.Field( + proto.INT32, + number=1, + ) + attempt_response_count: int = proto.Field( + proto.INT32, + number=2, + ) + first_attempt_status: "AttemptStatus" = proto.Field( + proto.MESSAGE, + number=3, + message="AttemptStatus", + ) + last_attempt_status: "AttemptStatus" = proto.Field( + proto.MESSAGE, + number=4, + message="AttemptStatus", + ) + + +class AttemptStatus(proto.Message): + r"""The status of a task attempt. + + Attributes: + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was scheduled. + + ``schedule_time`` will be truncated to the nearest + microsecond. + dispatch_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was dispatched. + + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt response was + received. + + ``response_time`` will be truncated to the nearest + microsecond. + response_status (google.rpc.status_pb2.Status): + Output only. The response from the target for + this attempt. + If the task has not been attempted or the task + is currently running then the response status is + unset. 
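+
+    A small inspection sketch (assumes ``task`` is a previously fetched
+    :class:`Task`; illustrative only):
+
+    .. code-block:: python
+
+        last = task.status.last_attempt_status
+        print("scheduled:", last.schedule_time)
+        print("dispatched:", last.dispatch_time)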
+ """ + + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + dispatch_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + response_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + response_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/__init__.py new file mode 100644 index 000000000000..d66d7b53db1f --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/__init__.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.tasks_v2beta3 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_tasks import CloudTasksAsyncClient, CloudTasksClient +from .types.cloudtasks import ( + BufferTaskRequest, + BufferTaskResponse, + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, +) +from .types.queue import ( + Queue, + QueueStats, + RateLimits, + RetryConfig, + StackdriverLoggingConfig, +) +from .types.target import ( + AppEngineHttpQueue, + AppEngineHttpRequest, + AppEngineRouting, + HttpMethod, + HttpRequest, + HttpTarget, + OAuthToken, + OidcToken, + PathOverride, + PullMessage, + QueryOverride, + UriOverride, +) +from .types.task import Attempt, Task + +__all__ = ( + "CloudTasksAsyncClient", + "AppEngineHttpQueue", + "AppEngineHttpRequest", + "AppEngineRouting", + "Attempt", + "BufferTaskRequest", + "BufferTaskResponse", + "CloudTasksClient", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "HttpMethod", + "HttpRequest", + "HttpTarget", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "OAuthToken", + "OidcToken", + "PathOverride", + "PauseQueueRequest", + "PullMessage", + "PurgeQueueRequest", + "QueryOverride", + "Queue", + "QueueStats", + "RateLimits", + "ResumeQueueRequest", + "RetryConfig", + "RunTaskRequest", + "StackdriverLoggingConfig", + "Task", + "UpdateQueueRequest", + "UriOverride", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_metadata.json b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_metadata.json new file mode 100644 index 000000000000..13303a87bd1b --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_metadata.json @@ -0,0 +1,283 
@@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.tasks_v2beta3", + "protoPackage": "google.cloud.tasks.v2beta3", + "schema": "1.0", + "services": { + "CloudTasks": { + "clients": { + "grpc": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "BufferTask": { + "methods": [ + "buffer_task" + ] + }, + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudTasksAsyncClient", + "rpcs": { + "BufferTask": { + "methods": [ + "buffer_task" + ] + }, + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + }, + "rest": { + "libraryClient": "CloudTasksClient", + "rpcs": { + "BufferTask": { + "methods": [ + "buffer_task" + ] + }, + "CreateQueue": { + "methods": [ + "create_queue" + ] + }, + "CreateTask": { + "methods": [ + "create_task" + ] + }, + "DeleteQueue": { + "methods": [ + "delete_queue" + ] + }, + "DeleteTask": { + "methods": [ + "delete_task" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetQueue": { + "methods": [ + "get_queue" + ] + }, + "GetTask": { + "methods": [ + "get_task" + ] + }, + "ListQueues": { + "methods": [ + "list_queues" + ] + }, + "ListTasks": { + "methods": [ + "list_tasks" + ] + }, + "PauseQueue": { + "methods": [ + "pause_queue" + ] + }, + "PurgeQueue": { + "methods": [ + "purge_queue" + ] + }, + "ResumeQueue": { + "methods": [ + "resume_queue" + ] + }, + "RunTask": { + "methods": [ + "run_task" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateQueue": { + "methods": [ + "update_queue" + ] + } + } + } + } + } + } +} diff --git 
a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py new file mode 100644 index 000000000000..3344051a673d --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.14.2" # {x-release-please-version} diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/py.typed b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/py.typed new file mode 100644 index 000000000000..41f0b1b8d473 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-tasks package uses inline types. diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py new file mode 100644 index 000000000000..eddc5977fd56 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import CloudTasksAsyncClient +from .client import CloudTasksClient + +__all__ = ( + "CloudTasksClient", + "CloudTasksAsyncClient", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py new file mode 100644 index 000000000000..13ae750b733d --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/async_client.py @@ -0,0 +1,2528 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task + +from .client import CloudTasksClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudTasksTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport + + +class CloudTasksAsyncClient: + """Cloud Tasks allows developers to manage the execution of + background work in their applications. 
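+
+    A minimal instantiation sketch (credentials are resolved from the
+    environment unless passed explicitly):
+
+    .. code-block:: python
+
+        from google.cloud import tasks_v2beta3
+
+        client = tasks_v2beta3.CloudTasksAsyncClient()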
+    """
+
+    _client: CloudTasksClient
+
+    DEFAULT_ENDPOINT = CloudTasksClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = CloudTasksClient.DEFAULT_MTLS_ENDPOINT
+
+    queue_path = staticmethod(CloudTasksClient.queue_path)
+    parse_queue_path = staticmethod(CloudTasksClient.parse_queue_path)
+    task_path = staticmethod(CloudTasksClient.task_path)
+    parse_task_path = staticmethod(CloudTasksClient.parse_task_path)
+    common_billing_account_path = staticmethod(
+        CloudTasksClient.common_billing_account_path
+    )
+    parse_common_billing_account_path = staticmethod(
+        CloudTasksClient.parse_common_billing_account_path
+    )
+    common_folder_path = staticmethod(CloudTasksClient.common_folder_path)
+    parse_common_folder_path = staticmethod(CloudTasksClient.parse_common_folder_path)
+    common_organization_path = staticmethod(CloudTasksClient.common_organization_path)
+    parse_common_organization_path = staticmethod(
+        CloudTasksClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(CloudTasksClient.common_project_path)
+    parse_common_project_path = staticmethod(CloudTasksClient.parse_common_project_path)
+    common_location_path = staticmethod(CloudTasksClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        CloudTasksClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudTasksAsyncClient: The constructed client.
+        """
+        return CloudTasksClient.from_service_account_info.__func__(CloudTasksAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            CloudTasksAsyncClient: The constructed client.
+        """
+        return CloudTasksClient.from_service_account_file.__func__(CloudTasksAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudTasksClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(CloudTasksClient).get_transport_class, type(CloudTasksClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, CloudTasksTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud tasks client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudTasksTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudTasksClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_queues( + self, + request: Optional[Union[cloudtasks.ListQueuesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesAsyncPager: + r"""Lists queues. + + Queues are returned in lexicographical order. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta3
+
+            async def sample_list_queues():
+                # Create a client
+                client = tasks_v2beta3.CloudTasksAsyncClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta3.ListQueuesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_queues(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.tasks_v2beta3.types.ListQueuesRequest, dict]]):
+                The request object. Request message for
+                [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues].
+            parent (:class:`str`):
+                Required. The location name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListQueuesAsyncPager:
+                Response message for
+                [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = cloudtasks.ListQueuesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_queues,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=20.0,
+            ),
+            default_timeout=20.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListQueuesAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
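+        # (The pager lazily issues further ListQueues RPCs as the caller
+        # iterates past the first page.)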
+ return response + + async def get_queue( + self, + request: Optional[Union[cloudtasks.GetQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_get_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.GetQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.get_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.GetQueueRequest, dict]]): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + name (:class:`str`): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_queue( + self, + request: Optional[Union[cloudtasks.CreateQueueRequest, dict]] = None, + *, + parent: Optional[str] = None, + queue: Optional[gct_queue.Queue] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_create_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.CreateQueueRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.CreateQueueRequest, dict]]): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + parent (:class:`str`): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (:class:`google.cloud.tasks_v2beta3.types.Queue`): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] + cannot be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = cloudtasks.CreateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_queue( + self, + request: Optional[Union[cloudtasks.UpdateQueueRequest, dict]] = None, + *, + queue: Optional[gct_queue.Queue] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_update_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.UpdateQueueRequest( + ) + + # Make the request + response = await client.update_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.UpdateQueueRequest, dict]]): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + queue (:class:`google.cloud.tasks_v2beta3.types.Queue`): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] cannot be + changed. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. 
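+
+                For example (illustrative field path), a mask of
+                ``rate_limits.max_dispatches_per_second`` restricts the
+                update to that single field.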
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.UpdateQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_queue( + self, + request: Optional[Union[cloudtasks.DeleteQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_delete_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.DeleteQueueRequest( + name="name_value", + ) + + # Make the request + await client.delete_queue(request=request) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.DeleteQueueRequest, dict]]): + The request object. 
Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def purge_queue( + self, + request: Optional[Union[cloudtasks.PurgeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_purge_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.PurgeQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.purge_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.PurgeQueueRequest, dict]]): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. 
+ name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PurgeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.purge_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def pause_queue( + self, + request: Optional[Union[cloudtasks.PauseQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_pause_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.PauseQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.pause_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.PauseQueueRequest, dict]]): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.PauseQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def resume_queue( + self, + request: Optional[Union[cloudtasks.ResumeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading.
If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_resume_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.ResumeQueueRequest( + name="name_value", + ) + + # Make the request + response = await client.resume_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.ResumeQueueRequest, dict]]): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + name (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ResumeQueueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_queue, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue].
Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`MutableSequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, + permissions=permissions, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. 
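+ # The awaited call returns an iam_policy_pb2.TestIamPermissionsResponse; per the + # IAM API contract, its `permissions` field holds the subset of the requested + # permissions that the caller actually has on the resource.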
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tasks( + self, + request: Optional[Union[cloudtasks.ListTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksAsyncPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_list_tasks(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.ListTasksRequest, dict]]): + The request object. Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListTasksAsyncPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.ListTasksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
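+ # Note: `wrap_method` below layers the generated defaults onto the bare + # transport callable: an exponential-backoff retry (0.1s initial delay, 1.3x + # multiplier, capped at 10s, with a 20.0s overall deadline) for + # DeadlineExceeded and ServiceUnavailable errors, plus a 20.0s default timeout; + # the per-call `retry`/`timeout` arguments override these defaults when supplied.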
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTasksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_task( + self, + request: Optional[Union[cloudtasks.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_get_task(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.GetTaskRequest, dict]]): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.GetTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
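+ # At most one of `request` and the flattened `name` argument was supplied + # (the ValueError check above rejects the combination), so copying `name` + # onto the freshly coerced request cannot overwrite a caller-provided field.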
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_task( + self, + request: Optional[Union[cloudtasks.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[gct_task.Task] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_create_task(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.CreateTaskRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.CreateTaskRequest, dict]]): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + parent (:class:`str`): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (:class:`google.cloud.tasks_v2beta3.types.Task`): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. 
If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1 hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9 days after the original task was deleted or + executed. + + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.CreateTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_task( + self, + request: Optional[Union[cloudtasks.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_delete_task(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + await client.delete_task(request=request) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.DeleteTaskRequest, dict]]): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.DeleteTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def run_task( + self, + request: Optional[Union[cloudtasks.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. 
+ + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_run_task(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = await client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.RunTaskRequest, dict]]): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + name (:class:`str`): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.RunTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.run_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
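+ # run_task was wrapped above without a default retry policy (unlike get_task + # or delete_task), so transient errors surface directly from this await unless + # the caller passes an explicit `retry`.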
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def buffer_task( + self, + request: Optional[Union[cloudtasks.BufferTaskRequest, dict]] = None, + *, + queue: Optional[str] = None, + task_id: Optional[str] = None, + body: Optional[httpbody_pb2.HttpBody] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.BufferTaskResponse: + r"""Creates and buffers a new task without the need to explicitly + define a Task message. The queue must have [HTTP + target][google.cloud.tasks.v2beta3.HttpTarget]. To create the + task with a custom ID, use the following format and set TASK_ID + to your desired ID: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer + To create the task with an automatically generated ID, use the + following format: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. + Note: This feature is in its experimental stage. You must + request access to the API through the `Cloud Tasks BufferTask + Experiment Signup form `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + async def sample_buffer_task(): + # Create a client + client = tasks_v2beta3.CloudTasksAsyncClient() + + # Initialize request argument(s) + request = tasks_v2beta3.BufferTaskRequest( + queue="queue_value", + ) + + # Make the request + response = await client.buffer_task(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.tasks_v2beta3.types.BufferTaskRequest, dict]]): + The request object. Request message for + [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. + queue (:class:`str`): + Required. The parent queue name. For example: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` + + The queue must already exist. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task_id (:class:`str`): + Optional. Task ID for the task being + created. If not provided, a random task + ID is assigned to the task. + + This corresponds to the ``task_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + body (:class:`google.api.httpbody_pb2.HttpBody`): + Optional. Body of the HTTP request. + + The body can take any generic value. The value is + written to the [HttpRequest][payload] of the [Task]. + + This corresponds to the ``body`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.BufferTaskResponse: + Response message for + [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, task_id, body]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloudtasks.BufferTaskRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if task_id is not None: + request.task_id = task_id + if body is not None: + request.body = body + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.buffer_task, + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("queue", request.queue), + ("task_id", request.task_id), + ) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "CloudTasksAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudTasksAsyncClient",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py new file mode 100644 index 000000000000..568dfe69a331 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/client.py @@ -0,0 +1,2702 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.tasks_v2beta3 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api import httpbody_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task + +from .transports.base import DEFAULT_CLIENT_INFO, CloudTasksTransport +from .transports.grpc import CloudTasksGrpcTransport +from .transports.grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .transports.rest import CloudTasksRestTransport + + +class CloudTasksClientMeta(type): + """Metaclass for the CloudTasks client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] + _transport_registry["grpc"] = CloudTasksGrpcTransport + _transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport + _transport_registry["rest"] = CloudTasksRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudTasksTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CloudTasksClient(metaclass=CloudTasksClientMeta): + """Cloud Tasks allows developers to manage the execution of + background work in their applications. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+ + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudtasks.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudTasksClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudTasksTransport: + """Returns the transport used by the client instance. + + Returns: + CloudTasksTransport: The transport used by the client + instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def queue_path(
+        project: str,
+        location: str,
+        queue: str,
+    ) -> str:
+        """Returns a fully-qualified queue string."""
+        return "projects/{project}/locations/{location}/queues/{queue}".format(
+            project=project,
+            location=location,
+            queue=queue,
+        )
+
+    @staticmethod
+    def parse_queue_path(path: str) -> Dict[str, str]:
+        """Parses a queue path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def task_path(
+        project: str,
+        location: str,
+        queue: str,
+        task: str,
+    ) -> str:
+        """Returns a fully-qualified task string."""
+        return "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(
+            project=project,
+            location=location,
+            queue=queue,
+            task=task,
+        )
+
+    @staticmethod
+    def parse_task_path(path: str) -> Dict[str, str]:
+        """Parses a task path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/queues/(?P<queue>.+?)/tasks/(?P<task>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, CloudTasksTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the cloud tasks client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CloudTasksTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudTasksTransport): + # transport is a CloudTasksTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_queues( + self, + request: Optional[Union[cloudtasks.ListQueuesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListQueuesPager: + r"""Lists queues. + + Queues are returned in lexicographical order. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_list_queues(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.ListQueuesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_queues(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.ListQueuesRequest, dict]): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + parent (str): + Required. The location name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListQueuesPager: + Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListQueuesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListQueuesRequest): + request = cloudtasks.ListQueuesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_queues] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListQueuesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
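+        # Usage sketch (hypothetical project and location IDs; the parent
+        # must name an existing location):
+        #
+        #   client = tasks_v2beta3.CloudTasksClient()
+        #   for q in client.list_queues(
+        #       parent="projects/my-project/locations/us-central1"
+        #   ):
+        #       print(q.name)  # the pager fetches additional pages lazily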
+ return response + + def get_queue( + self, + request: Optional[Union[cloudtasks.GetQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Gets a queue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_get_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.GetQueueRequest( + name="name_value", + ) + + # Make the request + response = client.get_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.GetQueueRequest, dict]): + The request object. Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetQueueRequest): + request = cloudtasks.GetQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
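+        # Usage sketch (hypothetical IDs; assumes the queue already exists):
+        #
+        #   client = tasks_v2beta3.CloudTasksClient()
+        #   name = client.queue_path("my-project", "us-central1", "my-queue")
+        #   queue = client.get_queue(name=name)
+        #   print(queue.state)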
+ return response + + def create_queue( + self, + request: Optional[Union[cloudtasks.CreateQueueRequest, dict]] = None, + *, + parent: Optional[str] = None, + queue: Optional[gct_queue.Queue] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_create_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.CreateQueueRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.CreateQueueRequest, dict]): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + queue (google.cloud.tasks_v2beta3.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] + cannot be the same as an existing queue. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, queue]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateQueueRequest): + request = cloudtasks.CreateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if queue is not None: + request.queue = queue + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_queue( + self, + request: Optional[Union[cloudtasks.UpdateQueueRequest, dict]] = None, + *, + queue: Optional[gct_queue.Queue] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_update_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.UpdateQueueRequest( + ) + + # Make the request + response = client.update_queue(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.UpdateQueueRequest, dict]): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + queue (google.cloud.tasks_v2beta3.types.Queue): + Required. The queue to create or update. + + The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] must be + specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be + ignored. The queue's + [name][google.cloud.tasks.v2beta3.Queue.name] cannot be + changed. + + This corresponds to the ``queue`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields + of the queue are being updated. + If empty, then all fields will be + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([queue, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.UpdateQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.UpdateQueueRequest): + request = cloudtasks.UpdateQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("queue.name", request.queue.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_queue( + self, + request: Optional[Union[cloudtasks.DeleteQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_delete_queue(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.DeleteQueueRequest( + name="name_value", + ) + + # Make the request + client.delete_queue(request=request) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.DeleteQueueRequest, dict]): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + name (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteQueueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteQueueRequest): + request = cloudtasks.DeleteQueueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def purge_queue( + self, + request: Optional[Union[cloudtasks.PurgeQueueRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta3
+
+            def sample_purge_queue():
+                # Create a client
+                client = tasks_v2beta3.CloudTasksClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta3.PurgeQueueRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.purge_queue(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.tasks_v2beta3.types.PurgeQueueRequest, dict]):
+                The request object. Request message for
+                [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue].
+            name (str):
+                Required. The queue name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta3.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, queue types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudtasks.PurgeQueueRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloudtasks.PurgeQueueRequest):
+            request = cloudtasks.PurgeQueueRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.purge_queue]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def pause_queue(
+        self,
+        request: Optional[Union[cloudtasks.PauseQueueRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> queue.Queue:
+        r"""Pauses the queue.
+
+        If a queue is paused then the system will stop dispatching tasks
+        until the queue is resumed via
+        [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue].
+        Tasks can still be added when the queue is paused. A queue is
+        paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is
+        [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta3
+
+            def sample_pause_queue():
+                # Create a client
+                client = tasks_v2beta3.CloudTasksClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta3.PauseQueueRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.pause_queue(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.tasks_v2beta3.types.PauseQueueRequest, dict]):
+                The request object. Request message for
+                [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue].
+            name (str):
+                Required. The queue name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta3.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, queue types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudtasks.PauseQueueRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloudtasks.PauseQueueRequest):
+            request = cloudtasks.PauseQueueRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.pause_queue]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
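+        # Usage sketch (hypothetical IDs): pause and resume are symmetric
+        # calls, and each returns the Queue with its updated state.
+        #
+        #   client = tasks_v2beta3.CloudTasksClient()
+        #   name = client.queue_path("my-project", "us-central1", "my-queue")
+        #   paused = client.pause_queue(name=name)    # paused.state == PAUSED
+        #   running = client.resume_queue(name=name)  # running.state == RUNNING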
+        return response
+
+    def resume_queue(
+        self,
+        request: Optional[Union[cloudtasks.ResumeQueueRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> queue.Queue:
+        r"""Resumes a queue.
+
+        This method resumes a queue after it has been
+        [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or
+        [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The
+        state of a queue is stored in the queue's
+        [state][google.cloud.tasks.v2beta3.Queue.state]; after calling
+        this method it will be set to
+        [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING].
+
+        WARNING: Resuming many high-QPS queues at the same time can lead
+        to target overloading. If you are resuming high-QPS queues,
+        follow the 500/50/5 pattern described in `Managing Cloud Tasks
+        Scaling Risks `__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta3
+
+            def sample_resume_queue():
+                # Create a client
+                client = tasks_v2beta3.CloudTasksClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta3.ResumeQueueRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.resume_queue(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.tasks_v2beta3.types.ResumeQueueRequest, dict]):
+                The request object. Request message for
+                [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue].
+            name (str):
+                Required. The queue name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta3.types.Queue:
+                A queue is a container of related
+                tasks. Queues are configured to manage
+                how those tasks are dispatched.
+                Configurable properties include rate
+                limits, retry options, queue types, and
+                others.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloudtasks.ResumeQueueRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloudtasks.ResumeQueueRequest):
+            request = cloudtasks.ResumeQueueRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_queue] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. 
A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + resource (str): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (MutableSequence[str]): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + if resource is not None: + request.resource = resource + if permissions: + request.permissions.extend(permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
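+        # Usage sketch (hypothetical resource and permission names): the
+        # response echoes back only the permissions the caller holds.
+        #
+        #   client = tasks_v2beta3.CloudTasksClient()
+        #   resource = client.queue_path("my-project", "us-central1", "my-queue")
+        #   response = client.test_iam_permissions(
+        #       resource=resource,
+        #       permissions=["cloudtasks.tasks.create"],
+        #   )
+        #   print(response.permissions)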
+ return response + + def list_tasks( + self, + request: Optional[Union[cloudtasks.ListTasksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTasksPager: + r"""Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_list_tasks(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.ListTasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tasks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.ListTasksRequest, dict]): + The request object. Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.services.cloud_tasks.pagers.ListTasksPager: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.ListTasksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.ListTasksRequest): + request = cloudtasks.ListTasksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
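+        # (The wrapped callable carries the default retry/timeout policy
+        # configured in ``_prep_wrapped_methods`` on the transport; explicit
+        # ``retry``/``timeout`` arguments passed to this method take
+        # precedence over those defaults.)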
+ rpc = self._transport._wrapped_methods[self._transport.list_tasks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTasksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_task( + self, + request: Optional[Union[cloudtasks.GetTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Gets a task. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_get_task(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.GetTaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.GetTaskRequest, dict]): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.GetTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.GetTaskRequest): + request = cloudtasks.GetTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
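+        # For ``get_task`` the transport defaults include retrying on
+        # DeadlineExceeded and ServiceUnavailable with exponential backoff
+        # (see ``transports/base.py``).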
+ rpc = self._transport._wrapped_methods[self._transport.get_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_task( + self, + request: Optional[Union[cloudtasks.CreateTaskRequest, dict]] = None, + *, + parent: Optional[str] = None, + task: Optional[gct_task.Task] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_create_task(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.CreateTaskRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.CreateTaskRequest, dict]): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + task (google.cloud.tasks_v2beta3.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name + is not specified then the system will generate a random + unique task id, which will be set in the task returned + in the [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set + it to the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task + de-duplication. If a task's ID is identical to that of + an existing task or a task that was deleted or executed + recently then the call will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1 hour + after the original task was deleted or executed. If the + task's queue was created using queue.yaml or queue.xml, + then another task with the same name can't be created + for ~9 days after the original task was deleted or + executed. 
+ + Because there is an extra lookup cost to identify + duplicate task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id + is recommended. Choosing task ids that are sequential or + have sequential prefixes, for example using a timestamp, + causes an increase in latency and error rates in all + task commands. The infrastructure relies on an + approximately uniform distribution of task ids to store + and serve tasks efficiently. + + This corresponds to the ``task`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, task]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.CreateTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.CreateTaskRequest): + request = cloudtasks.CreateTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if task is not None: + request.task = task + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_task( + self, + request: Optional[Union[cloudtasks.DeleteTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_delete_task(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.DeleteTaskRequest( + name="name_value", + ) + + # Make the request + client.delete_task(request=request) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.DeleteTaskRequest, dict]): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.DeleteTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.DeleteTaskRequest): + request = cloudtasks.DeleteTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def run_task( + self, + request: Optional[Union[cloudtasks.RunTaskRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. 
+ + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import tasks_v2beta3 + + def sample_run_task(): + # Create a client + client = tasks_v2beta3.CloudTasksClient() + + # Initialize request argument(s) + request = tasks_v2beta3.RunTaskRequest( + name="name_value", + ) + + # Make the request + response = client.run_task(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.tasks_v2beta3.types.RunTaskRequest, dict]): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.tasks_v2beta3.types.Task: + A unit of scheduled work. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.RunTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.RunTaskRequest): + request = cloudtasks.RunTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
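+        # The returned Task reflects its state as of dispatch time; the
+        # target's eventual response is not reflected in it.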
+        return response
+
+    def buffer_task(
+        self,
+        request: Optional[Union[cloudtasks.BufferTaskRequest, dict]] = None,
+        *,
+        queue: Optional[str] = None,
+        task_id: Optional[str] = None,
+        body: Optional[httpbody_pb2.HttpBody] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cloudtasks.BufferTaskResponse:
+        r"""Creates and buffers a new task without the need to explicitly
+        define a Task message. The queue must have an [HTTP
+        target][google.cloud.tasks.v2beta3.HttpTarget]. To create the
+        task with a custom ID, use the following format and set TASK_ID
+        to your desired ID:
+        projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer
+        To create the task with an automatically generated ID, use the
+        following format:
+        projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer.
+        Note: This feature is in its experimental stage. You must
+        request access to the API through the `Cloud Tasks BufferTask
+        Experiment Signup form `__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import tasks_v2beta3
+
+            def sample_buffer_task():
+                # Create a client
+                client = tasks_v2beta3.CloudTasksClient()
+
+                # Initialize request argument(s)
+                request = tasks_v2beta3.BufferTaskRequest(
+                    queue="queue_value",
+                )
+
+                # Make the request
+                response = client.buffer_task(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.tasks_v2beta3.types.BufferTaskRequest, dict]):
+                The request object. Request message for
+                [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask].
+            queue (str):
+                Required. The parent queue name. For example:
+                ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+                The queue must already exist.
+
+                This corresponds to the ``queue`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            task_id (str):
+                Optional. Task ID for the task being
+                created. If not provided, a random task
+                ID is assigned to the task.
+
+                This corresponds to the ``task_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            body (google.api.httpbody_pb2.HttpBody):
+                Optional. Body of the HTTP request.
+
+                The body can take any generic value. The value is
+                written to the [HttpRequest][payload] of the [Task].
+
+                This corresponds to the ``body`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.tasks_v2beta3.types.BufferTaskResponse:
+                Response message for
+                [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask].
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
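+        # ``queue``, ``task_id`` and ``body`` are flattened equivalents of the
+        # corresponding ``BufferTaskRequest`` fields and may be passed instead
+        # of (never together with) ``request``.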
+ has_flattened_params = any([queue, task_id, body]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloudtasks.BufferTaskRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudtasks.BufferTaskRequest): + request = cloudtasks.BufferTaskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if queue is not None: + request.queue = queue + if task_id is not None: + request.task_id = task_id + if body is not None: + request.body = body + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.buffer_task] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("queue", request.queue), + ("task_id", request.task_id), + ) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CloudTasksClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CloudTasksClient",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py new file mode 100644 index 000000000000..207e1a7de49a --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/pagers.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.tasks_v2beta3.types import cloudtasks, queue, task + + +class ListQueuesPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListQueuesResponse], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta3.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2beta3.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[queue.Queue]: + for page in self.pages: + yield from page.queues + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListQueuesAsyncPager: + """A pager for iterating through ``list_queues`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``queues`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListQueues`` requests and continue to iterate + through the ``queues`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta3.types.ListQueuesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListQueuesResponse]], + request: cloudtasks.ListQueuesRequest, + response: cloudtasks.ListQueuesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta3.types.ListQueuesRequest): + The initial request object. + response (google.cloud.tasks_v2beta3.types.ListQueuesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = cloudtasks.ListQueuesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudtasks.ListQueuesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[queue.Queue]: + async def async_generator(): + async for page in self.pages: + for response in page.queues: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cloudtasks.ListTasksResponse], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta3.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2beta3.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[task.Task]: + for page in self.pages: + yield from page.tasks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTasksAsyncPager: + """A pager for iterating through ``list_tasks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tasks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTasks`` requests and continue to iterate + through the ``tasks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.tasks_v2beta3.types.ListTasksResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cloudtasks.ListTasksResponse]], + request: cloudtasks.ListTasksRequest, + response: cloudtasks.ListTasksResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.tasks_v2beta3.types.ListTasksRequest): + The initial request object. + response (google.cloud.tasks_v2beta3.types.ListTasksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = cloudtasks.ListTasksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloudtasks.ListTasksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[task.Task]: + async def async_generator(): + async for page in self.pages: + for response in page.tasks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py new file mode 100644 index 000000000000..0a9dffaab4fe --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudTasksTransport +from .grpc import CloudTasksGrpcTransport +from .grpc_asyncio import CloudTasksGrpcAsyncIOTransport +from .rest import CloudTasksRestInterceptor, CloudTasksRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[CloudTasksTransport]] +_transport_registry["grpc"] = CloudTasksGrpcTransport +_transport_registry["grpc_asyncio"] = CloudTasksGrpcAsyncIOTransport +_transport_registry["rest"] = CloudTasksRestTransport + +__all__ = ( + "CloudTasksTransport", + "CloudTasksGrpcTransport", + "CloudTasksGrpcAsyncIOTransport", + "CloudTasksRestTransport", + "CloudTasksRestInterceptor", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py new file mode 100644 index 000000000000..cd7b0f657c98 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/base.py @@ -0,0 +1,480 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.tasks_v2beta3 import gapic_version as package_version +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CloudTasksTransport(abc.ABC): + """Abstract transport class for CloudTasks.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudtasks.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These +                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply an audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self-signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
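+        # Each entry pairs an RPC with a default timeout and, for methods that
+        # are safe to retry, an exponential-backoff retry policy; per-call
+        # ``retry``/``timeout`` arguments still override these defaults.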
+ self._wrapped_methods = { + self.list_queues: gapic_v1.method.wrap_method( + self.list_queues, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_queue: gapic_v1.method.wrap_method( + self.get_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_queue: gapic_v1.method.wrap_method( + self.create_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.update_queue: gapic_v1.method.wrap_method( + self.update_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.delete_queue: gapic_v1.method.wrap_method( + self.delete_queue, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.purge_queue: gapic_v1.method.wrap_method( + self.purge_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.pause_queue: gapic_v1.method.wrap_method( + self.pause_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.resume_queue: gapic_v1.method.wrap_method( + self.resume_queue, + default_timeout=20.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=20.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_tasks: gapic_v1.method.wrap_method( + self.list_tasks, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_task: gapic_v1.method.wrap_method( + self.get_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_task: gapic_v1.method.wrap_method( + self.create_task, + default_timeout=20.0, + client_info=client_info, + ), + self.delete_task: gapic_v1.method.wrap_method( + self.delete_task, + default_retry=retries.Retry( + initial=0.1, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=20.0, + ), + default_timeout=20.0, + client_info=client_info, + ), + self.run_task: gapic_v1.method.wrap_method( + self.run_task, + default_timeout=20.0, + client_info=client_info, + ), + self.buffer_task: gapic_v1.method.wrap_method( + self.buffer_task, + default_timeout=20.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], + Union[cloudtasks.ListQueuesResponse, Awaitable[cloudtasks.ListQueuesResponse]], + ]: + raise NotImplementedError() + + @property + def get_queue( + self, + ) -> Callable[ + [cloudtasks.GetQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def create_queue( + self, + ) -> Callable[ + [cloudtasks.CreateQueueRequest], + Union[gct_queue.Queue, Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def update_queue( + self, + ) -> Callable[ + [cloudtasks.UpdateQueueRequest], + Union[gct_queue.Queue, Awaitable[gct_queue.Queue]], + ]: + raise NotImplementedError() + + @property + def delete_queue( + self, + ) -> Callable[ + [cloudtasks.DeleteQueueRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def purge_queue( + self, + ) -> Callable[ + [cloudtasks.PurgeQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def pause_queue( + self, + ) -> Callable[ + [cloudtasks.PauseQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def resume_queue( + self, + ) -> Callable[ + [cloudtasks.ResumeQueueRequest], Union[queue.Queue, Awaitable[queue.Queue]] + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_tasks( + self, + ) -> Callable[ + [cloudtasks.ListTasksRequest], + Union[cloudtasks.ListTasksResponse, Awaitable[cloudtasks.ListTasksResponse]], + ]: + raise NotImplementedError() + + @property + def get_task( + self, + ) -> Callable[[cloudtasks.GetTaskRequest], Union[task.Task, Awaitable[task.Task]]]: + raise NotImplementedError() + + @property + def create_task( + self, + ) -> Callable[ + [cloudtasks.CreateTaskRequest], Union[gct_task.Task, Awaitable[gct_task.Task]] + ]: + raise NotImplementedError() + + @property + def delete_task( + self, + ) -> Callable[ + [cloudtasks.DeleteTaskRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def run_task( + self, + ) -> Callable[[cloudtasks.RunTaskRequest], Union[task.Task, 
Awaitable[task.Task]]]: + raise NotImplementedError() + + @property + def buffer_task( + self, + ) -> Callable[ + [cloudtasks.BufferTaskRequest], + Union[cloudtasks.BufferTaskResponse, Awaitable[cloudtasks.BufferTaskResponse]], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CloudTasksTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py new file mode 100644 index 000000000000..22ae3cf0f0a1 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc.py @@ -0,0 +1,861 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task + +from .base import DEFAULT_CLIENT_INFO, CloudTasksTransport + + +class CloudTasksGrpcTransport(CloudTasksTransport): + """gRPC backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudtasks.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudtasks.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + ~.ListQueuesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. + + Returns: + Callable[[~.GetQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], empty_pb2.Empty]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]: + r"""Return a callable for the resume queue method over gRPC. + + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + ~.Queue]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. 
+ + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
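+        # Note: the IAM request/response types are plain protobuf messages
+        # (hence ``SerializeToString``/``FromString``), unlike the proto-plus
+        # Cloud Tasks types above, which expose ``serialize``/``deserialize``.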
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + ~.ListTasksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty_pb2.Empty]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. 
+ + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + Returns: + Callable[[~.RunTaskRequest], + ~.Task]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_task" not in self._stubs: + self._stubs["run_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/RunTask", + request_serializer=cloudtasks.RunTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["run_task"] + + @property + def buffer_task( + self, + ) -> Callable[[cloudtasks.BufferTaskRequest], cloudtasks.BufferTaskResponse]: + r"""Return a callable for the buffer task method over gRPC. + + Creates and buffers a new task without the need to explicitly + define a Task message. The queue must have [HTTP + target][google.cloud.tasks.v2beta3.HttpTarget]. To create the + task with a custom ID, use the following format and set TASK_ID + to your desired ID: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer + To create the task with an automatically generated ID, use the + following format: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer. 
+        Note: This feature is in its experimental stage. You must
+        request access to the API through the `Cloud Tasks BufferTask
+        Experiment Signup form `__.
+
+        Returns:
+            Callable[[~.BufferTaskRequest],
+                    ~.BufferTaskResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "buffer_task" not in self._stubs:
+            self._stubs["buffer_task"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2beta3.CloudTasks/BufferTask",
+                request_serializer=cloudtasks.BufferTaskRequest.serialize,
+                response_deserializer=cloudtasks.BufferTaskResponse.deserialize,
+            )
+        return self._stubs["buffer_task"]
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = ("CloudTasksGrpcTransport",)
diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..c138c66ec81b
--- /dev/null
+++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/grpc_asyncio.py
@@ -0,0 +1,878 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.tasks_v2beta3.types import cloudtasks
+from google.cloud.tasks_v2beta3.types import queue
+from google.cloud.tasks_v2beta3.types import queue as gct_queue
+from google.cloud.tasks_v2beta3.types import task
+from google.cloud.tasks_v2beta3.types import task as gct_task
+
+from .base import DEFAULT_CLIENT_INFO, CloudTasksTransport
+from .grpc import CloudTasksGrpcTransport
+
+
+class CloudTasksGrpcAsyncIOTransport(CloudTasksTransport):
+    """gRPC AsyncIO backend transport for CloudTasks.
+
+    Cloud Tasks allows developers to manage the execution of
+    background work in their applications.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "cloudtasks.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudtasks.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_queues( + self, + ) -> Callable[ + [cloudtasks.ListQueuesRequest], Awaitable[cloudtasks.ListQueuesResponse] + ]: + r"""Return a callable for the list queues method over gRPC. + + Lists queues. + + Queues are returned in lexicographical order. + + Returns: + Callable[[~.ListQueuesRequest], + Awaitable[~.ListQueuesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_queues" not in self._stubs: + self._stubs["list_queues"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ListQueues", + request_serializer=cloudtasks.ListQueuesRequest.serialize, + response_deserializer=cloudtasks.ListQueuesResponse.deserialize, + ) + return self._stubs["list_queues"] + + @property + def get_queue( + self, + ) -> Callable[[cloudtasks.GetQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the get queue method over gRPC. + + Gets a queue. 
+ + Returns: + Callable[[~.GetQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_queue" not in self._stubs: + self._stubs["get_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetQueue", + request_serializer=cloudtasks.GetQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["get_queue"] + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the create queue method over gRPC. + + Creates a queue. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.CreateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_queue" not in self._stubs: + self._stubs["create_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue", + request_serializer=cloudtasks.CreateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["create_queue"] + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], Awaitable[gct_queue.Queue]]: + r"""Return a callable for the update queue method over gRPC. + + Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a + maximum of 31 days. After a task is 31 days old, the task will + be deleted regardless of whether it was dispatched or not. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.UpdateQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_queue" not in self._stubs: + self._stubs["update_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue", + request_serializer=cloudtasks.UpdateQueueRequest.serialize, + response_deserializer=gct_queue.Queue.deserialize, + ) + return self._stubs["update_queue"] + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete queue method over gRPC. + + Deletes a queue. 
+ + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be + created for 7 days. + + WARNING: Using this method may have unintended side effects if + you are using an App Engine ``queue.yaml`` or ``queue.xml`` file + to manage your queues. Read `Overview of Queue Management and + queue.yaml `__ + before using this method. + + Returns: + Callable[[~.DeleteQueueRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_queue" not in self._stubs: + self._stubs["delete_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue", + request_serializer=cloudtasks.DeleteQueueRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_queue"] + + @property + def purge_queue( + self, + ) -> Callable[[cloudtasks.PurgeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the purge queue method over gRPC. + + Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are + permanently deleted. + + Purge operations can take up to one minute to take + effect. Tasks might be dispatched before the purge takes + effect. A purge is irreversible. + + Returns: + Callable[[~.PurgeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "purge_queue" not in self._stubs: + self._stubs["purge_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue", + request_serializer=cloudtasks.PurgeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["purge_queue"] + + @property + def pause_queue( + self, + ) -> Callable[[cloudtasks.PauseQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the pause queue method over gRPC. + + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + Tasks can still be added when the queue is paused. A queue is + paused if its [state][google.cloud.tasks.v2beta3.Queue.state] is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + Returns: + Callable[[~.PauseQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_queue" not in self._stubs: + self._stubs["pause_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue", + request_serializer=cloudtasks.PauseQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["pause_queue"] + + @property + def resume_queue( + self, + ) -> Callable[[cloudtasks.ResumeQueueRequest], Awaitable[queue.Queue]]: + r"""Return a callable for the resume queue method over gRPC. 
+ + Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The + state of a queue is stored in the queue's + [state][google.cloud.tasks.v2beta3.Queue.state]; after calling + this method it will be set to + [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can lead + to target overloading. If you are resuming high-QPS queues, + follow the 500/50/5 pattern described in `Managing Cloud Tasks + Scaling + Risks `__. + + Returns: + Callable[[~.ResumeQueueRequest], + Awaitable[~.Queue]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_queue" not in self._stubs: + self._stubs["resume_queue"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue", + request_serializer=cloudtasks.ResumeQueueRequest.serialize, + response_deserializer=queue.Queue.deserialize, + ) + return self._stubs["resume_queue"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Returns an empty + policy if the resource exists and does not have a policy set. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.getIamPolicy`` + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a + [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM + permissions yet. Project-level permissions are required to use + the Cloud Console. + + Authorization requires the following `Google + IAM `__ permission on the + specified resource parent: + + - ``cloudtasks.queues.setIamPolicy`` + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
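+        # Note: on the asyncio transport the multicallable returned by
+        # ``unary_unary`` produces awaitable calls, so the cached stub is
+        # invoked with ``await`` by the async client.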
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that a caller has on a + [Queue][google.cloud.tasks.v2beta3.Queue]. If the resource does + not exist, this will return an empty set of permissions, not a + [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for + authorization checking. This operation may "fail open" without + warning. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def list_tasks( + self, + ) -> Callable[ + [cloudtasks.ListTasksRequest], Awaitable[cloudtasks.ListTasksResponse] + ]: + r"""Return a callable for the list tasks method over gRPC. + + Lists the tasks in a queue. + + By default, only the + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is + retrieved due to performance considerations; + [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] + controls the subset of information which is returned. + + The tasks may be returned in any order. The ordering may change + at any time. + + Returns: + Callable[[~.ListTasksRequest], + Awaitable[~.ListTasksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tasks" not in self._stubs: + self._stubs["list_tasks"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/ListTasks", + request_serializer=cloudtasks.ListTasksRequest.serialize, + response_deserializer=cloudtasks.ListTasksResponse.deserialize, + ) + return self._stubs["list_tasks"] + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the get task method over gRPC. + + Gets a task. + + Returns: + Callable[[~.GetTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_task" not in self._stubs: + self._stubs["get_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/GetTask", + request_serializer=cloudtasks.GetTaskRequest.serialize, + response_deserializer=task.Task.deserialize, + ) + return self._stubs["get_task"] + + @property + def create_task( + self, + ) -> Callable[[cloudtasks.CreateTaskRequest], Awaitable[gct_task.Task]]: + r"""Return a callable for the create task method over gRPC. + + Creates a task and adds it to a queue. + + Tasks cannot be updated after creation; there is no UpdateTask + command. + + - The maximum task size is 100KB. + + Returns: + Callable[[~.CreateTaskRequest], + Awaitable[~.Task]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_task" not in self._stubs: + self._stubs["create_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/CreateTask", + request_serializer=cloudtasks.CreateTaskRequest.serialize, + response_deserializer=gct_task.Task.deserialize, + ) + return self._stubs["create_task"] + + @property + def delete_task( + self, + ) -> Callable[[cloudtasks.DeleteTaskRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete task method over gRPC. + + Deletes a task. + + A task can be deleted if it is scheduled or dispatched. + A task cannot be deleted if it has executed successfully + or permanently failed. + + Returns: + Callable[[~.DeleteTaskRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_task" not in self._stubs: + self._stubs["delete_task"] = self.grpc_channel.unary_unary( + "/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask", + request_serializer=cloudtasks.DeleteTaskRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_task"] + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], Awaitable[task.Task]]: + r"""Return a callable for the run task method over gRPC. + + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, + even if the task is already running, the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or is + [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be + used to retry a failed task after a fix has been made or to + manually force a task to be dispatched now. + + The dispatched task is returned. That is, the task that is + returned contains the [status][Task.status] after the task is + dispatched but before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + will be reset to the time that + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was + called plus the retry delay specified in the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. 
+
+        [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns
+        [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a
+        task that has already succeeded or permanently failed.
+
+        Returns:
+            Callable[[~.RunTaskRequest],
+                    Awaitable[~.Task]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "run_task" not in self._stubs:
+            self._stubs["run_task"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2beta3.CloudTasks/RunTask",
+                request_serializer=cloudtasks.RunTaskRequest.serialize,
+                response_deserializer=task.Task.deserialize,
+            )
+        return self._stubs["run_task"]
+
+    @property
+    def buffer_task(
+        self,
+    ) -> Callable[
+        [cloudtasks.BufferTaskRequest], Awaitable[cloudtasks.BufferTaskResponse]
+    ]:
+        r"""Return a callable for the buffer task method over gRPC.
+
+        Creates and buffers a new task without the need to explicitly
+        define a Task message. The queue must have [HTTP
+        target][google.cloud.tasks.v2beta3.HttpTarget]. To create the
+        task with a custom ID, use the following format and set TASK_ID
+        to your desired ID:
+        projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID:buffer
+        To create the task with an automatically generated ID, use the
+        following format:
+        projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks:buffer.
+        Note: This feature is in its experimental stage. You must
+        request access to the API through the `Cloud Tasks BufferTask
+        Experiment Signup form `__.
+
+        Returns:
+            Callable[[~.BufferTaskRequest],
+                    Awaitable[~.BufferTaskResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "buffer_task" not in self._stubs:
+            self._stubs["buffer_task"] = self.grpc_channel.unary_unary(
+                "/google.cloud.tasks.v2beta3.CloudTasks/BufferTask",
+                request_serializer=cloudtasks.BufferTaskRequest.serialize,
+                response_deserializer=cloudtasks.BufferTaskResponse.deserialize,
+            )
+        return self._stubs["buffer_task"]
+
+    def close(self):
+        return self.grpc_channel.close()
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
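+        # Note: these location stubs target the standalone
+        # ``google.cloud.location.Locations`` mixin service, so the method
+        # path differs from the ``CloudTasks`` service paths above.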
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("CloudTasksGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/rest.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/rest.py new file mode 100644 index 000000000000..1e08e90c1ae1 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/services/cloud_tasks/transports/rest.py @@ -0,0 +1,2690 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task + +from .base import CloudTasksTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudTasksRestInterceptor: + """Interceptor for CloudTasks. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudTasksRestTransport. + + .. 
code-block:: python
+        class MyCustomCloudTasksInterceptor(CloudTasksRestInterceptor):
+            def pre_buffer_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_buffer_task(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_queue(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_queue(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_task(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_queue(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_delete_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_iam_policy(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_iam_policy(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_queue(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_queue(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_task(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_queues(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_queues(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_tasks(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_tasks(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_pause_queue(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_pause_queue(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_purge_queue(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_purge_queue(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_resume_queue(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_resume_queue(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_run_task(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_run_task(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_set_iam_policy(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_set_iam_policy(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_test_iam_permissions(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_test_iam_permissions(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_queue(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_queue(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = CloudTasksRestTransport(interceptor=MyCustomCloudTasksInterceptor())
+        client = CloudTasksClient(transport=transport)
+
+
+    """
+
+    def pre_buffer_task(
+        self, request: cloudtasks.BufferTaskRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[cloudtasks.BufferTaskRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for buffer_task
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudTasks server.
+        """
+        return request, metadata
+
+    def post_buffer_task(
+        self, response: cloudtasks.BufferTaskResponse
+    ) -> cloudtasks.BufferTaskResponse:
+        """Post-rpc interceptor for buffer_task
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CloudTasks server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_create_queue(
+        self,
+        request: cloudtasks.CreateQueueRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[cloudtasks.CreateQueueRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_queue
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudTasks server.
+        """
+        return request, metadata
+
+    def post_create_queue(self, response: gct_queue.Queue) -> gct_queue.Queue:
+        """Post-rpc interceptor for create_queue
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CloudTasks server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_create_task(
+        self, request: cloudtasks.CreateTaskRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[cloudtasks.CreateTaskRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_task
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudTasks server.
+        """
+        return request, metadata
+
+    def post_create_task(self, response: gct_task.Task) -> gct_task.Task:
+        """Post-rpc interceptor for create_task
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CloudTasks server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_delete_queue(
+        self,
+        request: cloudtasks.DeleteQueueRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[cloudtasks.DeleteQueueRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_queue
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudTasks server.
+        """
+        return request, metadata
+
+    def pre_delete_task(
+        self, request: cloudtasks.DeleteTaskRequest, metadata: Sequence[Tuple[str, str]]
+    ) -> Tuple[cloudtasks.DeleteTaskRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for delete_task
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudTasks server.
+        """
+        return request, metadata
+
+    def pre_get_iam_policy(
+        self,
+        request: iam_policy_pb2.GetIamPolicyRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_iam_policy
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudTasks server.
+ """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_get_queue( + self, request: cloudtasks.GetQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.GetQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for get_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_get_task( + self, request: cloudtasks.GetTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.GetTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_task(self, response: task.Task) -> task.Task: + """Post-rpc interceptor for get_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_list_queues( + self, request: cloudtasks.ListQueuesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.ListQueuesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_queues + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_list_queues( + self, response: cloudtasks.ListQueuesResponse + ) -> cloudtasks.ListQueuesResponse: + """Post-rpc interceptor for list_queues + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_list_tasks( + self, request: cloudtasks.ListTasksRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.ListTasksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_tasks + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_list_tasks( + self, response: cloudtasks.ListTasksResponse + ) -> cloudtasks.ListTasksResponse: + """Post-rpc interceptor for list_tasks + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_pause_queue( + self, request: cloudtasks.PauseQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.PauseQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for pause_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. 
+ """ + return request, metadata + + def post_pause_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for pause_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_purge_queue( + self, request: cloudtasks.PurgeQueueRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.PurgeQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_purge_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for purge_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_resume_queue( + self, + request: cloudtasks.ResumeQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.ResumeQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for resume_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_resume_queue(self, response: queue.Queue) -> queue.Queue: + """Post-rpc interceptor for resume_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_run_task( + self, request: cloudtasks.RunTaskRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudtasks.RunTaskRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_task + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_run_task(self, response: task.Task) -> task.Task: + """Post-rpc interceptor for run_task + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. 
+ """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_update_queue( + self, + request: cloudtasks.UpdateQueueRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudtasks.UpdateQueueRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_queue + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_update_queue(self, response: gct_queue.Queue) -> gct_queue.Queue: + """Post-rpc interceptor for update_queue + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudTasks server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudTasks server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudTasksRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudTasksRestInterceptor + + +class CloudTasksRestTransport(CloudTasksTransport): + """REST backend transport for CloudTasks. + + Cloud Tasks allows developers to manage the execution of + background work in their applications. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "cloudtasks.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[CloudTasksRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWTs should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
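The host normalization performed just below in the constructor reduces to this standalone sketch (the function name is ours, not part of the diff):

```python
import re


def normalize_host(host: str, url_scheme: str = "https") -> str:
    # Prepend the configured scheme only when the host does not already
    # carry one; this mirrors the constructor logic below.
    m = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
    if m is None:
        raise ValueError(f"Unexpected hostname structure: {host}")
    return host if m.groupdict()["scheme"] else f"{url_scheme}://{host}"


assert normalize_host("cloudtasks.googleapis.com") == "https://cloudtasks.googleapis.com"
assert normalize_host("http://localhost:8080", "http") == "http://localhost:8080"
```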
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CloudTasksRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _BufferTask(CloudTasksRestStub):
+        def __hash__(self):
+            return hash("BufferTask")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: cloudtasks.BufferTaskRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> cloudtasks.BufferTaskResponse:
+            r"""Call the buffer task method over HTTP.
+
+            Args:
+                request (~.cloudtasks.BufferTaskRequest):
+                    The request object. Request message for
+                    [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.cloudtasks.BufferTaskResponse:
+                    Response message for
+                    [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask].
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v2beta3/{queue=projects/*/locations/*/queues/*}/tasks/{task_id}:buffer",
+                    "body": "*",
+                },
+            ]
+            request, metadata = self._interceptor.pre_buffer_task(request, metadata)
+            pb_request = cloudtasks.BufferTaskRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.BufferTaskResponse() + pb_resp = cloudtasks.BufferTaskResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_buffer_task(resp) + return resp + + class _CreateQueue(CloudTasksRestStub): + def __hash__(self): + return hash("CreateQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.CreateQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Call the create queue method over HTTP. + + Args: + request (~.cloudtasks.CreateQueueRequest): + The request object. Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{parent=projects/*/locations/*}/queues", + "body": "queue", + }, + ] + request, metadata = self._interceptor.pre_create_queue(request, metadata) + pb_request = cloudtasks.CreateQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
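For reference, reaching the `_BufferTask` stub above through the public client surface might look like the following sketch (resource names are placeholders; per the docstring, the queue must have an HTTP target and omitting `task_id` yields an auto-generated ID):

```python
from google.cloud import tasks_v2beta3
from google.cloud.tasks_v2beta3.types import cloudtasks

client = tasks_v2beta3.CloudTasksClient(transport="rest")

# "queue" maps into the URI via the
# {queue=projects/*/locations/*/queues/*} template shown above.
response = client.buffer_task(
    request=cloudtasks.BufferTaskRequest(
        queue="projects/my-project/locations/us-central1/queues/my-queue",
    )
)
print(response)
```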
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_queue.Queue() + pb_resp = gct_queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_queue(resp) + return resp + + class _CreateTask(CloudTasksRestStub): + def __hash__(self): + return hash("CreateTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.CreateTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_task.Task: + r"""Call the create task method over HTTP. + + Args: + request (~.cloudtasks.CreateTaskRequest): + The request object. Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_task.Task: + A unit of scheduled work. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_task(request, metadata) + pb_request = cloudtasks.CreateTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_task.Task() + pb_resp = gct_task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_task(resp) + return resp + + class _DeleteQueue(CloudTasksRestStub): + def __hash__(self): + return hash("DeleteQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.DeleteQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete queue method over HTTP. + + Args: + request (~.cloudtasks.DeleteQueueRequest): + The request object. Request message for + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2beta3/{name=projects/*/locations/*/queues/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_queue(request, metadata) + pb_request = cloudtasks.DeleteQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteTask(CloudTasksRestStub): + def __hash__(self): + return hash("DeleteTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.DeleteTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete task method over HTTP. + + Args: + request (~.cloudtasks.DeleteTaskRequest): + The request object. Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_task(request, metadata) + pb_request = cloudtasks.DeleteTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetIamPolicy(CloudTasksRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
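A hedged usage sketch against the GetIamPolicy stub above (the resource name is a placeholder; bindings are printed read-only):

```python
from google.cloud import tasks_v2beta3

client = tasks_v2beta3.CloudTasksClient(transport="rest")

# GetIamPolicy is POSTed to .../queues/*:getIamPolicy, matching the
# http_options defined for this stub.
policy = client.get_iam_policy(
    resource="projects/my-project/locations/us-central1/queues/my-queue"
)
for binding in policy.bindings:
    print(binding.role, list(binding.members))
```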
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{resource=projects/*/locations/*/queues/*}:getIamPolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _GetQueue(CloudTasksRestStub): + def __hash__(self): + return hash("GetQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.GetQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the get queue method over HTTP. + + Args: + request (~.cloudtasks.GetQueueRequest): + The request object. 
Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta3/{name=projects/*/locations/*/queues/*}", + }, + ] + request, metadata = self._interceptor.pre_get_queue(request, metadata) + pb_request = cloudtasks.GetQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_queue(resp) + return resp + + class _GetTask(CloudTasksRestStub): + def __hash__(self): + return hash("GetTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.GetTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Call the get task method over HTTP. + + Args: + request (~.cloudtasks.GetTaskRequest): + The request object. Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}", + }, + ] + request, metadata = self._interceptor.pre_get_task(request, metadata) + pb_request = cloudtasks.GetTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = task.Task() + pb_resp = task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_task(resp) + return resp + + class _ListQueues(CloudTasksRestStub): + def __hash__(self): + return hash("ListQueues") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ListQueuesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.ListQueuesResponse: + r"""Call the list queues method over HTTP. + + Args: + request (~.cloudtasks.ListQueuesRequest): + The request object. Request message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudtasks.ListQueuesResponse: + Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta3/{parent=projects/*/locations/*}/queues", + }, + ] + request, metadata = self._interceptor.pre_list_queues(request, metadata) + pb_request = cloudtasks.ListQueuesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.ListQueuesResponse() + pb_resp = cloudtasks.ListQueuesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_queues(resp) + return resp + + class _ListTasks(CloudTasksRestStub): + def __hash__(self): + return hash("ListTasks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ListTasksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudtasks.ListTasksResponse: + r"""Call the list tasks method over HTTP. + + Args: + request (~.cloudtasks.ListTasksRequest): + The request object. Request message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudtasks.ListTasksResponse: + Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks", + }, + ] + request, metadata = self._interceptor.pre_list_tasks(request, metadata) + pb_request = cloudtasks.ListTasksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudtasks.ListTasksResponse() + pb_resp = cloudtasks.ListTasksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_tasks(resp) + return resp + + class _PauseQueue(CloudTasksRestStub): + def __hash__(self): + return hash("PauseQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.PauseQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the pause queue method over HTTP. + + Args: + request (~.cloudtasks.PauseQueueRequest): + The request object. Request message for + [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{name=projects/*/locations/*/queues/*}:pause", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_pause_queue(request, metadata) + pb_request = cloudtasks.PauseQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pause_queue(resp) + return resp + + class _PurgeQueue(CloudTasksRestStub): + def __hash__(self): + return hash("PurgeQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.PurgeQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the purge queue method over HTTP. + + Args: + request (~.cloudtasks.PurgeQueueRequest): + The request object. Request message for + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{name=projects/*/locations/*/queues/*}:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_queue(request, metadata) + pb_request = cloudtasks.PurgeQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_queue(resp) + return resp + + class _ResumeQueue(CloudTasksRestStub): + def __hash__(self): + return hash("ResumeQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.ResumeQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> queue.Queue: + r"""Call the resume queue method over HTTP. + + Args: + request (~.cloudtasks.ResumeQueueRequest): + The request object. Request message for + [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{name=projects/*/locations/*/queues/*}:resume", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_resume_queue(request, metadata) + pb_request = cloudtasks.ResumeQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = queue.Queue() + pb_resp = queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_resume_queue(resp) + return resp + + class _RunTask(CloudTasksRestStub): + def __hash__(self): + return hash("RunTask") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.RunTaskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> task.Task: + r"""Call the run task method over HTTP. + + Args: + request (~.cloudtasks.RunTaskRequest): + The request object. Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.task.Task: + A unit of scheduled work. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}:run", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_task(request, metadata) + pb_request = cloudtasks.RunTaskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = task.Task() + pb_resp = task.Task.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_task(resp) + return resp + + class _SetIamPolicy(CloudTasksRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{resource=projects/*/locations/*/queues/*}:setIamPolicy", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
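+            # (For reference: ``from_http_response`` maps the response to the
+            # matching ``api_core`` exception subclass, e.g. 403 ->
+            # PermissionDenied, 404 -> NotFound, 429 -> TooManyRequests;
+            # statuses without a dedicated subclass fall back to
+            # GoogleAPICallError.)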
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(CloudTasksRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta3/{resource=projects/*/locations/*/queues/*}:testIamPermissions", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
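+            # (Note: ``TestIamPermissionsResponse`` is a plain protobuf
+            # message, so the payload below is parsed into it directly,
+            # without the proto-plus ``.pb()`` unwrapping used for the
+            # cloudtasks response types.)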
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _UpdateQueue(CloudTasksRestStub): + def __hash__(self): + return hash("UpdateQueue") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudtasks.UpdateQueueRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gct_queue.Queue: + r"""Call the update queue method over HTTP. + + Args: + request (~.cloudtasks.UpdateQueueRequest): + The request object. Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gct_queue.Queue: + A queue is a container of related + tasks. Queues are configured to manage + how those tasks are dispatched. + Configurable properties include rate + limits, retry options, queue types, and + others. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2beta3/{queue.name=projects/*/locations/*/queues/*}", + "body": "queue", + }, + ] + request, metadata = self._interceptor.pre_update_queue(request, metadata) + pb_request = cloudtasks.UpdateQueueRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gct_queue.Queue() + pb_resp = gct_queue.Queue.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_queue(resp) + return resp + + @property + def buffer_task( + self, + ) -> Callable[[cloudtasks.BufferTaskRequest], cloudtasks.BufferTaskResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BufferTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_queue( + self, + ) -> Callable[[cloudtasks.CreateQueueRequest], gct_queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_task(self) -> Callable[[cloudtasks.CreateTaskRequest], gct_task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_queue( + self, + ) -> Callable[[cloudtasks.DeleteQueueRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_task(self) -> Callable[[cloudtasks.DeleteTaskRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_queue(self) -> Callable[[cloudtasks.GetQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_task(self) -> Callable[[cloudtasks.GetTaskRequest], task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_queues( + self, + ) -> Callable[[cloudtasks.ListQueuesRequest], cloudtasks.ListQueuesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListQueues(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tasks( + self, + ) -> Callable[[cloudtasks.ListTasksRequest], cloudtasks.ListTasksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTasks(self._session, self._host, self._interceptor) # type: ignore + + @property + def pause_queue(self) -> Callable[[cloudtasks.PauseQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PauseQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def purge_queue(self) -> Callable[[cloudtasks.PurgeQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PurgeQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def resume_queue(self) -> Callable[[cloudtasks.ResumeQueueRequest], queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ResumeQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_task(self) -> Callable[[cloudtasks.RunTaskRequest], task.Task]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RunTask(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_queue( + self, + ) -> Callable[[cloudtasks.UpdateQueueRequest], gct_queue.Queue]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateQueue(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudTasksRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
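+
+            Example (an illustrative sketch; assumes an already-constructed
+            ``CloudTasksRestTransport`` instance named ``transport``)::
+
+                from google.cloud.location import locations_pb2
+
+                location = transport.get_location(
+                    locations_pb2.GetLocationRequest(
+                        name="projects/my-project/locations/us-central1"
+                    )
+                )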
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta3/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudTasksRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta3/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudTasksRestTransport",) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/__init__.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/__init__.py new file mode 100644 index 000000000000..77d5ff35fbc2 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/__init__.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cloudtasks import ( + BufferTaskRequest, + BufferTaskResponse, + CreateQueueRequest, + CreateTaskRequest, + DeleteQueueRequest, + DeleteTaskRequest, + GetQueueRequest, + GetTaskRequest, + ListQueuesRequest, + ListQueuesResponse, + ListTasksRequest, + ListTasksResponse, + PauseQueueRequest, + PurgeQueueRequest, + ResumeQueueRequest, + RunTaskRequest, + UpdateQueueRequest, +) +from .queue import Queue, QueueStats, RateLimits, RetryConfig, StackdriverLoggingConfig +from .target import ( + AppEngineHttpQueue, + AppEngineHttpRequest, + AppEngineRouting, + HttpMethod, + HttpRequest, + HttpTarget, + OAuthToken, + OidcToken, + PathOverride, + PullMessage, + QueryOverride, + UriOverride, +) +from .task import Attempt, Task + +__all__ = ( + "BufferTaskRequest", + "BufferTaskResponse", + "CreateQueueRequest", + "CreateTaskRequest", + "DeleteQueueRequest", + "DeleteTaskRequest", + "GetQueueRequest", + "GetTaskRequest", + "ListQueuesRequest", + "ListQueuesResponse", + "ListTasksRequest", + "ListTasksResponse", + "PauseQueueRequest", + "PurgeQueueRequest", + "ResumeQueueRequest", + "RunTaskRequest", + "UpdateQueueRequest", + "Queue", + "QueueStats", + "RateLimits", + "RetryConfig", + "StackdriverLoggingConfig", + "AppEngineHttpQueue", + "AppEngineHttpRequest", + "AppEngineRouting", + "HttpRequest", + "HttpTarget", + "OAuthToken", + "OidcToken", + "PathOverride", + "PullMessage", + "QueryOverride", + "UriOverride", + "HttpMethod", + "Attempt", + "Task", +) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/cloudtasks.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/cloudtasks.py new file mode 100644 index 000000000000..04169d15e969 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/cloudtasks.py @@ -0,0 +1,637 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.api import httpbody_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.tasks_v2beta3.types import queue as gct_queue
+from google.cloud.tasks_v2beta3.types import task as gct_task
+
+__protobuf__ = proto.module(
+    package="google.cloud.tasks.v2beta3",
+    manifest={
+        "ListQueuesRequest",
+        "ListQueuesResponse",
+        "GetQueueRequest",
+        "CreateQueueRequest",
+        "UpdateQueueRequest",
+        "DeleteQueueRequest",
+        "PurgeQueueRequest",
+        "PauseQueueRequest",
+        "ResumeQueueRequest",
+        "ListTasksRequest",
+        "ListTasksResponse",
+        "GetTaskRequest",
+        "CreateTaskRequest",
+        "DeleteTaskRequest",
+        "RunTaskRequest",
+        "BufferTaskRequest",
+        "BufferTaskResponse",
+    },
+)
+
+
+class ListQueuesRequest(proto.Message):
+    r"""Request message for
+    [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues].
+
+    Attributes:
+        parent (str):
+            Required. The location name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID``
+        filter (str):
+            ``filter`` can be used to specify a subset of queues. Any
+            [Queue][google.cloud.tasks.v2beta3.Queue] field can be used
+            as a filter and several operators are supported. For example:
+            ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as
+            described in `Stackdriver's Advanced Logs
+            Filters `__.
+
+            Sample filter "state: PAUSED".
+
+            Note that using filters might cause fewer queues than the
+            requested page_size to be returned.
+        page_size (int):
+            Requested page size.
+
+            The maximum page size is 9800. If unspecified, the page size
+            will be the maximum. Fewer queues than requested might be
+            returned, even if more queues exist; use the
+            [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token]
+            in the response to determine if more queues exist.
+        page_token (str):
+            A token identifying the page of results to return.
+
+            To request the first page of results, page_token must be empty.
+            To request the next page of results, page_token must be the
+            value of
+            [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token]
+            returned from the previous call to the
+            [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]
+            method. It is an error to switch the value of the
+            [filter][google.cloud.tasks.v2beta3.ListQueuesRequest.filter]
+            while iterating through pages.
+        read_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Optional. Read mask is used for more granular control over
+            what the API returns. If the mask is not present, all fields
+            will be returned except [Queue.stats]. [Queue.stats] will be
+            returned only if it was explicitly specified in the mask.
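+
+    Example (an illustrative sketch; the project and location IDs are
+    placeholders)::
+
+        from google.cloud import tasks_v2beta3
+
+        client = tasks_v2beta3.CloudTasksClient()
+        request = tasks_v2beta3.ListQueuesRequest(
+            parent="projects/my-project/locations/us-central1",
+            filter="state: PAUSED",
+        )
+        for queue in client.list_queues(request=request):
+            print(queue.name)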
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=5, + message=field_mask_pb2.FieldMask, + ) + + +class ListQueuesResponse(proto.Message): + r"""Response message for + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. + + Attributes: + queues (MutableSequence[google.cloud.tasks_v2beta3.types.Queue]): + The list of queues. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] + with this value as the + [page_token][google.cloud.tasks.v2beta3.ListQueuesRequest.page_token]. + + If the next_page_token is empty, there are no more results. + + The page token is valid for only 2 hours. + """ + + @property + def raw_page(self): + return self + + queues: MutableSequence[gct_queue.Queue] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_queue.Queue, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetQueueRequest(proto.Message): + r"""Request message for + [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. + + Attributes: + name (str): + Required. The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Read mask is used for a more granular control over + what the API returns. If the mask is not present all fields + will be returned except [Queue.stats]. [Queue.stats] will be + returned only if it was explicitly specified in the mask. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class CreateQueueRequest(proto.Message): + r"""Request message for + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. + + Attributes: + parent (str): + Required. The location name in which the queue will be + created. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling + Cloud Tasks' implementation of + [ListLocations][google.cloud.location.Locations.ListLocations]. + queue (google.cloud.tasks_v2beta3.types.Queue): + Required. The queue to create. + + [Queue's name][google.cloud.tasks.v2beta3.Queue.name] cannot + be the same as an existing queue. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + queue: gct_queue.Queue = proto.Field( + proto.MESSAGE, + number=2, + message=gct_queue.Queue, + ) + + +class UpdateQueueRequest(proto.Message): + r"""Request message for + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. + + Attributes: + queue (google.cloud.tasks_v2beta3.types.Queue): + Required. The queue to create or update. + + The queue's [name][google.cloud.tasks.v2beta3.Queue.name] + must be specified. + + Output only fields cannot be modified using UpdateQueue. Any + value specified for an output only field will be ignored. + The queue's [name][google.cloud.tasks.v2beta3.Queue.name] + cannot be changed. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + A mask used to specify which fields of the + queue are being updated. 
+            If empty, then all fields will be updated.
+    """
+
+    queue: gct_queue.Queue = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=gct_queue.Queue,
+    )
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=field_mask_pb2.FieldMask,
+    )
+
+
+class DeleteQueueRequest(proto.Message):
+    r"""Request message for
+    [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue].
+
+    Attributes:
+        name (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class PurgeQueueRequest(proto.Message):
+    r"""Request message for
+    [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue].
+
+    Attributes:
+        name (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class PauseQueueRequest(proto.Message):
+    r"""Request message for
+    [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue].
+
+    Attributes:
+        name (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ResumeQueueRequest(proto.Message):
+    r"""Request message for
+    [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue].
+
+    Attributes:
+        name (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListTasksRequest(proto.Message):
+    r"""Request message for listing tasks using
+    [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks].
+
+    Attributes:
+        parent (str):
+            Required. The queue name. For example:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+        response_view (google.cloud.tasks_v2beta3.types.Task.View):
+            The response_view specifies which subset of the
+            [Task][google.cloud.tasks.v2beta3.Task] will be returned.
+
+            By default response_view is
+            [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all
+            information is retrieved by default because some data, such
+            as payloads, might be desirable to return only when needed
+            because of its large size or because of the sensitivity of
+            data that it contains.
+
+            Authorization for
+            [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires
+            ``cloudtasks.tasks.fullView`` `Google
+            IAM `__ permission on the
+            [Task][google.cloud.tasks.v2beta3.Task] resource.
+        page_size (int):
+            Maximum page size.
+
+            Fewer tasks than requested might be returned, even if more
+            tasks exist; use
+            [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token]
+            in the response to determine if more tasks exist.
+
+            The maximum page size is 1000. If unspecified, the page size
+            will be the maximum.
+        page_token (str):
+            A token identifying the page of results to return.
+
+            To request the first page of results, page_token must be empty.
+            To request the next page of results, page_token must be the
+            value of
+            [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token]
+            returned from the previous call to the
+            [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]
+            method.
+
+            The page token is valid for only 2 hours.
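+
+    Example (an illustrative sketch; identifiers are placeholders)::
+
+        from google.cloud import tasks_v2beta3
+
+        client = tasks_v2beta3.CloudTasksClient()
+        request = tasks_v2beta3.ListTasksRequest(
+            parent="projects/my-project/locations/us-central1/queues/my-queue",
+            page_size=100,
+        )
+        # The returned pager fetches further pages on demand via
+        # next_page_token.
+        for task in client.list_tasks(request=request):
+            print(task.name)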
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListTasksResponse(proto.Message): + r"""Response message for listing tasks using + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. + + Attributes: + tasks (MutableSequence[google.cloud.tasks_v2beta3.types.Task]): + The list of tasks. + next_page_token (str): + A token to retrieve next page of results. + + To return the next page of results, call + [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] + with this value as the + [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. + + If the next_page_token is empty, there are no more results. + """ + + @property + def raw_page(self): + return self + + tasks: MutableSequence[gct_task.Task] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gct_task.Task, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTaskRequest(proto.Message): + r"""Request message for getting a task using + [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2beta3.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +class CreateTaskRequest(proto.Message): + r"""Request message for + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + Attributes: + parent (str): + Required. The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + + The queue must already exist. + task (google.cloud.tasks_v2beta3.types.Task): + Required. The task to add. + + Task names have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``. + The user can optionally specify a task + [name][google.cloud.tasks.v2beta3.Task.name]. If a name is + not specified then the system will generate a random unique + task id, which will be set in the task returned in the + [response][google.cloud.tasks.v2beta3.Task.name]. + + If + [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] + is not set or is in the past then Cloud Tasks will set it to + the current time. + + Task De-duplication: + + Explicitly specifying a task ID enables task de-duplication. + If a task's ID is identical to that of an existing task or a + task that was deleted or executed recently then the call + will fail with + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. 
If the + task's queue was created using Cloud Tasks, then another + task with the same name can't be created for ~1 hour after + the original task was deleted or executed. If the task's + queue was created using queue.yaml or queue.xml, then + another task with the same name can't be created for ~9 days + after the original task was deleted or executed. + + Because there is an extra lookup cost to identify duplicate + task names, these + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + calls have significantly increased latency. Using hashed + strings for the task id or for the prefix of the task id is + recommended. Choosing task ids that are sequential or have + sequential prefixes, for example using a timestamp, causes + an increase in latency and error rates in all task commands. + The infrastructure relies on an approximately uniform + distribution of task ids to store and serve tasks + efficiently. + response_view (google.cloud.tasks_v2beta3.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + task: gct_task.Task = proto.Field( + proto.MESSAGE, + number=2, + message=gct_task.Task, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=3, + enum=gct_task.Task.View, + ) + + +class DeleteTaskRequest(proto.Message): + r"""Request message for deleting a task using + [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RunTaskRequest(proto.Message): + r"""Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. + + Attributes: + name (str): + Required. The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (google.cloud.tasks_v2beta3.types.Task.View): + The response_view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] will be returned. + + By default response_view is + [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + information is retrieved by default because some data, such + as payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Task][google.cloud.tasks.v2beta3.Task] resource. 
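+
+    Example (an illustrative sketch; the task name is a placeholder)::
+
+        from google.cloud import tasks_v2beta3
+
+        client = tasks_v2beta3.CloudTasksClient()
+        task = client.run_task(
+            name="projects/my-project/locations/us-central1/queues/my-queue/tasks/my-task",
+        )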
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + response_view: gct_task.Task.View = proto.Field( + proto.ENUM, + number=2, + enum=gct_task.Task.View, + ) + + +class BufferTaskRequest(proto.Message): + r"""Request message for + [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. + + Attributes: + queue (str): + Required. The parent queue name. For example: + projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID\` + + The queue must already exist. + task_id (str): + Optional. Task ID for the task being created. + If not provided, a random task ID is assigned to + the task. + body (google.api.httpbody_pb2.HttpBody): + Optional. Body of the HTTP request. + + The body can take any generic value. The value is written to + the [HttpRequest][payload] of the [Task]. + """ + + queue: str = proto.Field( + proto.STRING, + number=1, + ) + task_id: str = proto.Field( + proto.STRING, + number=2, + ) + body: httpbody_pb2.HttpBody = proto.Field( + proto.MESSAGE, + number=3, + message=httpbody_pb2.HttpBody, + ) + + +class BufferTaskResponse(proto.Message): + r"""Response message for + [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask]. + + Attributes: + task (google.cloud.tasks_v2beta3.types.Task): + The created task. + """ + + task: gct_task.Task = proto.Field( + proto.MESSAGE, + number=1, + message=gct_task.Task, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/queue.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/queue.py new file mode 100644 index 000000000000..9bf29b01027a --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/queue.py @@ -0,0 +1,618 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.tasks_v2beta3.types import target + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta3", + manifest={ + "Queue", + "RateLimits", + "RetryConfig", + "StackdriverLoggingConfig", + "QueueStats", + }, +) + + +class Queue(proto.Message): + r"""A queue is a container of related tasks. Queues are + configured to manage how those tasks are dispatched. + Configurable properties include rate limits, retry options, + queue types, and others. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Caller-specified and required in + [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue], + after which it becomes output only. + + The queue name. 
+
+            The queue name must have the following format:
+            ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+            - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers
+              ([0-9]), hyphens (-), colons (:), or periods (.). For
+              more information, see `Identifying
+              projects `__
+            - ``LOCATION_ID`` is the canonical ID for the queue's
+              location. The list of available locations can be obtained
+              by calling
+              [ListLocations][google.cloud.location.Locations.ListLocations].
+              For more information, see
+              https://cloud.google.com/about/locations/.
+            - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers
+              ([0-9]), or hyphens (-). The maximum length is 100
+              characters.
+        app_engine_http_queue (google.cloud.tasks_v2beta3.types.AppEngineHttpQueue):
+            [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue]
+            settings apply only to [App Engine
+            tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest] in
+            this queue. [Http
+            tasks][google.cloud.tasks.v2beta3.HttpRequest] are not
+            affected by this proto.
+
+            This field is a member of `oneof`_ ``queue_type``.
+        http_target (google.cloud.tasks_v2beta3.types.HttpTarget):
+            Modifies HTTP target for HTTP tasks.
+        rate_limits (google.cloud.tasks_v2beta3.types.RateLimits):
+            Rate limits for task dispatches.
+
+            [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits]
+            and
+            [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config]
+            are related because they both control task attempts. However,
+            they control task attempts in different ways:
+
+            - [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits]
+              controls the total rate of dispatches from a queue (i.e.
+              all traffic dispatched from the queue, regardless of
+              whether the dispatch is from a first attempt or a retry).
+            - [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config]
+              controls what happens to a particular task after its
+              first attempt fails. That is,
+              [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config]
+              controls task retries (the second attempt, third attempt,
+              etc).
+
+            The queue's actual dispatch rate is the result of:
+
+            - Number of tasks in the queue
+            - User-specified throttling:
+              [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits],
+              [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config],
+              and the [queue's
+              state][google.cloud.tasks.v2beta3.Queue.state].
+            - System throttling due to ``429`` (Too Many Requests) or
+              ``503`` (Service Unavailable) responses from the worker,
+              high error rates, or to smooth sudden large traffic
+              spikes.
+        retry_config (google.cloud.tasks_v2beta3.types.RetryConfig):
+            Settings that determine the retry behavior.
+
+            - For tasks created using Cloud Tasks: the queue-level
+              retry settings apply to all tasks in the queue that were
+              created using Cloud Tasks. Retry settings cannot be set
+              on individual tasks.
+            - For tasks created using the App Engine SDK: the
+              queue-level retry settings apply to all tasks in the
+              queue which do not have retry settings explicitly set on
+              the task and were created by the App Engine SDK. See `App
+              Engine
+              documentation `__.
+        state (google.cloud.tasks_v2beta3.types.Queue.State):
+            Output only. The state of the queue.
+
+            ``state`` can only be changed by calling
+            [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue],
+            [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue],
+            or uploading
+            `queue.yaml/xml `__.
+            [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]
+            cannot be used to change ``state``.
+ purge_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time this queue was purged. + + All tasks that were + [created][google.cloud.tasks.v2beta3.Task.create_time] + before this time were purged. + + A queue can be purged using + [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue], + the `App Engine Task Queue SDK, or the Cloud + Console `__. + + Purge time will be truncated to the nearest microsecond. + Purge time will be unset if the queue has never been purged. + task_ttl (google.protobuf.duration_pb2.Duration): + The maximum amount of time that a task will be retained in + this queue. + + Queues created by Cloud Tasks have a default ``task_ttl`` of + 31 days. After a task has lived for ``task_ttl``, the task + will be deleted regardless of whether it was dispatched or + not. + + The ``task_ttl`` for queues created via queue.yaml/xml is + equal to the maximum duration because there is a `storage + quota `__ + for these queues. To view the maximum valid duration, see + the documentation for [Duration][google.protobuf.Duration]. + tombstone_ttl (google.protobuf.duration_pb2.Duration): + The task tombstone time to live (TTL). + + After a task is deleted or executed, the task's tombstone is + retained for the length of time specified by + ``tombstone_ttl``. The tombstone is used by task + de-duplication; another task with the same name can't be + created until the tombstone has expired. For more + information about task de-duplication, see the documentation + for + [CreateTaskRequest][google.cloud.tasks.v2beta3.CreateTaskRequest.task]. + + Queues created by Cloud Tasks have a default + ``tombstone_ttl`` of 1 hour. + stackdriver_logging_config (google.cloud.tasks_v2beta3.types.StackdriverLoggingConfig): + Configuration options for writing logs to `Stackdriver + Logging `__. If this + field is unset, then no logs are written. + type_ (google.cloud.tasks_v2beta3.types.Queue.Type): + Immutable. The type of a queue (push or pull). + + ``Queue.type`` is an immutable property of the queue that is + set at the queue creation time. When left unspecified, the + default value of ``PUSH`` is selected. + stats (google.cloud.tasks_v2beta3.types.QueueStats): + Output only. The realtime, informational + statistics for a queue. In order to receive the + statistics the caller should include this field + in the FieldMask. + """ + + class State(proto.Enum): + r"""State of the queue. + + Values: + STATE_UNSPECIFIED (0): + Unspecified state. + RUNNING (1): + The queue is running. Tasks can be dispatched. + + If the queue was created using Cloud Tasks and the queue has + had no activity (method calls or task dispatches) for 30 + days, the queue may take a few minutes to re-activate. Some + method calls may return + [NOT_FOUND][google.rpc.Code.NOT_FOUND] and tasks may not be + dispatched for a few minutes until the queue has been + re-activated. + PAUSED (2): + Tasks are paused by the user. If the queue is + paused then Cloud Tasks will stop delivering + tasks from it, but more tasks can still be added + to it by the user. + DISABLED (3): + The queue is disabled. + + A queue becomes ``DISABLED`` when + `queue.yaml `__ + or + `queue.xml `__ + is uploaded which does not contain the queue. You cannot + directly disable a queue. + + When a queue is disabled, tasks can still be added to a + queue but the tasks are not dispatched. + + To permanently delete this queue and all of its tasks, call + [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. 
+ """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + class Type(proto.Enum): + r"""The type of the queue. + + Values: + TYPE_UNSPECIFIED (0): + Default value. + PULL (1): + A pull queue. + PUSH (2): + A push queue. + """ + TYPE_UNSPECIFIED = 0 + PULL = 1 + PUSH = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_queue: target.AppEngineHttpQueue = proto.Field( + proto.MESSAGE, + number=3, + oneof="queue_type", + message=target.AppEngineHttpQueue, + ) + http_target: target.HttpTarget = proto.Field( + proto.MESSAGE, + number=13, + message=target.HttpTarget, + ) + rate_limits: "RateLimits" = proto.Field( + proto.MESSAGE, + number=4, + message="RateLimits", + ) + retry_config: "RetryConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="RetryConfig", + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + purge_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + task_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + tombstone_ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=9, + message=duration_pb2.Duration, + ) + stackdriver_logging_config: "StackdriverLoggingConfig" = proto.Field( + proto.MESSAGE, + number=10, + message="StackdriverLoggingConfig", + ) + type_: Type = proto.Field( + proto.ENUM, + number=11, + enum=Type, + ) + stats: "QueueStats" = proto.Field( + proto.MESSAGE, + number=12, + message="QueueStats", + ) + + +class RateLimits(proto.Message): + r"""Rate limits. + + This message determines the maximum rate that tasks can be + dispatched by a queue, regardless of whether the dispatch is a first + task attempt or a retry. + + Note: The debugging command, + [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask], will run a + task even if the queue has reached its + [RateLimits][google.cloud.tasks.v2beta3.RateLimits]. + + Attributes: + max_dispatches_per_second (float): + The maximum rate at which tasks are dispatched from this + queue. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + - For [App Engine + queues][google.cloud.tasks.v2beta3.AppEngineHttpQueue], + the maximum allowed value is 500. + + This field has the same meaning as `rate in + queue.yaml/xml `__. + max_burst_size (int): + The max burst size. + + Max burst size limits how fast tasks in queue are processed + when many tasks are in the queue and the rate is high. This + field allows the queue to have a high rate so processing + starts shortly after a task is enqueued, but still limits + resource usage when many tasks are enqueued in a short + period of time. + + The `token + bucket `__ + algorithm is used to control the rate of task dispatches. + Each queue has a token bucket that holds tokens, up to the + maximum specified by ``max_burst_size``. Each time a task is + dispatched, a token is removed from the bucket. Tasks will + be dispatched until the queue's bucket runs out of tokens. + The bucket will be continuously refilled with new tokens + based on + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + + The default value of ``max_burst_size`` is picked by Cloud + Tasks based on the value of + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + + The maximum value of ``max_burst_size`` is 500. 
+ + For App Engine queues that were created or updated using + ``queue.yaml/xml``, ``max_burst_size`` is equal to + `bucket_size `__. + If + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] + is called on a queue without explicitly setting a value for + ``max_burst_size``, ``max_burst_size`` value will get + updated if + [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] + is updating + [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + max_concurrent_dispatches (int): + The maximum number of concurrent tasks that Cloud Tasks + allows to be dispatched for this queue. After this threshold + has been reached, Cloud Tasks stops dispatching tasks until + the number of concurrent requests decreases. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + The maximum allowed value is 5,000. + + This field has the same meaning as `max_concurrent_requests + in + queue.yaml/xml `__. + """ + + max_dispatches_per_second: float = proto.Field( + proto.DOUBLE, + number=1, + ) + max_burst_size: int = proto.Field( + proto.INT32, + number=2, + ) + max_concurrent_dispatches: int = proto.Field( + proto.INT32, + number=3, + ) + + +class RetryConfig(proto.Message): + r"""Retry config. + + These settings determine when a failed task attempt is retried. + + Attributes: + max_attempts (int): + Number of attempts per task. + + Cloud Tasks will attempt the task ``max_attempts`` times + (that is, if the first attempt fails, then there will be + ``max_attempts - 1`` retries). Must be >= -1. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + -1 indicates unlimited attempts. + + This field has the same meaning as `task_retry_limit in + queue.yaml/xml `__. + max_retry_duration (google.protobuf.duration_pb2.Duration): + If positive, ``max_retry_duration`` specifies the time limit + for retrying a failed task, measured from when the task was + first attempted. Once ``max_retry_duration`` time has passed + *and* the task has been attempted + [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + times, no further attempts will be made and the task will be + deleted. + + If zero, then the task age is unlimited. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``max_retry_duration`` will be truncated to the nearest + second. + + This field has the same meaning as `task_age_limit in + queue.yaml/xml `__. + min_backoff (google.protobuf.duration_pb2.Duration): + A task will be + [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] + for retry between + [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] + and + [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] + duration after it fails, if the queue's + [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] + specifies that the task should be retried. + + If unspecified when the queue is created, Cloud Tasks will + pick the default. + + ``min_backoff`` will be truncated to the nearest second. + + This field has the same meaning as `min_backoff_seconds in + queue.yaml/xml `__. 
+ max_backoff (google.protobuf.duration_pb2.Duration):
+ A task will be
+ [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time]
+ for retry between
+ [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff]
+ and
+ [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff]
+ duration after it fails, if the queue's
+ [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]
+ specifies that the task should be retried.
+
+ If unspecified when the queue is created, Cloud Tasks will
+ pick the default.
+
+ ``max_backoff`` will be truncated to the nearest second.
+
+ This field has the same meaning as `max_backoff_seconds in
+ queue.yaml/xml `__.
+ max_doublings (int):
+ The time between retries will double ``max_doublings``
+ times.
+
+ A task's retry interval starts at
+ [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff],
+ then doubles ``max_doublings`` times, then increases
+ linearly, and finally retries at intervals of
+ [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff]
+ up to
+ [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts]
+ times.
+
+ For example, if
+ [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff]
+ is 10s,
+ [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff]
+ is 300s, and ``max_doublings`` is 3, then a task will
+ first be retried in 10s. The retry interval will double
+ three times, and then increase linearly by 2^3 \* 10s.
+ Finally, the task will retry at intervals of
+ [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff]
+ until the task has been attempted
+ [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts]
+ times. Thus, the requests will retry at 10s, 20s, 40s, 80s,
+ 160s, 240s, 300s, 300s, ....
+
+ If unspecified when the queue is created, Cloud Tasks will
+ pick the default.
+
+ This field has the same meaning as `max_doublings in
+ queue.yaml/xml `__.
+ """
+
+ max_attempts: int = proto.Field(
+ proto.INT32,
+ number=1,
+ )
+ max_retry_duration: duration_pb2.Duration = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=duration_pb2.Duration,
+ )
+ min_backoff: duration_pb2.Duration = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message=duration_pb2.Duration,
+ )
+ max_backoff: duration_pb2.Duration = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=duration_pb2.Duration,
+ )
+ max_doublings: int = proto.Field(
+ proto.INT32,
+ number=5,
+ )
+
+
+class StackdriverLoggingConfig(proto.Message):
+ r"""Configuration options for writing logs to `Stackdriver
+ Logging `__.
+
+ Attributes:
+ sampling_ratio (float):
+ Specifies the fraction of operations to write to
+ `Stackdriver
+ Logging `__. This
+ field may contain any value between 0.0 and 1.0, inclusive.
+ 0.0 is the default and means that no operations are logged.
+ """
+
+ sampling_ratio: float = proto.Field(
+ proto.DOUBLE,
+ number=1,
+ )
+
+
+class QueueStats(proto.Message):
+ r"""Statistics for a queue.
+
+ Attributes:
+ tasks_count (int):
+ Output only. An estimation of the number of
+ tasks in the queue, that is, the tasks in the
+ queue that haven't been executed, the tasks in
+ the queue which the queue has dispatched but has
+ not yet received a reply for, and the failed
+ tasks that the queue is retrying.
+ oldest_estimated_arrival_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. An estimation of the nearest
+ time in the future where a task in the queue is
+ scheduled to be executed.
+ executed_last_minute_count (int):
+ Output only. The number of tasks that the
+ queue has dispatched and received a reply for
+ during the last minute. This variable counts
+ both successful and non-successful executions.
+ concurrent_dispatches_count (int):
+ Output only. The number of requests that the
+ queue has dispatched but has not received a
+ reply for yet.
+ effective_execution_rate (float):
+ Output only. The current maximum number of
+ tasks per second executed by the queue. The
+ maximum value of this variable is controlled by
+ the RateLimits of the Queue. However, this value
+ could be lower, in order to avoid overloading
+ the endpoints that tasks in the queue are
+ targeting.
+ """
+
+ tasks_count: int = proto.Field(
+ proto.INT64,
+ number=1,
+ )
+ oldest_estimated_arrival_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=timestamp_pb2.Timestamp,
+ )
+ executed_last_minute_count: int = proto.Field(
+ proto.INT64,
+ number=3,
+ )
+ concurrent_dispatches_count: int = proto.Field(
+ proto.INT64,
+ number=4,
+ )
+ effective_execution_rate: float = proto.Field(
+ proto.DOUBLE,
+ number=5,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
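The message classes above are proto-plus wrappers, so they accept keyword
arguments, including nested messages and protobuf well-known types. The
following is an editor's illustrative sketch, not part of the patch: it
assumes the google-cloud-tasks package is installed, and the queue name and
limit values are hypothetical.

    from google.protobuf import duration_pb2

    from google.cloud import tasks_v2beta3

    # Hypothetical resource name; the RateLimits and RetryConfig fields
    # mirror the field definitions in queue.py above.
    queue = tasks_v2beta3.Queue(
        name="projects/my-project/locations/us-central1/queues/my-queue",
        rate_limits=tasks_v2beta3.RateLimits(
            max_dispatches_per_second=10.0,
            max_burst_size=100,
            max_concurrent_dispatches=50,
        ),
        retry_config=tasks_v2beta3.RetryConfig(
            max_attempts=5,
            min_backoff=duration_pb2.Duration(seconds=10),
            max_backoff=duration_pb2.Duration(seconds=300),
            max_doublings=3,
        ),
    )
    print(queue.rate_limits.max_burst_size)  # -> 100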
diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/target.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/target.py
new file mode 100644
index 000000000000..3f04cd9f5d9e
--- /dev/null
+++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/target.py
@@ -0,0 +1,978 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto # type: ignore
+
+__protobuf__ = proto.module(
+ package="google.cloud.tasks.v2beta3",
+ manifest={
+ "HttpMethod",
+ "PullMessage",
+ "PathOverride",
+ "QueryOverride",
+ "UriOverride",
+ "HttpTarget",
+ "HttpRequest",
+ "AppEngineHttpQueue",
+ "AppEngineHttpRequest",
+ "AppEngineRouting",
+ "OAuthToken",
+ "OidcToken",
+ },
+)
+
+
+class HttpMethod(proto.Enum):
+ r"""The HTTP method used to execute the task.
+
+ Values:
+ HTTP_METHOD_UNSPECIFIED (0):
+ HTTP method unspecified
+ POST (1):
+ HTTP POST
+ GET (2):
+ HTTP GET
+ HEAD (3):
+ HTTP HEAD
+ PUT (4):
+ HTTP PUT
+ DELETE (5):
+ HTTP DELETE
+ PATCH (6):
+ HTTP PATCH
+ OPTIONS (7):
+ HTTP OPTIONS
+ """
+ HTTP_METHOD_UNSPECIFIED = 0
+ POST = 1
+ GET = 2
+ HEAD = 3
+ PUT = 4
+ DELETE = 5
+ PATCH = 6
+ OPTIONS = 7
+
+
+class PullMessage(proto.Message):
+ r"""Pull Message.
+
+ This proto can only be used for tasks in a queue which has
+ [PULL][google.cloud.tasks.v2beta3.Queue.type] type. It currently
+ exists for backwards compatibility with the App Engine Task Queue
+ SDK. This message type may be returned with the methods
+ [list][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] and
+ [get][google.cloud.tasks.v2beta3.CloudTasks.GetTask], when the
+ response view is [FULL][google.cloud.tasks.v2beta3.Task.View.FULL].
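+
+ Editor's illustrative sketch (not part of the generated file; the
+ payload and tag values are hypothetical)::
+
+ from google.cloud import tasks_v2beta3
+
+ message = tasks_v2beta3.PullMessage(payload=b"work-item", tag="reports")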
+
+ Attributes:
+ payload (bytes):
+ A data payload consumed by the worker to
+ execute the task.
+ tag (str):
+ The task's tag.
+
+ The tag is less than 500 characters.
+
+ SDK compatibility: Although the SDK allows tags to be either
+ string or
+ `bytes `__,
+ only UTF-8 encoded tags can be used in Cloud Tasks. If a tag
+ isn't UTF-8 encoded, the tag will be empty when the task is
+ returned by Cloud Tasks.
+ """
+
+ payload: bytes = proto.Field(
+ proto.BYTES,
+ number=1,
+ )
+ tag: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+class PathOverride(proto.Message):
+ r"""PathOverride.
+
+ Path message defines path override for HTTP targets.
+
+ Attributes:
+ path (str):
+ The URI path (e.g., /users/1234). Default is
+ an empty string.
+ """
+
+ path: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class QueryOverride(proto.Message):
+ r"""QueryOverride.
+
+ Query message defines query override for HTTP targets.
+
+ Attributes:
+ query_params (str):
+ The query parameters (e.g.,
+ qparam1=123&qparam2=456). Default is an empty
+ string.
+ """
+
+ query_params: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class UriOverride(proto.Message):
+ r"""URI Override.
+
+ When specified, all the HTTP tasks inside the queue will be
+ partially or fully overridden depending on the configured
+ values.
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ scheme (google.cloud.tasks_v2beta3.types.UriOverride.Scheme):
+ Scheme override.
+
+ When specified, the task URI scheme is replaced
+ by the provided value (HTTP or HTTPS).
+
+ This field is a member of `oneof`_ ``_scheme``.
+ host (str):
+ Host override.
+
+ When specified, replaces the host part of the task URL. For
+ example, if the task URL is "https://www.google.com" and the
+ host value is set to "example.net", the overridden URI will
+ be changed to "https://example.net". Host value cannot be an
+ empty string (INVALID_ARGUMENT).
+
+ This field is a member of `oneof`_ ``_host``.
+ port (int):
+ Port override.
+
+ When specified, replaces the port part of the
+ task URI. For instance, for a URI
+ http://www.google.com/foo and port=123, the
+ overridden URI becomes
+ http://www.google.com:123/foo. Note that the
+ port value must be a positive integer. Setting
+ the port to 0 (Zero) clears the URI port.
+
+ This field is a member of `oneof`_ ``_port``.
+ path_override (google.cloud.tasks_v2beta3.types.PathOverride):
+ URI path.
+
+ When specified, replaces the existing path of
+ the task URL. Setting the path value to an empty
+ string clears the URI path segment.
+ query_override (google.cloud.tasks_v2beta3.types.QueryOverride):
+ URI Query.
+
+ When specified, replaces the query part of the
+ task URI. Setting the query value to an empty
+ string clears the URI query segment.
+ uri_override_enforce_mode (google.cloud.tasks_v2beta3.types.UriOverride.UriOverrideEnforceMode):
+ URI Override Enforce Mode.
+
+ When specified, determines the Target
+ UriOverride mode. If not specified, it defaults
+ to ALWAYS.
+ """
+
+ class Scheme(proto.Enum):
+ r"""The Scheme for an HTTP request. By default, it is HTTPS.
+
+ Values:
+ SCHEME_UNSPECIFIED (0):
+ Scheme unspecified. Defaults to HTTPS.
+ HTTP (1):
+ Convert the scheme to HTTP, e.g.,
+ https://www.google.ca will change to
+ http://www.google.ca.
+ HTTPS (2):
+ Convert the scheme to HTTPS, e.g.,
+ http://www.google.ca will change to
+ https://www.google.ca.
+ """
+ SCHEME_UNSPECIFIED = 0
+ HTTP = 1
+ HTTPS = 2
+
+ class UriOverrideEnforceMode(proto.Enum):
+ r"""UriOverrideEnforceMode defines the enforcement mode for the
+ override modes.
+
+ Values:
+ URI_OVERRIDE_ENFORCE_MODE_UNSPECIFIED (0):
+ OverrideMode Unspecified. Defaults to ALWAYS.
+ IF_NOT_EXISTS (1):
+ In the IF_NOT_EXISTS mode, queue-level configuration is only
+ applied where task-level configuration does not exist.
+ ALWAYS (2):
+ In the ALWAYS mode, queue-level configuration
+ overrides all task-level configuration.
+ """
+ URI_OVERRIDE_ENFORCE_MODE_UNSPECIFIED = 0
+ IF_NOT_EXISTS = 1
+ ALWAYS = 2
+
+ scheme: Scheme = proto.Field(
+ proto.ENUM,
+ number=1,
+ optional=True,
+ enum=Scheme,
+ )
+ host: str = proto.Field(
+ proto.STRING,
+ number=2,
+ optional=True,
+ )
+ port: int = proto.Field(
+ proto.INT64,
+ number=3,
+ optional=True,
+ )
+ path_override: "PathOverride" = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message="PathOverride",
+ )
+ query_override: "QueryOverride" = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message="QueryOverride",
+ )
+ uri_override_enforce_mode: UriOverrideEnforceMode = proto.Field(
+ proto.ENUM,
+ number=6,
+ enum=UriOverrideEnforceMode,
+ )
+
+
+class HttpTarget(proto.Message):
+ r"""HTTP target.
+
+ When specified as a [Queue][target_type], all the tasks with
+ [HttpRequest] will be overridden according to the target.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ uri_override (google.cloud.tasks_v2beta3.types.UriOverride):
+ URI override.
+
+ When specified, overrides the execution URI for
+ all the tasks in the queue.
+ http_method (google.cloud.tasks_v2beta3.types.HttpMethod):
+ The HTTP method to use for the request.
+
+ When specified, it overrides
+ [HttpRequest][google.cloud.tasks.v2beta3.HttpTarget.http_method]
+ for the task. Note that if the value is set to
+ [HttpMethod][GET], the [HttpRequest][body] of the task will
+ be ignored at execution time.
+ header_overrides (MutableSequence[google.cloud.tasks_v2beta3.types.HttpTarget.HeaderOverride]):
+ HTTP target headers.
+
+ This map contains the header field names and values. Headers
+ will be set when running the
+ [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]
+ and/or
+ [BufferTask][google.cloud.tasks.v2beta3.CloudTasks.BufferTask].
+
+ These headers represent a subset of the headers that will be
+ configured for the task's HTTP request. Some HTTP request
+ headers will be ignored or replaced.
+
+ A partial list of headers that will be ignored or replaced
+ is:
+
+ - Several predefined headers, prefixed with
+ "X-CloudTasks-", can be used to define properties of the
+ task.
+ - Host: This will be computed by Cloud Tasks and derived
+ from
+ [HttpRequest.url][google.cloud.tasks.v2beta3.Target.HttpRequest.url].
+ - Content-Length: This will be computed by Cloud Tasks.
+
+ ``Content-Type`` won't be set by Cloud Tasks. You can
+ explicitly set ``Content-Type`` to a media type when the
+ [task is
+ created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask].
+ For example,\ ``Content-Type`` can be set to
+ ``"application/octet-stream"`` or ``"application/json"``.
+ The default value is set to ``"application/json"``.
+
+ - User-Agent: This will be set to ``"Google-Cloud-Tasks"``.
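+
+ Editor's illustrative sketch (the ``Header`` and
+ ``HeaderOverride`` types are defined later in this file; the
+ header key and value are hypothetical)::
+
+ header = tasks_v2beta3.HttpTarget.Header(
+ key="X-Custom-Header", value="custom-value")
+ override = tasks_v2beta3.HttpTarget.HeaderOverride(header=header)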
+
+ Headers which can have multiple values (according to
+ RFC2616) can be specified using comma-separated values.
+
+ The size of the headers must be less than 80KB. These
+ queue-level headers override the headers of all the tasks
+ in the queue.
+ oauth_token (google.cloud.tasks_v2beta3.types.OAuthToken):
+ If specified, an `OAuth
+ token `__
+ will be generated and attached as the ``Authorization``
+ header in the HTTP request.
+
+ This type of authorization should generally only be used
+ when calling Google APIs hosted on \*.googleapis.com.
+
+ This field is a member of `oneof`_ ``authorization_header``.
+ oidc_token (google.cloud.tasks_v2beta3.types.OidcToken):
+ If specified, an
+ `OIDC `__
+ token will be generated and attached as an ``Authorization``
+ header in the HTTP request.
+
+ This type of authorization can be used for many scenarios,
+ including calling Cloud Run, or endpoints where you intend
+ to validate the token yourself.
+
+ This field is a member of `oneof`_ ``authorization_header``.
+ """
+
+ class Header(proto.Message):
+ r"""Defines a header message. A header can have a key and a
+ value.
+
+ Attributes:
+ key (str):
+ The Key of the header.
+ value (str):
+ The Value of the header.
+ """
+
+ key: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ value: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+ class HeaderOverride(proto.Message):
+ r"""Wraps the Header object.
+
+ Attributes:
+ header (google.cloud.tasks_v2beta3.types.HttpTarget.Header):
+ A header embodying a key and a value.
+ """
+
+ header: "HttpTarget.Header" = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message="HttpTarget.Header",
+ )
+
+ uri_override: "UriOverride" = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message="UriOverride",
+ )
+ http_method: "HttpMethod" = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum="HttpMethod",
+ )
+ header_overrides: MutableSequence[HeaderOverride] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=3,
+ message=HeaderOverride,
+ )
+ oauth_token: "OAuthToken" = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof="authorization_header",
+ message="OAuthToken",
+ )
+ oidc_token: "OidcToken" = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="authorization_header",
+ message="OidcToken",
+ )
+
+
+class HttpRequest(proto.Message):
+ r"""HTTP request.
+
+ The task will be pushed to the worker as an HTTP request. If the
+ worker or the redirected worker acknowledges the task by returning a
+ successful HTTP response code ([``200`` - ``299``]), the task will
+ be removed from the queue. If any other HTTP response code is
+ returned or no response is received, the task will be retried
+ according to the following:
+
+ - User-specified throttling: [retry
+ configuration][google.cloud.tasks.v2beta3.Queue.retry_config],
+ [rate limits][google.cloud.tasks.v2beta3.Queue.rate_limits], and
+ the [queue's state][google.cloud.tasks.v2beta3.Queue.state].
+
+ - System throttling: To prevent the worker from overloading, Cloud
+ Tasks may temporarily reduce the queue's effective rate.
+ User-specified settings will not be changed.
+
+ System throttling happens because:
+
+ - Cloud Tasks backs off on all errors. Normally the backoff
+ specified in [rate
+ limits][google.cloud.tasks.v2beta3.Queue.rate_limits] will be
+ used. But if the worker returns ``429`` (Too Many Requests),
+ ``503`` (Service Unavailable), or the rate of errors is high,
+ Cloud Tasks will use a higher backoff rate. The retry specified
+ in the ``Retry-After`` HTTP response header is considered.
+ + - To prevent traffic spikes and to smooth sudden increases in + traffic, dispatches ramp up slowly when the queue is newly + created or idle and if large numbers of tasks suddenly become + available to dispatch (due to spikes in create task rates, the + queue being unpaused, or many tasks that are scheduled at the + same time). + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + url (str): + Required. The full url path that the request will be sent + to. + + This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode + some characters for safety and compatibility. The maximum + allowed URL length is 2083 characters after encoding. + + The ``Location`` header response from a redirect response + [``300`` - ``399``] may be followed. The redirect is not + counted as a separate attempt. + http_method (google.cloud.tasks_v2beta3.types.HttpMethod): + The HTTP method to use for the request. The + default is POST. + headers (MutableMapping[str, str]): + HTTP request headers. + + This map contains the header field names and values. Headers + can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. + + A partial list of headers that will be ignored or replaced + is: + + - Any header that is prefixed with "X-CloudTasks-" will be + treated as service header. Service headers define + properties of the task and are predefined in CloudTask. + - Host: This will be computed by Cloud Tasks and derived + from + [HttpRequest.url][google.cloud.tasks.v2beta3.HttpRequest.url]. + - Content-Length: This will be computed by Cloud Tasks. + - User-Agent: This will be set to ``"Google-Cloud-Tasks"``. + - ``X-Google-*``: Google use only. + - ``X-AppEngine-*``: Google use only. + + ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the + [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + + Headers which can have multiple values (according to + RFC2616) can be specified using comma-separated values. + + The size of the headers must be less than 80KB. + body (bytes): + HTTP request body. + + A request body is allowed only if the [HTTP + method][google.cloud.tasks.v2beta3.HttpRequest.http_method] + is POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. + oauth_token (google.cloud.tasks_v2beta3.types.OAuthToken): + If specified, an `OAuth + token `__ + will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization should generally only be used + when calling Google APIs hosted on \*.googleapis.com. + + This field is a member of `oneof`_ ``authorization_header``. 
+ oidc_token (google.cloud.tasks_v2beta3.types.OidcToken): + If specified, an + `OIDC `__ + token will be generated and attached as an ``Authorization`` + header in the HTTP request. + + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend + to validate the token yourself. + + This field is a member of `oneof`_ ``authorization_header``. + """ + + url: str = proto.Field( + proto.STRING, + number=1, + ) + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=2, + enum="HttpMethod", + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + body: bytes = proto.Field( + proto.BYTES, + number=4, + ) + oauth_token: "OAuthToken" = proto.Field( + proto.MESSAGE, + number=5, + oneof="authorization_header", + message="OAuthToken", + ) + oidc_token: "OidcToken" = proto.Field( + proto.MESSAGE, + number=6, + oneof="authorization_header", + message="OidcToken", + ) + + +class AppEngineHttpQueue(proto.Message): + r"""App Engine HTTP queue. + + The task will be delivered to the App Engine application hostname + specified by its + [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] + and + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest]. + The documentation for + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] + explains how the task's host URL is constructed. + + Using + [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + Attributes: + app_engine_routing_override (google.cloud.tasks_v2beta3.types.AppEngineRouting): + Overrides for the [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + + If set, ``app_engine_routing_override`` is used for all + tasks in the queue, no matter what the setting is for the + [task-level + app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + """ + + app_engine_routing_override: "AppEngineRouting" = proto.Field( + proto.MESSAGE, + number=1, + message="AppEngineRouting", + ) + + +class AppEngineHttpRequest(proto.Message): + r"""App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine + app when the task is dispatched. + + Using + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to + the same project as the queue. For more information, see `How + Requests are + Routed `__ + and how routing is affected by `dispatch + files `__. + Traffic is encrypted during transport and never leaves Google + datacenters. Because this traffic is carried over a communication + mechanism internal to Google, you cannot explicitly set the protocol + (for example, HTTP or HTTPS). The request to the handler, however, + will appear to have used the HTTP protocol. 
+
+ The [AppEngineRouting][google.cloud.tasks.v2beta3.AppEngineRouting]
+ used to construct the URL that the task is delivered to can be set
+ at the queue-level or task-level:
+
+ - If set,
+ [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override]
+ is used for all tasks in the queue, no matter what the setting is
+ for the [task-level
+ app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing].
+
+ The ``url`` that the task will be sent to is:
+
+ - ``url =``
+ [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] ``+``
+ [relative_uri][google.cloud.tasks.v2beta3.AppEngineHttpRequest.relative_uri]
+
+ Tasks can be dispatched to secure app handlers, unsecure app
+ handlers, and URIs restricted with
+ ```login: admin`` `__.
+ Because tasks are not run as any user, they cannot be dispatched to
+ URIs restricted with
+ ```login: required`` `__.
+ Task dispatches also do not follow redirects.
+
+ The task attempt has succeeded if the app's request handler returns
+ an HTTP response code in the range [``200`` - ``299``]. The task
+ attempt has failed if the app's handler returns a non-2xx response
+ code or Cloud Tasks does not receive a response before the
+ [deadline][google.cloud.tasks.v2beta3.Task.dispatch_deadline].
+ Failed tasks will be retried according to the [retry
+ configuration][google.cloud.tasks.v2beta3.Queue.retry_config].
+ ``503`` (Service Unavailable) is considered an App Engine system
+ error instead of an application error and will cause Cloud Tasks'
+ traffic congestion control to temporarily throttle the queue's
+ dispatches. Unlike other types of task targets, a ``429`` (Too Many
+ Requests) response from an app handler does not cause traffic
+ congestion control to throttle the queue.
+
+ Attributes:
+ http_method (google.cloud.tasks_v2beta3.types.HttpMethod):
+ The HTTP method to use for the request. The default is POST.
+
+ The app's request handler for the task's target URL must be
+ able to handle HTTP requests with this http_method,
+ otherwise the task attempt fails with error code 405 (Method
+ Not Allowed). See `Writing a push task request
+ handler `__
+ and the App Engine documentation for your runtime on `How
+ Requests are
+ Handled `__.
+ app_engine_routing (google.cloud.tasks_v2beta3.types.AppEngineRouting):
+ Task-level setting for App Engine routing.
+
+ If set,
+ [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override]
+ is used for all tasks in the queue, no matter what the
+ setting is for the [task-level
+ app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing].
+ relative_uri (str):
+ The relative URI.
+
+ The relative URI must begin with "/" and must be
+ a valid HTTP relative URI. It can contain a path
+ and query string arguments. If the relative URI
+ is empty, then the root path "/" will be used.
+ No spaces are allowed, and the maximum length
+ allowed is 2083 characters.
+ headers (MutableMapping[str, str]):
+ HTTP request headers.
+
+ This map contains the header field names and values. Headers
+ can be set when the [task is
+ created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask].
+ Repeated headers are not supported but a header value can
+ contain commas.
+
+ Cloud Tasks sets some headers to default values:
+
+ - ``User-Agent``: By default, this header is
+ ``"AppEngine-Google; (+http://code.google.com/appengine)"``.
+ This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` + to the modified ``User-Agent``. + + If the task has a + [body][google.cloud.tasks.v2beta3.AppEngineHttpRequest.body], + Cloud Tasks sets the following headers: + + - ``Content-Type``: By default, the ``Content-Type`` header + is set to ``"application/octet-stream"``. The default can + be overridden by explicitly setting ``Content-Type`` to a + particular media type when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/json"``. + - ``Content-Length``: This is computed by Cloud Tasks. This + value is output only. It cannot be changed. + + The headers below cannot be set or overridden: + + - ``Host`` + - ``X-Google-*`` + - ``X-AppEngine-*`` + + In addition, Cloud Tasks sets some headers when the task is + dispatched, such as headers containing information about the + task; see `request + headers `__. + These headers are set only when the task is dispatched, so + they are not visible when the task is returned in a Cloud + Tasks response. + + Although there is no specific limit for the maximum number + of headers or the size, there is a limit on the maximum size + of the [Task][google.cloud.tasks.v2beta3.Task]. For more + information, see the + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] + documentation. + body (bytes): + HTTP request body. + + A request body is allowed only if the HTTP method is POST or + PUT. It is an error to set a body on a task with an + incompatible + [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. + """ + + http_method: "HttpMethod" = proto.Field( + proto.ENUM, + number=1, + enum="HttpMethod", + ) + app_engine_routing: "AppEngineRouting" = proto.Field( + proto.MESSAGE, + number=2, + message="AppEngineRouting", + ) + relative_uri: str = proto.Field( + proto.STRING, + number=3, + ) + headers: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + body: bytes = proto.Field( + proto.BYTES, + number=5, + ) + + +class AppEngineRouting(proto.Message): + r"""App Engine Routing. + + Defines routing characteristics specific to App Engine - service, + version, and instance. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, + `App Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + Attributes: + service (str): + App service. + + By default, the task is sent to the service which is the + default service when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] + are the empty string. 
+ version (str): + App version. + + By default, the task is sent to the version which is the + default version when the task is attempted. + + For some queues or tasks which were created using the App + Engine Task Queue API, + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable into + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. + For example, some tasks which were created using the App + Engine SDK use a custom domain name; custom domains are not + parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is + not parsable, then + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] + are the empty string. + instance (str): + App instance. + + By default, the task is sent to an instance which is + available when the task is attempted. + + Requests can only be sent to a specific instance if `manual + scaling is used in App Engine + Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request + routing `__ + and `App Engine Flex request + routing `__. + host (str): + Output only. The host that the task is sent to. + + The host is constructed from the domain name of the app + associated with the queue's project ID (for example + .appspot.com), and the + [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + and + [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. + Tasks which were created using the App Engine SDK might have + a custom domain name. + + For more information, see `How Requests are + Routed `__. + """ + + service: str = proto.Field( + proto.STRING, + number=1, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + instance: str = proto.Field( + proto.STRING, + number=3, + ) + host: str = proto.Field( + proto.STRING, + number=4, + ) + + +class OAuthToken(proto.Message): + r"""Contains information needed for generating an `OAuth + token `__. + This type of authorization should generally only be used when + calling Google APIs hosted on \*.googleapis.com. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OAuth token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. + scope (str): + OAuth scope to be used for generating OAuth + access token. If not specified, + "https://www.googleapis.com/auth/cloud-platform" + will be used. + """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + scope: str = proto.Field( + proto.STRING, + number=2, + ) + + +class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the + token yourself. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating OIDC token. The service account + must be within the same project as the queue. The caller + must have iam.serviceAccounts.actAs permission for the + service account. 
+ audience (str): + Audience to be used when generating OIDC + token. If not specified, the URI specified in + target will be used. + """ + + service_account_email: str = proto.Field( + proto.STRING, + number=1, + ) + audience: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/task.py b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/task.py new file mode 100644 index 000000000000..bbaae9e99a60 --- /dev/null +++ b/packages/google-cloud-tasks/google/cloud/tasks_v2beta3/types/task.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.tasks_v2beta3.types import target + +__protobuf__ = proto.module( + package="google.cloud.tasks.v2beta3", + manifest={ + "Task", + "Attempt", + }, +) + + +class Task(proto.Message): + r"""A unit of scheduled work. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Optionally caller-specified in + [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + + The task name. + + The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For + more information, see `Identifying + projects `__ + - ``LOCATION_ID`` is the canonical ID for the task's + location. The list of available locations can be obtained + by calling + [ListLocations][google.cloud.location.Locations.ListLocations]. + For more information, see + https://cloud.google.com/about/locations/. + - ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), or hyphens (-). The maximum length is 100 + characters. + - ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (_). The maximum + length is 500 characters. + app_engine_http_request (google.cloud.tasks_v2beta3.types.AppEngineHttpRequest): + HTTP request that is sent to the App Engine app handler. + + An App Engine task is a task that has + [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] + set. + + This field is a member of `oneof`_ ``payload_type``. + http_request (google.cloud.tasks_v2beta3.types.HttpRequest): + HTTP request that is sent to the task's target. 
+
+ An HTTP task is a task that has
+ [HttpRequest][google.cloud.tasks.v2beta3.HttpRequest] set.
+
+ This field is a member of `oneof`_ ``payload_type``.
+ pull_message (google.cloud.tasks_v2beta3.types.PullMessage):
+ Pull Message contained in a task in a
+ [PULL][google.cloud.tasks.v2beta3.Queue.type] queue type.
+ This payload type cannot be explicitly set through the
+ Cloud Tasks API. Its current purpose is to provide backward
+ compatibility with App Engine Task Queue
+ `pull `__
+ queues, providing a way to inspect the contents of pull
+ tasks through
+ [CloudTasks.GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask].
+
+ This field is a member of `oneof`_ ``payload_type``.
+ schedule_time (google.protobuf.timestamp_pb2.Timestamp):
+ The time when the task is scheduled to be attempted.
+
+ For App Engine queues, this is when the task will be
+ attempted or retried.
+
+ ``schedule_time`` will be truncated to the nearest
+ microsecond.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The time that the task was created.
+
+ ``create_time`` will be truncated to the nearest second.
+ dispatch_deadline (google.protobuf.duration_pb2.Duration):
+ The deadline for requests sent to the worker. If the worker
+ does not respond by this deadline then the request is
+ cancelled and the attempt is marked as a
+ ``DEADLINE_EXCEEDED`` failure. Cloud Tasks will retry the
+ task according to the
+ [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig].
+
+ Note that when the request is cancelled, Cloud Tasks will
+ stop listening for the response, but whether the worker
+ stops processing depends on the worker. For example, if the
+ worker is stuck, it may not react to cancelled requests.
+
+ The default and maximum values depend on the type of
+ request:
+
+ - For [HTTP tasks][google.cloud.tasks.v2beta3.HttpRequest],
+ the default is 10 minutes. The deadline must be in the
+ interval [15 seconds, 30 minutes].
+
+ - For [App Engine
+ tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest],
+ 0 indicates that the request has the default deadline.
+ The default deadline depends on the `scaling
+ type `__
+ of the service: 10 minutes for standard apps with
+ automatic scaling, 24 hours for standard apps with manual
+ and basic scaling, and 60 minutes for flex apps. If the
+ request deadline is set, it must be in the interval [15
+ seconds, 24 hours 15 seconds]. Regardless of the task's
+ ``dispatch_deadline``, the app handler will not run for
+ longer than the service's timeout. We recommend
+ setting the ``dispatch_deadline`` to at most a few
+ seconds more than the app handler's timeout. For more
+ information, see
+ `Timeouts `__.
+
+ ``dispatch_deadline`` will be truncated to the nearest
+ millisecond. The deadline is an approximate deadline.
+ dispatch_count (int):
+ Output only. The number of attempts
+ dispatched.
+ This count includes attempts which have been
+ dispatched but haven't received a response.
+ response_count (int):
+ Output only. The number of attempts which
+ have received a response.
+ first_attempt (google.cloud.tasks_v2beta3.types.Attempt):
+ Output only. The status of the task's first attempt.
+
+ Only
+ [dispatch_time][google.cloud.tasks.v2beta3.Attempt.dispatch_time]
+ will be set. The other
+ [Attempt][google.cloud.tasks.v2beta3.Attempt] information is
+ not retained by Cloud Tasks.
+ last_attempt (google.cloud.tasks_v2beta3.types.Attempt):
+ Output only. The status of the task's last
+ attempt.
+ view (google.cloud.tasks_v2beta3.types.Task.View): + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2beta3.Task] has been returned. + """ + + class View(proto.Enum): + r"""The view specifies a subset of + [Task][google.cloud.tasks.v2beta3.Task] data. + + When a task is returned in a response, not all information is + retrieved by default because some data, such as payloads, might be + desirable to return only when needed because of its large size or + because of the sensitivity of data that it contains. + + Values: + VIEW_UNSPECIFIED (0): + Unspecified. Defaults to BASIC. + BASIC (1): + The basic view omits fields which can be large or can + contain sensitive data. + + This view does not include the [body in + AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest.body]. + Bodies are desirable to return only when needed, because + they can be large and because of the sensitivity of the data + that you choose to store in it. + FULL (2): + All information is returned. + + Authorization for + [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google + IAM `__ permission on the + [Queue][google.cloud.tasks.v2beta3.Queue] resource. + """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + app_engine_http_request: target.AppEngineHttpRequest = proto.Field( + proto.MESSAGE, + number=3, + oneof="payload_type", + message=target.AppEngineHttpRequest, + ) + http_request: target.HttpRequest = proto.Field( + proto.MESSAGE, + number=11, + oneof="payload_type", + message=target.HttpRequest, + ) + pull_message: target.PullMessage = proto.Field( + proto.MESSAGE, + number=13, + oneof="payload_type", + message=target.PullMessage, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + dispatch_deadline: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=12, + message=duration_pb2.Duration, + ) + dispatch_count: int = proto.Field( + proto.INT32, + number=6, + ) + response_count: int = proto.Field( + proto.INT32, + number=7, + ) + first_attempt: "Attempt" = proto.Field( + proto.MESSAGE, + number=8, + message="Attempt", + ) + last_attempt: "Attempt" = proto.Field( + proto.MESSAGE, + number=9, + message="Attempt", + ) + view: View = proto.Field( + proto.ENUM, + number=10, + enum=View, + ) + + +class Attempt(proto.Message): + r"""The status of a task attempt. + + Attributes: + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was scheduled. + + ``schedule_time`` will be truncated to the nearest + microsecond. + dispatch_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt was dispatched. + + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time that this attempt response was + received. + + ``response_time`` will be truncated to the nearest + microsecond. + response_status (google.rpc.status_pb2.Status): + Output only. The response from the worker for this attempt. + + If ``response_time`` is unset, then the task has not been + attempted or is currently running and the + ``response_status`` field is meaningless. 
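+
+ Editor's illustrative sketch (assumes a ``task`` message already
+ obtained from the API)::
+
+ attempt = task.last_attempt
+ print(attempt.dispatch_time, attempt.response_status.code)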
+ """ + + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + dispatch_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + response_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + response_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-tasks/mypy.ini b/packages/google-cloud-tasks/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-tasks/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-tasks/noxfile.py b/packages/google-cloud-tasks/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-tasks/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
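+
+ Editor's note (illustrative): run this session locally with
+ ``nox -s cover`` after one or more unit sessions have written
+ coverage data, since it only aggregates existing results.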
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
+        # See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run("py.test", "tests/unit")
diff --git a/packages/google-cloud-tasks/renovate.json b/packages/google-cloud-tasks/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-tasks/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-tasks/samples/snippets/README.md b/packages/google-cloud-tasks/samples/snippets/README.md
new file mode 100644
index 000000000000..8d4b4622a61f
--- /dev/null
+++ b/packages/google-cloud-tasks/samples/snippets/README.md
@@ -0,0 +1,4 @@
+Samples migrated
+================
+
+New location: https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/cloud_tasks/snippets
\ No newline at end of file
diff --git a/packages/google-cloud-tasks/scripts/decrypt-secrets.sh b/packages/google-cloud-tasks/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..0018b421ddf8
--- /dev/null
+++ b/packages/google-cloud-tasks/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2023 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
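# Illustrative aside, not part of the patch: the ${VAR:-default} expansion used
# below substitutes the fallback only when SECRET_MANAGER_PROJECT is unset or
# empty, for example:
#
#   unset SECRET_MANAGER_PROJECT
#   echo "${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
#   # prints: cloud-devrel-kokoro-resources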
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-tasks/scripts/fixup_tasks_v2_keywords.py b/packages/google-cloud-tasks/scripts/fixup_tasks_v2_keywords.py new file mode 100644 index 000000000000..2e4991a948fc --- /dev/null +++ b/packages/google-cloud-tasks/scripts/fixup_tasks_v2_keywords.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class tasksCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_queue': ('parent', 'queue', ), + 'create_task': ('parent', 'task', 'response_view', ), + 'delete_queue': ('name', ), + 'delete_task': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_queue': ('name', ), + 'get_task': ('name', 'response_view', ), + 'list_queues': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), + 'pause_queue': ('name', ), + 'purge_queue': ('name', ), + 'resume_queue': ('name', ), + 'run_task': ('name', 'response_view', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_queue': ('queue', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
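            # Illustrative aside, not part of the patch: the guard above makes
            # the transformer idempotent. Once a call has been rewritten into
            # the request={...} form, the "request" keyword is present and the
            # call is returned unchanged on later passes, e.g. (variable names
            # hypothetical):
            #
            #   client.create_queue(parent, queue)             # gets rewritten
            #   client.create_queue(request={'parent': parent,
            #                                'queue': queue})  # left as-is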
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=tasksCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the tasks client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta2_keywords.py b/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta2_keywords.py new file mode 100644 index 000000000000..a585e51b63f7 --- /dev/null +++ b/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta2_keywords.py @@ -0,0 +1,197 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class tasksCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'acknowledge_task': ('name', 'schedule_time', ), + 'buffer_task': ('queue', 'task_id', 'body', ), + 'cancel_lease': ('name', 'schedule_time', 'response_view', ), + 'create_queue': ('parent', 'queue', ), + 'create_task': ('parent', 'task', 'response_view', ), + 'delete_queue': ('name', ), + 'delete_task': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_queue': ('name', 'read_mask', ), + 'get_task': ('name', 'response_view', ), + 'lease_tasks': ('parent', 'lease_duration', 'max_tasks', 'response_view', 'filter', ), + 'list_queues': ('parent', 'filter', 'page_size', 'page_token', 'read_mask', ), + 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), + 'pause_queue': ('name', ), + 'purge_queue': ('name', ), + 'renew_lease': ('name', 'schedule_time', 'lease_duration', 'response_view', ), + 'resume_queue': ('name', ), + 'run_task': ('name', 'response_view', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_queue': ('queue', 'update_mask', ), + 'upload_queue_yaml': ('app_id', 'http_body', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=tasksCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. 
+ + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the tasks client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta3_keywords.py b/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta3_keywords.py new file mode 100644 index 000000000000..d08dd32eaad6 --- /dev/null +++ b/packages/google-cloud-tasks/scripts/fixup_tasks_v2beta3_keywords.py @@ -0,0 +1,192 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
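# Illustrative aside, not part of the patch: the partition() helper repeated in
# these generated scripts is a stable two-way split that returns the
# predicate-true items first:
#
#   evens, odds = partition(lambda n: n % 2 == 0, [1, 2, 3, 4])
#   # evens == [2, 4], odds == [1, 3]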
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class tasksCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'buffer_task': ('queue', 'task_id', 'body', ), + 'create_queue': ('parent', 'queue', ), + 'create_task': ('parent', 'task', 'response_view', ), + 'delete_queue': ('name', ), + 'delete_task': ('name', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_queue': ('name', 'read_mask', ), + 'get_task': ('name', 'response_view', ), + 'list_queues': ('parent', 'filter', 'page_size', 'page_token', 'read_mask', ), + 'list_tasks': ('parent', 'response_view', 'page_size', 'page_token', ), + 'pause_queue': ('name', ), + 'purge_queue': ('name', ), + 'resume_queue': ('name', ), + 'run_task': ('name', 'response_view', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_queue': ('queue', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=tasksCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
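            # Illustrative aside, not part of the patch: relative_to() preserves
            # the tree shape, so in_dir/pkg/mod.py is written to
            # out_dir/pkg/mod.py, with missing parents created by
            # mkdir(parents=True, exist_ok=True) below.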
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the tasks client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-tasks/scripts/readme-gen/readme_gen.py b/packages/google-cloud-tasks/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-tasks/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
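# Illustrative aside, not part of the patch: readme_gen.py below renders a
# Jinja2 template against a YAML config; a hypothetical invocation (file names
# are placeholders):
#
#   python scripts/readme-gen/readme_gen.py samples/snippets/README.rst.in \
#       --destination README.rst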
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-tasks/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-tasks/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-tasks/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. 
+
+    $ python {{sample.file}}
+{% if sample.show_help %}
+
+    {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+   https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+   https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+   https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-tasks/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-tasks/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000000..1446b94a5e3a
--- /dev/null
+++ b/packages/google-cloud-tasks/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication set up. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+   https://cloud.google.com/docs/authentication/getting-started
diff --git a/packages/google-cloud-tasks/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-tasks/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000000..11957ce2714a
--- /dev/null
+++ b/packages/google-cloud-tasks/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+   https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/packages/google-cloud-tasks/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-tasks/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..6f069c6c87a5
--- /dev/null
+++ b/packages/google-cloud-tasks/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+   .. code-block:: bash
+
+       $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
+
+   .. code-block:: bash
+
+       $ virtualenv env
+       $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+   .. code-block:: bash
+
+       $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. 
_virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-tasks/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-tasks/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-tasks/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-tasks/setup.cfg b/packages/google-cloud-tasks/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-tasks/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-tasks/setup.py b/packages/google-cloud-tasks/setup.py new file mode 100644 index 000000000000..0b51b471c127 --- /dev/null +++ b/packages/google-cloud-tasks/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
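# Illustrative aside, not part of the patch: setup.py below resolves the
# package version by exec()-ing gapic_version.py into a dict; a minimal
# stand-alone sketch of the same pattern:
#
#   version = {}
#   with open("google/cloud/tasks/gapic_version.py") as fp:
#       exec(fp.read(), version)
#   print(version["__version__"])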
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-tasks" + + +description = "Google Cloud Tasks API client library" + +version = {} +with open(os.path.join(package_root, "google/cloud/tasks/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-tasks/tasks-v2-py.tar.gz b/packages/google-cloud-tasks/tasks-v2-py.tar.gz new file mode 100644 index 000000000000..aef0fb0139d6 Binary files /dev/null and b/packages/google-cloud-tasks/tasks-v2-py.tar.gz differ diff --git a/packages/google-cloud-tasks/testing/.gitignore b/packages/google-cloud-tasks/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-tasks/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-tasks/testing/constraints-3.10.txt b/packages/google-cloud-tasks/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-tasks/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
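# Illustrative aside, not part of the patch: the entries below are deliberately
# unpinned ("any installable version"); only constraints-3.7.txt pins exact
# lower bounds, e.g. google-api-core==1.34.0 to match setup.py's ">= 1.34.0".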
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-tasks/testing/constraints-3.11.txt b/packages/google-cloud-tasks/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-tasks/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-tasks/testing/constraints-3.12.txt b/packages/google-cloud-tasks/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-tasks/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-tasks/testing/constraints-3.7.txt b/packages/google-cloud-tasks/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/packages/google-cloud-tasks/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/google-cloud-tasks/testing/constraints-3.8.txt b/packages/google-cloud-tasks/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-tasks/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-tasks/testing/constraints-3.9.txt b/packages/google-cloud-tasks/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-tasks/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-tasks/tests/__init__.py b/packages/google-cloud-tasks/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-tasks/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
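# Illustrative aside, not part of the patch: a hypothetical local run that
# exercises the pinned lower bounds from constraints-3.7.txt:
#
#   pip install -e . -c testing/constraints-3.7.txt
#   pytest tests/unit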
+# diff --git a/packages/google-cloud-tasks/tests/system/__init__.py b/packages/google-cloud-tasks/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-tasks/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-tasks/tests/system/smoke_test.py b/packages/google-cloud-tasks/tests/system/smoke_test.py new file mode 100644 index 000000000000..1b16dae443b7 --- /dev/null +++ b/packages/google-cloud-tasks/tests/system/smoke_test.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import tasks_v2 + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_queues(project_id: str, transport: str): + client = tasks_v2.CloudTasksClient(transport=transport) + + parent = client.common_location_path(project_id, location="us-central1") + client.list_queues(parent=parent) + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. + assert True diff --git a/packages/google-cloud-tasks/tests/unit/__init__.py b/packages/google-cloud-tasks/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-tasks/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
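# Illustrative aside, not part of the patch: the smoke test above runs once per
# transport ("grpc" and "rest"); an equivalent stand-alone call, assuming
# PROJECT_ID names a real project with the Cloud Tasks API enabled
# ("my-project" is a placeholder):
#
#   from google.cloud import tasks_v2
#   client = tasks_v2.CloudTasksClient(transport="rest")
#   parent = client.common_location_path("my-project", location="us-central1")
#   client.list_queues(parent=parent)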
+# diff --git a/packages/google-cloud-tasks/tests/unit/gapic/__init__.py b/packages/google-cloud-tasks/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-tasks/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/__init__.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py new file mode 100644 index 000000000000..1a7a193a9277 --- /dev/null +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2/test_cloud_tasks.py @@ -0,0 +1,10669 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
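# Illustrative aside, not part of the patch: the endpoint tests below rely on
# the convention that the mTLS endpoint inserts an "mtls." label, e.g.
# "cloudtasks.googleapis.com" -> "cloudtasks.mtls.googleapis.com" and
# "example.sandbox.googleapis.com" -> "example.mtls.sandbox.googleapis.com".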
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.tasks_v2.services.cloud_tasks import ( + CloudTasksAsyncClient, + CloudTasksClient, + pagers, + transports, +) +from google.cloud.tasks_v2.types import cloudtasks +from google.cloud.tasks_v2.types import queue +from google.cloud.tasks_v2.types import queue as gct_queue +from google.cloud.tasks_v2.types import target +from google.cloud.tasks_v2.types import task +from google.cloud.tasks_v2.types import task as gct_task + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
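# Illustrative aside, not part of the patch: with the helper below, a client
# whose DEFAULT_ENDPOINT contains "localhost" is remapped to
# "foo.googleapis.com", so mTLS-endpoint derivation can still be exercised
# against a googleapis.com-style host in the tests.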
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudTasksClient._get_default_mtls_endpoint(None) is None + assert ( + CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudTasksClient, "grpc"), + (CloudTasksAsyncClient, "grpc_asyncio"), + (CloudTasksClient, "rest"), + ], +) +def test_cloud_tasks_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudtasks.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudtasks.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CloudTasksGrpcTransport, "grpc"), + (transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudTasksRestTransport, "rest"), + ], +) +def test_cloud_tasks_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudTasksClient, "grpc"), + (CloudTasksAsyncClient, "grpc_asyncio"), + (CloudTasksClient, "rest"), + ], +) +def test_cloud_tasks_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert 
isinstance(client, client_class) + + assert client.transport._host == ( + "cloudtasks.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudtasks.googleapis.com" + ) + + +def test_cloud_tasks_client_get_transport_class(): + transport = CloudTasksClient.get_transport_class() + available_transports = [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksRestTransport, + ] + assert transport in available_transports + + transport = CloudTasksClient.get_transport_class("grpc") + assert transport == transports.CloudTasksGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest"), + ], +) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +def test_cloud_tasks_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", "true"), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_tasks_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient]) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +def test_cloud_tasks_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
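+ # (get_mtls_endpoint_and_cert_source() resolves the endpoint/cert pair
+ # without constructing a transport; it returns a two-tuple, e.g.
+ # (illustrative):
+ #
+ #   endpoint, cert_src = CloudTasksClient.get_mtls_endpoint_and_cert_source()
+ #
+ # With the env var "true" and an explicit client_cert_source, both the
+ # user-supplied endpoint and the cert source are passed through.)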
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest"), + ], +) +def test_cloud_tasks_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", grpc_helpers), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", None), + ], +) +def test_cloud_tasks_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch( + "google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", grpc_helpers), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_tasks_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
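+ # (Both google.auth entry points are patched, so no credentials.json needs
+ # to exist on disk; the assertion on create_channel then proves that the
+ # file-derived credentials, not ADC's, reach the gRPC channel, together
+ # with the Cloud Tasks default scope and host.)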
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListQueuesRequest, + dict, + ], +) +def test_list_queues(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + client.list_queues() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + +@pytest.mark.asyncio +async def test_list_queues_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ListQueuesRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. 
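+ # (Each mock_calls entry unpacks as a (name, args, kwargs) triple; args[0]
+ # is the request proto the client actually built, so comparing it against
+ # a fresh ListQueuesRequest() verifies the outgoing request.)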
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_queues_async_from_dict(): + await test_list_queues_async(request_type=dict) + + +def test_list_queues_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + call.return_value = cloudtasks.ListQueuesResponse() + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_queues_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_queues( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_queues_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
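+ # (Flattened keyword arguments are sugar for populating the request proto,
+ # so combining them with an explicit request object is ambiguous; the
+ # generated client raises ValueError rather than guessing.)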
+ with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_queues_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_queues( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_queues_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_queues( + cloudtasks.ListQueuesRequest(), + parent="parent_value", + ) + + +def test_list_queues_pager(transport_name: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token="def", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_queues(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, queue.Queue) for i in results) + + +def test_list_queues_pages(transport_name: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Set the response to a series of pages. 
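+ # (An iterable side_effect yields one response per call; the trailing
+ # RuntimeError would be raised only if the pager over-fetched, i.e.
+ # requested a page beyond the final response, whose empty next_page_token
+ # signals the end of iteration.)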
+ call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token="def", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + pages = list(client.list_queues(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_queues_async_pager(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token="def", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_queues( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, queue.Queue) for i in responses) + + +@pytest.mark.asyncio +async def test_list_queues_async_pages(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_queues), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token="def", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_queues(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetQueueRequest, + dict, + ], +) +def test_get_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
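+ # (The stub attribute is a gRPC multicallable object; patching "__call__"
+ # on its type intercepts the invocation itself, since Python looks up
+ # special methods on the class, so no real channel traffic occurs.)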
+ with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + response = client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + client.get_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + +@pytest.mark.asyncio +async def test_get_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.GetQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + ) + response = await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_get_queue_async_from_dict(): + await test_get_queue_async(request_type=dict) + + +def test_get_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + call.return_value = queue.Queue() + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
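+ # (Routing information travels as the "x-goog-request-params" metadata
+ # entry, built from the resource name so the backend can route the call.)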
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateQueueRequest, + dict, + ], +) +def test_create_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + client.create_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + +@pytest.mark.asyncio +async def test_create_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.CreateQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + ) + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_create_queue_async_from_dict(): + await test_create_queue_async(request_type=dict) + + +def test_create_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudtasks.CreateQueueRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + + +def test_create_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
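+ # (The parent/queue kwargs are packed into a single CreateQueueRequest
+ # before reaching the transport; the assertions below read each field
+ # back off args[0] to confirm the packing.)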
+ response = await client.create_queue( + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.UpdateQueueRequest, + dict, + ], +) +def test_update_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + client.update_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + +@pytest.mark.asyncio +async def test_update_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.UpdateQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + ) + response = await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_update_queue_async_from_dict(): + await test_update_queue_async(request_type=dict) + + +def test_update_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "queue.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "queue.name=name_value", + ) in kw["metadata"] + + +def test_update_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
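+ # (update_mask is a google.protobuf field_mask_pb2.FieldMask; an
+ # illustrative real-world mask for a Queue update might be
+ #   field_mask_pb2.FieldMask(paths=["rate_limits.max_dispatches_per_second"])
+ # which limits the update to the named field. "paths_value" here is just
+ # a placeholder produced by the generator.)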
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteQueueRequest, + dict, + ], +) +def test_delete_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
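+ # (When neither a request nor flattened fields are given, the client
+ # substitutes a default-constructed DeleteQueueRequest(), which the
+ # args[0] comparison below confirms.)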
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + client.delete_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + +@pytest.mark.asyncio +async def test_delete_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_queue_async_from_dict(): + await test_delete_queue_async(request_type=dict) + + +def test_delete_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + call.return_value = None + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PurgeQueueRequest, + dict, + ], +) +def test_purge_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + client.purge_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + +@pytest.mark.asyncio +async def test_purge_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + ) + response = await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_purge_queue_async_from_dict(): + await test_purge_queue_async(request_type=dict) + + +def test_purge_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + call.return_value = queue.Queue() + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
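+    # Entries in `mock_calls` unpack as (name, args, kwargs); the routing
+    # header travels in the `metadata` keyword argument.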
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_purge_queue_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.purge_queue), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = queue.Queue()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.purge_queue(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_purge_queue_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.purge_queue(
+            cloudtasks.PurgeQueueRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_purge_queue_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.purge_queue), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.purge_queue(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_purge_queue_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.purge_queue(
+            cloudtasks.PurgeQueueRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.PauseQueueRequest,
+        dict,
+    ],
+)
+def test_pause_queue(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_queue), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = queue.Queue(
+            name="name_value",
+            state=queue.Queue.State.RUNNING,
+        )
+        response = client.pause_queue(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.PauseQueueRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + client.pause_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + +@pytest.mark.asyncio +async def test_pause_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + ) + response = await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_pause_queue_async_from_dict(): + await test_pause_queue_async(request_type=dict) + + +def test_pause_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + call.return_value = queue.Queue() + client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.pause_queue), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
+        await client.pause_queue(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_pause_queue_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_queue), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = queue.Queue()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.pause_queue(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_pause_queue_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.pause_queue(
+            cloudtasks.PauseQueueRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_pause_queue_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.pause_queue), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.pause_queue(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_pause_queue_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.pause_queue(
+            cloudtasks.PauseQueueRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.ResumeQueueRequest,
+        dict,
+    ],
+)
+def test_resume_queue(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + response = client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + client.resume_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + +@pytest.mark.asyncio +async def test_resume_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + ) + response = await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_resume_queue_async_from_dict(): + await test_resume_queue_async(request_type=dict) + + +def test_resume_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + call.return_value = queue.Queue() + client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_resume_queue_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.ResumeQueueRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_queue), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
+        await client.resume_queue(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_resume_queue_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_queue), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = queue.Queue()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.resume_queue(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_resume_queue_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.resume_queue(
+            cloudtasks.ResumeQueueRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_resume_queue_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.resume_queue), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.resume_queue(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_resume_queue_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
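+    # For the IAM methods the routing field is `resource` rather than `name`,
+    # so the expected header value is `resource=resource_value`.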
+ request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_get_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
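+    # The mixed calling convention is rejected client-side with a ValueError,
+    # before any RPC is attempted.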
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource="resource_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_iam_policy(
+            resource="resource_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = "resource_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource="resource_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        iam_policy_pb2.SetIamPolicyRequest,
+        dict,
+    ],
+)
+def test_set_iam_policy(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.SetIamPolicyRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource_value",
+    ) in kw["metadata"]
+
+
+def test_set_iam_policy_from_dict_foreign():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        response = client.set_iam_policy(
+            request={
+                "resource": "resource_value",
+                "policy": policy_pb2.Policy(version=774),
+                "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]),
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_iam_policy(
+            resource="resource_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = "resource_value"
+        assert arg == mock_val
+
+
+def test_set_iam_policy_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource="resource_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_iam_policy(
+            resource="resource_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = "resource_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
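+        # `permissions` is a repeated field, so its flattened value is a list.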
+        client.test_iam_permissions(
+            resource="resource_value",
+            permissions=["permissions_value"],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = "resource_value"
+        assert arg == mock_val
+        arg = args[0].permissions
+        mock_val = ["permissions_value"]
+        assert arg == mock_val
+
+
+def test_test_iam_permissions_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource="resource_value",
+            permissions=["permissions_value"],
+        )
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.test_iam_permissions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            iam_policy_pb2.TestIamPermissionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.test_iam_permissions(
+            resource="resource_value",
+            permissions=["permissions_value"],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = "resource_value"
+        assert arg == mock_val
+        arg = args[0].permissions
+        mock_val = ["permissions_value"]
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource="resource_value",
+            permissions=["permissions_value"],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.ListTasksRequest,
+        dict,
+    ],
+)
+def test_list_tasks(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tasks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cloudtasks.ListTasksResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.ListTasksRequest()
+
+    # Establish that the response is the type that we expect.
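+    # list_tasks wraps the raw ListTasksResponse in a pager that handles
+    # page-token iteration transparently.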
+ assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + client.list_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + +@pytest.mark.asyncio +async def test_list_tasks_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ListTasksRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) + + +def test_list_tasks_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value = cloudtasks.ListTasksResponse() + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.list_tasks), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.ListTasksResponse()
+        )
+        await client.list_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_tasks_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tasks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cloudtasks.ListTasksResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_tasks(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_tasks_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_tasks(
+            cloudtasks.ListTasksRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tasks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.ListTasksResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_tasks(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_tasks(
+            cloudtasks.ListTasksRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_tasks_pager(transport_name: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tasks), "__call__") as call:
+        # Set the response to a series of pages.
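+        # Each element of `side_effect` is returned on successive calls; the
+        # trailing RuntimeError would be raised if a fifth page were requested.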
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_tasks(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, task.Task) for i in results)
+
+
+def test_list_tasks_pages(transport_name: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tasks), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_tasks(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_pager():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_tasks(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, task.Task) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_pages():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
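+        # With `new_callable=mock.AsyncMock` the patched `__call__` is already
+        # awaitable, so plain responses need no FakeUnaryUnaryCall wrapper.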
+ call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetTaskRequest, + dict, + ], +) +def test_get_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_get_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + client.get_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + +@pytest.mark.asyncio +async def test_get_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.GetTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. 
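+        # FakeUnaryUnaryCall makes the mocked response awaitable, mimicking a
+        # grpc.aio unary-unary call object.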
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + ) + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_get_task_async_from_dict(): + await test_get_task_async(request_type=dict) + + +def test_get_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = task.Task() + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_task( + cloudtasks.GetTaskRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateTaskRequest, + dict, + ], +) +def test_create_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + ) + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_task), "__call__") as call: + client.create_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + +@pytest.mark.asyncio +async def test_create_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + ) + ) + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_create_task_async_from_dict(): + await test_create_task_async(request_type=dict) + + +def test_create_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + call.return_value = gct_task.Task() + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].task + mock_val = gct_task.Task(name="name_value") + assert arg == mock_val + + +def test_create_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_task( + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].task + mock_val = gct_task.Task(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteTaskRequest, + dict, + ], +) +def test_delete_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + client.delete_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + +@pytest.mark.asyncio +async def test_delete_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.DeleteTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_task_async_from_dict(): + await test_delete_task_async(request_type=dict) + + +def test_delete_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + call.return_value = None + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_task( + cloudtasks.DeleteTaskRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.RunTaskRequest, + dict, + ], +) +def test_run_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_run_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + client.run_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + +@pytest.mark.asyncio +async def test_run_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.RunTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + ) + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_run_task_async_from_dict(): + await test_run_task_async(request_type=dict) + + +def test_run_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + call.return_value = task.Task() + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_run_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_run_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.run_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_run_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.run_task( + cloudtasks.RunTaskRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListQueuesRequest, + dict, + ], +) +def test_list_queues_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_queues(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_rest_required_fields(request_type=cloudtasks.ListQueuesRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_queues._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_queues._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListQueuesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_queues(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_queues_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_queues._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_queues_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_list_queues" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_list_queues" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.ListQueuesRequest.pb(cloudtasks.ListQueuesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudtasks.ListQueuesResponse.to_json( + cloudtasks.ListQueuesResponse() + ) + + request = cloudtasks.ListQueuesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudtasks.ListQueuesResponse() + + client.list_queues( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_queues_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.ListQueuesRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_queues(request) + + +def test_list_queues_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListQueuesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_queues(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*}/queues" % client.transport._host, + args[1], + ) + + +def test_list_queues_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), + parent="parent_value", + ) + + +def test_list_queues_rest_pager(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token="def", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudtasks.ListQueuesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_queues(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, queue.Queue) for i in results) + + pages = list(client.list_queues(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetQueueRequest, + dict, + ], +) +def test_get_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_rest_required_fields(request_type=cloudtasks.GetQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_get_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_get_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.GetQueueRequest.pb(cloudtasks.GetQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.GetQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.get_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.GetQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_queue(request) + + +def test_get_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/queues/*}" % client.transport._host, + args[1], + ) + + +def test_get_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +def test_get_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateQueueRequest, + dict, + ], +) +def test_create_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["queue"] = { + "name": "name_value", + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "rate_limits": { + "max_dispatches_per_second": 0.26380000000000003, + "max_burst_size": 1519, + "max_concurrent_dispatches": 2671, + }, + "retry_config": { + "max_attempts": 1303, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "stackdriver_logging_config": {"sampling_ratio": 0.1497}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_rest_required_fields(request_type=cloudtasks.CreateQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gct_queue.Queue.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_queue(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_queue_rest_unset_required_fields():
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials
+    )
+
+    unset_fields = transport.create_queue._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "queue",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_queue_rest_interceptors(null_interceptor):
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudTasksRestInterceptor(),
+    )
+    client = CloudTasksClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "post_create_queue"
+    ) as post, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "pre_create_queue"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudtasks.CreateQueueRequest.pb(cloudtasks.CreateQueueRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gct_queue.Queue.to_json(gct_queue.Queue())
+
+        request = cloudtasks.CreateQueueRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gct_queue.Queue()
+
+        client.create_queue(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_queue_rest_bad_request(
+    transport: str = "rest", request_type=cloudtasks.CreateQueueRequest
+):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["queue"] = {
+        "name": "name_value",
+        "app_engine_routing_override": {
+            "service": "service_value",
+            "version": "version_value",
+            "instance": "instance_value",
+            "host": "host_value",
+        },
+        "rate_limits": {
+            "max_dispatches_per_second": 0.26380000000000003,
+            "max_burst_size": 1519,
+            "max_concurrent_dispatches": 2671,
+        },
+        "retry_config": {
+            "max_attempts": 1303,
+            "max_retry_duration": {"seconds": 751, "nanos": 543},
+            "min_backoff": {},
+            "max_backoff": {},
+            "max_doublings": 1388,
+        },
+        "state": 1,
+        "purge_time": {"seconds": 751, "nanos": 543},
+        "stackdriver_logging_config": {"sampling_ratio": 0.1497},
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_queue(request)
+
+
+def test_create_queue_rest_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gct_queue.Queue()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            queue=gct_queue.Queue(name="name_value"),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gct_queue.Queue.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_queue(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v2/{parent=projects/*/locations/*}/queues" % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_queue_rest_flattened_error(transport: str = "rest"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +def test_create_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.UpdateQueueRequest, + dict, + ], +) +def test_update_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + request_init["queue"] = { + "name": "projects/sample1/locations/sample2/queues/sample3", + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "rate_limits": { + "max_dispatches_per_second": 0.26380000000000003, + "max_burst_size": 1519, + "max_concurrent_dispatches": 2671, + }, + "retry_config": { + "max_attempts": 1303, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "stackdriver_logging_config": {"sampling_ratio": 0.1497}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_queue(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_rest_required_fields(request_type=cloudtasks.UpdateQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_queue._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("queue",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_update_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_update_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.UpdateQueueRequest.pb(cloudtasks.UpdateQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gct_queue.Queue.to_json(gct_queue.Queue()) + + request = cloudtasks.UpdateQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gct_queue.Queue() + + client.update_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.UpdateQueueRequest +): + client = 
CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + request_init["queue"] = { + "name": "projects/sample1/locations/sample2/queues/sample3", + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + }, + "rate_limits": { + "max_dispatches_per_second": 0.26380000000000003, + "max_burst_size": 1519, + "max_concurrent_dispatches": 2671, + }, + "retry_config": { + "max_attempts": 1303, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "stackdriver_logging_config": {"sampling_ratio": 0.1497}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_queue(request) + + +def test_update_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{queue.name=projects/*/locations/*/queues/*}" + % client.transport._host, + args[1], + ) + + +def test_update_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteQueueRequest, + dict, + ], +) +def test_delete_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_queue(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_rest_required_fields(request_type=cloudtasks.DeleteQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
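+ # DeleteQueue transcodes to an HTTP DELETE with no request body, which
+ # is why the stubbed transcode result below carries no "body" key;
+ # everything the request needs must travel in the URI or query string.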
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_delete_queue" + ) as pre: + pre.assert_not_called() + pb_message = cloudtasks.DeleteQueueRequest.pb(cloudtasks.DeleteQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudtasks.DeleteQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_queue(request) + + +def test_delete_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
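+ # DeleteQueue returns google.protobuf.Empty, so there is no message to
+ # serialize: the mocked response is an empty JSON payload and the client
+ # surfaces the result as None.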
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/queues/*}" % client.transport._host, + args[1], + ) + + +def test_delete_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +def test_delete_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PurgeQueueRequest, + dict, + ], +) +def test_purge_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_queue(request) + + # Establish that the response is the type that we expect. 
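+ # Note the module switch: the mutation RPCs above use gct_queue.Queue
+ # while purge/pause/resume use queue.Queue. The two names appear to be
+ # import aliases of the same generated types module, so the message type
+ # is identical either way.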
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_rest_required_fields(request_type=cloudtasks.PurgeQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_purge_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_purge_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.PurgeQueueRequest.pb(cloudtasks.PurgeQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.PurgeQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.purge_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_queue(request) + + +def test_purge_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
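+ # Patching type(client.transport._session).request swaps the method on
+ # the session class rather than on one bound instance, so the mock is in
+ # effect no matter which Session object the transport actually uses; the
+ # required-fields tests patch requests.Session directly for the same
+ # effect.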
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.purge_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/queues/*}:purge" + % client.transport._host, + args[1], + ) + + +def test_purge_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name="name_value", + ) + + +def test_purge_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PauseQueueRequest, + dict, + ], +) +def test_pause_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pause_queue(request) + + # Establish that the response is the type that we expect. 
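+ # The state enum survives the mocked JSON round trip even though the
+ # transport requests integer enums on the wire ("$alt",
+ # "json;enum-encoding=int"): json_format emits the symbolic name here
+ # and the parser accepts either encoding when rebuilding the Queue.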
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_rest_required_fields(request_type=cloudtasks.PauseQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
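+ # Unlike the DELETE case above, PauseQueue transcodes to an HTTP POST,
+ # so the stub below also supplies a "body" entry; only the standing
+ # system parameter pair ("$alt", "json;enum-encoding=int") is then
+ # expected to reach the query string.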
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.pause_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pause_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pause_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pause_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_pause_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_pause_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.PauseQueueRequest.pb(cloudtasks.PauseQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.PauseQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.pause_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_pause_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.pause_queue(request) + + +def test_pause_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.pause_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/queues/*}:pause" + % client.transport._host, + args[1], + ) + + +def test_pause_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), + name="name_value", + ) + + +def test_pause_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ResumeQueueRequest, + dict, + ], +) +def test_resume_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resume_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_rest_required_fields(request_type=cloudtasks.ResumeQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resume_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resume_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.resume_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_resume_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_resume_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.ResumeQueueRequest.pb(cloudtasks.ResumeQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.ResumeQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.resume_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume_queue(request) + + +def test_resume_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resume_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/queues/*}:resume" + % client.transport._host, + args[1], + ) + + +def test_resume_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name="name_value", + ) + + +def test_resume_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
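+ # Policy.etag is a bytes field, hence the b"etag_blob" expectation; the
+ # protobuf JSON mapping base64-encodes bytes on the way out and decodes
+ # them again on parse, so the literal round-trips intact.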
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
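+ # The IAM methods use the stock protobuf messages from iam_policy_pb2
+ # rather than proto-plus wrappers, which is why the code below assigns
+ # pb_request = request directly instead of calling
+ # request_type.pb(request) as the queue tests do.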
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_get_iam_policy" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_get_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.GetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + + request = iam_policy_pb2.GetIamPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_get_iam_policy_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
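+ # SetIamPolicy requires both "resource" (path) and "policy" (body), but
+ # only the string-valued "resource" is primed through the jsonified
+ # checks above; the policy message itself rides in the POST body, so it
+ # never needs to appear among the query parameters.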
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_set_iam_policy" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_set_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.SetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + + request = iam_policy_pb2.SetIamPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_set_iam_policy_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
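+ # TestIamPermissions echoes back the subset of the requested permissions
+ # that the caller actually holds, so the repeated field deserializes to
+ # a plain Python list and can be compared directly below.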
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_rest_required_fields( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + jsonified_request["permissions"] = "permissions_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == "permissions_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
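+ # For comparison, an unmocked transcode() would return a dict of the
+ # same shape with the http rule applied, roughly (values illustrative
+ # only): {"uri": <expanded path>, "method": "post",
+ # "body": <body fields>, "query_params": <remaining fields>}.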
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "permissions", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_test_iam_permissions" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_test_iam_permissions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + request = iam_policy_pb2.TestIamPermissionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
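+ # A canned 400 status is enough here: google.api_core maps HTTP error
+ # codes onto typed exceptions, so the client should raise
+ # core_exceptions.BadRequest instead of surfacing the raw response.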
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + permissions=["permissions_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +def test_test_iam_permissions_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListTasksRequest, + dict, + ], +) +def test_list_tasks_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_tasks(request) + + # Establish that the response is the type that we expect. 
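+ # Note that list_tasks() wraps the single HTTP response in a pager, so
+ # the assertion below checks for pagers.ListTasksPager rather than the
+ # raw cloudtasks.ListTasksResponse.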
+ assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_rest_required_fields(request_type=cloudtasks.ListTasksRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tasks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tasks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "response_view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_tasks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_tasks_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_tasks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "responseView", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tasks_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_list_tasks" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_list_tasks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.ListTasksRequest.pb(cloudtasks.ListTasksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudtasks.ListTasksResponse.to_json( + cloudtasks.ListTasksResponse() + ) + + request = cloudtasks.ListTasksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudtasks.ListTasksResponse() + + client.list_tasks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_tasks_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.ListTasksRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_tasks(request) + + +def test_list_tasks_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_tasks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*/queues/*}/tasks" + % client.transport._host, + args[1], + ) + + +def test_list_tasks_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), + parent="parent_value", + ) + + +def test_list_tasks_rest_pager(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
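+ # The pages below carry 3 + 0 + 1 + 2 = 6 tasks, with continuation
+ # tokens "abc", "def" and "ghi"; the last page omits the token, which
+ # tells the pager to stop. The tuple is doubled because the pager is
+ # exercised twice (once for items, once for pages).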
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudtasks.ListTasksResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + pager = client.list_tasks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + pages = list(client.list_tasks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetTaskRequest, + dict, + ], +) +def test_get_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_task(request) + + # Establish that the response is the type that we expect. 
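+ # The enum field round-trips too: MessageToJson writes the BASIC view
+ # by name and the client parses it back, so the view assertion below
+ # needs no special handling.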
+ assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_get_task_rest_required_fields(request_type=cloudtasks.GetTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_task._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("response_view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(("responseView",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_get_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_get_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.GetTaskRequest.pb(cloudtasks.GetTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = task.Task.to_json(task.Task()) + + request = cloudtasks.GetTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = task.Task() + + client.get_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.GetTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_task(request) + + +def test_get_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/queues/*/tasks/*}" + % client.transport._host, + args[1], + ) + + +def test_get_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), + name="name_value", + ) + + +def test_get_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateTaskRequest, + dict, + ], +) +def test_create_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_rest_required_fields(request_type=cloudtasks.CreateTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gct_task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_task._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "task", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_create_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_create_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.CreateTaskRequest.pb(cloudtasks.CreateTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gct_task.Task.to_json(gct_task.Task()) + + request = cloudtasks.CreateTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gct_task.Task() + + client.create_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_task(request) + + +def test_create_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{parent=projects/*/locations/*/queues/*}/tasks" + % client.transport._host, + args[1], + ) + + +def test_create_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +def test_create_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteTaskRequest, + dict, + ], +) +def test_delete_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_task(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_task_rest_required_fields(request_type=cloudtasks.DeleteTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_delete_task" + ) as pre: + pre.assert_not_called() + pb_message = cloudtasks.DeleteTaskRequest.pb(cloudtasks.DeleteTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudtasks.DeleteTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.DeleteTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_task(request) + + +def test_delete_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
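+ # delete_task maps to google.protobuf.Empty, so the canned body is an
+ # empty string and the call is expected to return None.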
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/queues/*/tasks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), + name="name_value", + ) + + +def test_delete_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.RunTaskRequest, + dict, + ], +) +def test_run_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_run_task_rest_required_fields(request_type=cloudtasks.RunTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.run_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_run_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_run_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.RunTaskRequest.pb(cloudtasks.RunTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = task.Task.to_json(task.Task()) + + request = cloudtasks.RunTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = task.Task() + + client.run_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.RunTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_task(request) + + +def test_run_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
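+ # run_task uses the ":run" custom verb, so the template validated below
+ # ends in "tasks/*}:run" rather than a plain resource path.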
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run" + % client.transport._host, + args[1], + ) + + +def test_run_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), + name="name_value", + ) + + +def test_run_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
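+ # When a ready-made transport is supplied, the client adopts it as-is;
+ # the credential/transport combinations rejected above are the ones
+ # that would force the client to reconfigure such an instance.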
+ transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + transports.CloudTasksRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudTasksClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudTasksGrpcTransport, + ) + + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
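+ # The base class only defines the interface; the concrete gRPC and REST
+ # transports override each method listed below, so calling any of them
+ # on the abstract base must raise.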
+ methods = ( + "list_queues", + "get_queue", + "create_queue", + "update_queue", + "delete_queue", + "purge_queue", + "pause_queue", + "resume_queue", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + "list_tasks", + "get_task", + "create_task", + "delete_task", + "run_task", + "get_location", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.tasks_v2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport() + adc.assert_called_once() + + +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +def test_cloud_tasks_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
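+ # google.auth.default() is patched so the test never touches real
+ # Application Default Credentials; the assertion only checks that the
+ # transport requests the cloud-platform default scope while passing
+ # through the user-supplied scopes and quota project.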
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + transports.CloudTasksRestTransport, + ], +) +def test_cloud_tasks_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudTasksGrpcTransport, grpc_helpers), + (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
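+    # client_cert_source_for_mtls is a callback returning (cert_bytes,
+    # key_bytes); the transport is expected to feed that pair into
+    # grpc.ssl_channel_credentials(), which the mock below asserts.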
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_cloud_tasks_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.CloudTasksRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_cloud_tasks_host_no_port(transport_name):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="cloudtasks.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "cloudtasks.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://cloudtasks.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_cloud_tasks_host_with_port(transport_name):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="cloudtasks.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "cloudtasks.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://cloudtasks.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_cloud_tasks_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = CloudTasksClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = CloudTasksClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_queues._session
+    session2 = client2.transport.list_queues._session
+    assert session1 != session2
+    session1 = client1.transport.get_queue._session
+    session2 = client2.transport.get_queue._session
+    assert session1 != session2
+    session1 = client1.transport.create_queue._session
+    session2 = client2.transport.create_queue._session
+    assert session1 != session2
+    session1 = client1.transport.update_queue._session
+    session2 = client2.transport.update_queue._session
+    assert session1 != session2
+    session1 = client1.transport.delete_queue._session
+    session2 = client2.transport.delete_queue._session
+    assert session1 != session2
+    session1 = client1.transport.purge_queue._session
+    session2 = client2.transport.purge_queue._session
+    assert session1 != session2
+    session1 = client1.transport.pause_queue._session
+    session2 = client2.transport.pause_queue._session
+    assert session1 != session2
+    session1 = client1.transport.resume_queue._session
+    session2 = client2.transport.resume_queue._session
+    assert session1 != session2
+    session1 = client1.transport.get_iam_policy._session
+    session2 = client2.transport.get_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.set_iam_policy._session
+    session2 = client2.transport.set_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.test_iam_permissions._session
+    session2 = client2.transport.test_iam_permissions._session
+    assert session1 != session2
+    session1 = client1.transport.list_tasks._session
+    session2 = client2.transport.list_tasks._session
+    assert session1 != session2
+    session1 = client1.transport.get_task._session
+    session2 = client2.transport.get_task._session
+    assert session1 != session2
+    session1 = client1.transport.create_task._session
+    session2 = client2.transport.create_task._session
+    assert session1 != session2
+    session1 = client1.transport.delete_task._session
+    session2 = client2.transport.delete_task._session
+    assert session1 != session2
+    session1 = client1.transport.run_task._session
+    session2 = client2.transport.run_task._session
+    assert session1 != session2
+
+
+def test_cloud_tasks_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudTasksGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_cloud_tasks_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudTasksGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
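+# Until then, both deprecated arguments must keep working and must emit a
+# DeprecationWarning, which the tests below assert via pytest.warns.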
+@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + expected = "projects/{project}/locations/{location}/queues/{queue}".format( + project=project, + location=location, + queue=queue, + ) + actual = CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. 
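+    # parse_queue_path is the inverse of queue_path: it matches the template
+    # "projects/{project}/locations/{location}/queues/{queue}" against the
+    # string and returns the captured segments as a dict, e.g.
+    # "projects/octopus/locations/oyster/queues/nudibranch" ->
+    # {"project": "octopus", "location": "oyster", "queue": "nudibranch"}.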
+ actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + + +def test_task_path(): + project = "cuttlefish" + location = "mussel" + queue = "winkle" + task = "nautilus" + expected = ( + "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format( + project=project, + location=location, + queue=queue, + task=task, + ) + ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "scallop", + "location": "abalone", + "queue": "squid", + "task": "clam", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudTasksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = CloudTasksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CloudTasksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = CloudTasksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CloudTasksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = CloudTasksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = CloudTasksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = CloudTasksClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CloudTasksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = CloudTasksClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
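+    # The common_* helpers build the resource names shared by every GAPIC
+    # client (billing accounts, folders, organizations, projects, locations);
+    # each parse_common_* method is the inverse of its builder.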
+ actual = CloudTasksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
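+    # The REST transport sends requests through a requests.Session, so
+    # patching Session.request and returning a fabricated 400 Response lets
+    # the error-mapping layer raise core_exceptions.BadRequest, which
+    # pytest.raises then captures.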
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations(transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
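+    # Patching __call__ on the type of the bound stub intercepts the
+    # unary-unary invocation itself, so no bytes ever reach a real channel.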
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudTasksClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = CloudTasksAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
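+    # Routing parameters travel in the call's metadata kwarg as the
+    # ("x-goog-request-params", "name=...") pair, which backends presumably
+    # use to route the request to the right resource.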
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = CloudTasksClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = CloudTasksClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (CloudTasksClient, transports.CloudTasksGrpcTransport),
+        (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/__init__.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/__init__.py
new file mode 100644
index 000000000000..89a37dc92c5a
--- /dev/null
+++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py
new file mode 100644
index 000000000000..c6dbddda9e23
--- /dev/null
+++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta2/test_cloud_tasks.py
@@ -0,0 +1,13502 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+from collections.abc import Iterable
+import json
+import math
+
+from google.api import httpbody_pb2  # type: ignore
+from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+import google.auth
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.location import locations_pb2
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import options_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import any_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import json_format
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+from google.type import expr_pb2  # type: ignore
+import grpc
+from grpc.experimental import aio
+from proto.marshal.rules import wrappers
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+import pytest
+from requests import PreparedRequest, Request, Response
+from requests.sessions import Session
+
+from google.cloud.tasks_v2beta2.services.cloud_tasks import (
+    CloudTasksAsyncClient,
+    CloudTasksClient,
+    pagers,
+    transports,
+)
+from google.cloud.tasks_v2beta2.types import cloudtasks
+from google.cloud.tasks_v2beta2.types import queue
+from google.cloud.tasks_v2beta2.types import queue as gct_queue
+from google.cloud.tasks_v2beta2.types import target
+from google.cloud.tasks_v2beta2.types import task
+from google.cloud.tasks_v2beta2.types import task as gct_task
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert CloudTasksClient._get_default_mtls_endpoint(None) is None
+    assert (
+        CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    )
+    assert (
+        CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (CloudTasksClient, "grpc"),
+        (CloudTasksAsyncClient, "grpc_asyncio"),
+        (CloudTasksClient, "rest"),
+    ],
+)
+def test_cloud_tasks_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "cloudtasks.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://cloudtasks.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.CloudTasksGrpcTransport, "grpc"),
+        (transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"),
+        (transports.CloudTasksRestTransport, "rest"),
+    ],
+)
+def test_cloud_tasks_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (CloudTasksClient, "grpc"),
+        (CloudTasksAsyncClient, "grpc_asyncio"),
+        (CloudTasksClient, "rest"),
+    ],
+)
+def test_cloud_tasks_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "cloudtasks.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://cloudtasks.googleapis.com"
+        )
+
+
+def test_cloud_tasks_client_get_transport_class():
+    transport = CloudTasksClient.get_transport_class()
+    available_transports = [
+        transports.CloudTasksGrpcTransport,
+        transports.CloudTasksRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = CloudTasksClient.get_transport_class("grpc")
+    assert transport == transports.CloudTasksGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"),
+        (
+            CloudTasksAsyncClient,
+            transports.CloudTasksGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (CloudTasksClient, transports.CloudTasksRestTransport, "rest"),
+    ],
+)
+@mock.patch.object(
+    CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)
+)
+@mock.patch.object(
+    CloudTasksAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(CloudTasksAsyncClient),
+)
+def test_cloud_tasks_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
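+    # "always" forces DEFAULT_MTLS_ENDPOINT regardless of whether a client
+    # certificate is actually available.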
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", "true"), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_tasks_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
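+    # Three scenarios are exercised below: an explicit client_cert_source, a
+    # cert supplied by ADC, and no cert at all, each crossed with
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE set to "true" or "false".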
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient]) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +def test_cloud_tasks_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
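+    # mock.patch.dict overlays os.environ for the duration of the block and
+    # restores the original environment on exit, so each case below sees
+    # exactly one configuration.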
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest"), + ], +) +def test_cloud_tasks_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
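+    # ClientOptions.scopes overrides the transport's default OAuth scopes;
+    # the placeholder values "1" and "2" only need to round-trip unchanged
+    # into the patched transport constructor.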
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", grpc_helpers), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", None), + ], +) +def test_cloud_tasks_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch( + "google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", grpc_helpers), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_tasks_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
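+    # Three patches cooperate here: load_credentials_from_file returns
+    # file_creds, google.auth.default would return ADC creds, and
+    # create_channel records which of the two actually reached the channel;
+    # the assertion below expects file_creds to win.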
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListQueuesRequest, + dict, + ], +) +def test_list_queues(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + client.list_queues() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + +@pytest.mark.asyncio +async def test_list_queues_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ListQueuesRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_queues_async_from_dict(): + await test_list_queues_async(request_type=dict) + + +def test_list_queues_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + call.return_value = cloudtasks.ListQueuesResponse() + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_queues_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_queues( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_queues_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
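+    # Supplying both a request object and flattened keyword fields would make
+    # it ambiguous which values take precedence, so the client rejects the
+    # combination outright.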
+    with pytest.raises(ValueError):
+        client.list_queues(
+            cloudtasks.ListQueuesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_queues_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_queues), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.ListQueuesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_queues(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_queues_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_queues(
+            cloudtasks.ListQueuesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_queues_pager(transport_name: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_queues), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_queues(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, queue.Queue) for i in results)
+
+
+def test_list_queues_pages(transport_name: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_queues), "__call__") as call:
+        # Set the response to a series of pages.
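+        # side_effect hands out one response per invocation; the trailing
+        # RuntimeError makes the test fail loudly if the pager requests more
+        # pages than the fixture provides.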
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_queues(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_queues_async_pager():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_queues), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_queues(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, queue.Queue) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_queues_async_pages():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_queues), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListQueuesResponse(
+                queues=[
+                    queue.Queue(),
+                    queue.Queue(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_queues(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.GetQueueRequest,
+        dict,
+    ],
+)
+def test_get_queue(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
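+    # __call__ is patched on type(...) because Python resolves the call
+    # operator on the class, not the instance, so patching the attribute on
+    # the multicallable instance itself would never be hit.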
+ with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + response = client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + client.get_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + +@pytest.mark.asyncio +async def test_get_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.GetQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + ) + response = await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_get_queue_async_from_dict(): + await test_get_queue_async(request_type=dict) + + +def test_get_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + call.return_value = queue.Queue() + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
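+    # x-goog-request-params is the routing header: the client serializes URI
+    # fields from the request message into it so the backend can route the
+    # call to the right resource.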
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateQueueRequest, + dict, + ], +) +def test_create_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + client.create_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + +@pytest.mark.asyncio +async def test_create_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.CreateQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + ) + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_create_queue_async_from_dict(): + await test_create_queue_async(request_type=dict) + + +def test_create_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
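+    # proto-plus request messages support plain attribute assignment, so the
+    # routing field can be populated after construction.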
+ request = cloudtasks.CreateQueueRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + + +def test_create_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
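+        # Truthy values keep the assertions below meaningful: a falsy value
+        # would be indistinguishable from an unset proto3 field default.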
+ response = await client.create_queue( + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.UpdateQueueRequest, + dict, + ], +) +def test_update_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + client.update_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + +@pytest.mark.asyncio +async def test_update_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.UpdateQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. 
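+        # FakeUnaryUnaryCall wraps the response in an awaitable, mimicking
+        # the call object a real async gRPC stub would return.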
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + ) + response = await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_update_queue_async_from_dict(): + await test_update_queue_async(request_type=dict) + + +def test_update_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "queue.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "queue.name=name_value", + ) in kw["metadata"] + + +def test_update_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteQueueRequest, + dict, + ], +) +def test_delete_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
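+    # With no arguments at all, the client is expected to synthesize a default
+    # DeleteQueueRequest, which the final assertion verifies.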
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + client.delete_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + +@pytest.mark.asyncio +async def test_delete_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_queue_async_from_dict(): + await test_delete_queue_async(request_type=dict) + + +def test_delete_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + call.return_value = None + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PurgeQueueRequest, + dict, + ], +) +def test_purge_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + client.purge_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + +@pytest.mark.asyncio +async def test_purge_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + ) + response = await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_purge_queue_async_from_dict(): + await test_purge_queue_async(request_type=dict) + + +def test_purge_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + call.return_value = queue.Queue() + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_purge_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.purge_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_purge_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.purge_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PauseQueueRequest, + dict, + ], +) +def test_pause_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + response = client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + client.pause_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + +@pytest.mark.asyncio +async def test_pause_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + ) + response = await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_pause_queue_async_from_dict(): + await test_pause_queue_async(request_type=dict) + + +def test_pause_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + call.return_value = queue.Queue() + client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_pause_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.pause_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_pause_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.pause_queue( + cloudtasks.PauseQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ResumeQueueRequest, + dict, + ], +) +def test_resume_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + response = client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + client.resume_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + +@pytest.mark.asyncio +async def test_resume_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + ) + response = await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +@pytest.mark.asyncio +async def test_resume_queue_async_from_dict(): + await test_resume_queue_async(request_type=dict) + + +def test_resume_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + call.return_value = queue.Queue() + client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_resume_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_resume_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
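+    # The ValueError is raised client-side, before any request is built or the
+    # transport is invoked, so no mock is needed here.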
+ with pytest.raises(ValueError): + await client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.UploadQueueYamlRequest, + dict, + ], +) +def test_upload_queue_yaml(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upload_queue_yaml), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.upload_queue_yaml(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UploadQueueYamlRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_upload_queue_yaml_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upload_queue_yaml), "__call__" + ) as call: + client.upload_queue_yaml() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UploadQueueYamlRequest() + + +@pytest.mark.asyncio +async def test_upload_queue_yaml_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.UploadQueueYamlRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upload_queue_yaml), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.upload_queue_yaml(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UploadQueueYamlRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_upload_queue_yaml_async_from_dict(): + await test_upload_queue_yaml_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
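+    # Note: the IAM methods are mixed into the client and use the raw protobuf
+    # types (iam_policy_pb2, policy_pb2) rather than proto-plus wrappers, which
+    # is why the requests and responses below are constructed differently from
+    # the queue and task tests.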
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_get_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
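+        # A truthy value is used because the client cannot distinguish a falsy
+        # flattened argument (e.g. "" or 0) from an unset one when populating
+        # the request.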
+ response = await client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
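+    # FakeUnaryUnaryCall resolves to the wrapped message when awaited, so the
+    # async surface yields a plain Policy just like the sync client.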
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
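+        # The sync tests assert an exact call count; the async variants are
+        # looser and only assert that at least one call was recorded.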
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_set_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
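+    # Calling the method with no arguments should result in a
+    # default-constructed request of the matching type being sent.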
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource="resource_value", + permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].permissions + mock_val = ["permissions_value"] + assert arg == mock_val + + +def test_test_iam_permissions_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.test_iam_permissions( + resource="resource_value", + permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].permissions + mock_val = ["permissions_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListTasksRequest, + dict, + ], +) +def test_list_tasks(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + client.list_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + +@pytest.mark.asyncio +async def test_list_tasks_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ListTasksRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) + + +def test_list_tasks_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
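+    # For List RPCs the routing key is the parent collection (the queue)
+    # rather than a resource name.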
+ request = cloudtasks.ListTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value = cloudtasks.ListTasksResponse() + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_tasks_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_tasks_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_tasks(
+            cloudtasks.ListTasksRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_tasks_pager(transport_name: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tasks), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_tasks(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, task.Task) for i in results)
+
+
+def test_list_tasks_pages(transport_name: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_tasks), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_tasks(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_pager():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
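+        # Each invocation of the mocked stub consumes the next item of
+        # side_effect; the trailing RuntimeError is a sentinel that would
+        # surface if the pager ever requested a page past the last one.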
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_tasks(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, task.Task) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_tasks_async_pages():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                    task.Task(),
+                ],
+                next_page_token="abc",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[],
+                next_page_token="def",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                ],
+                next_page_token="ghi",
+            ),
+            cloudtasks.ListTasksResponse(
+                tasks=[
+                    task.Task(),
+                    task.Task(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in (  # pragma: no branch
+            await client.list_tasks(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.GetTaskRequest,
+        dict,
+    ],
+)
+def test_get_task(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = task.Task(
+            name="name_value",
+            view=task.Task.View.BASIC,
+        )
+        response = client.get_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.GetTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, task.Task)
+    assert response.name == "name_value"
+    assert response.view == task.Task.View.BASIC
+
+
+def test_get_task_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_task), "__call__") as call: + client.get_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + +@pytest.mark.asyncio +async def test_get_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.GetTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + view=task.Task.View.BASIC, + ) + ) + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_get_task_async_from_dict(): + await test_get_task_async(request_type=dict) + + +def test_get_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = task.Task() + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_task( + cloudtasks.GetTaskRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateTaskRequest, + dict, + ], +) +def test_create_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task( + name="name_value", + view=gct_task.Task.View.BASIC, + ) + response = client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
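+    # (The client is expected to substitute cloudtasks.CreateTaskRequest()
+    # when neither a request object nor flattened fields are supplied.)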
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + client.create_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + +@pytest.mark.asyncio +async def test_create_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_task.Task( + name="name_value", + view=gct_task.Task.View.BASIC, + ) + ) + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.view == gct_task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_create_task_async_from_dict(): + await test_create_task_async(request_type=dict) + + +def test_create_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + call.return_value = gct_task.Task() + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_task( + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].task + mock_val = gct_task.Task(name="name_value") + assert arg == mock_val + + +def test_create_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_task.Task() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_task( + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].task + mock_val = gct_task.Task(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_task_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteTaskRequest, + dict, + ], +) +def test_delete_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. 
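+        # DeleteTask maps to google.protobuf.Empty on the wire, which the
+        # client surfaces as None.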
+        call.return_value = None
+        response = client.delete_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.DeleteTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_task_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        client.delete_task()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.DeleteTaskRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_task_async(
+    transport: str = "grpc_asyncio", request_type=cloudtasks.DeleteTaskRequest
+):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.DeleteTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_task_async_from_dict():
+    await test_delete_task_async(request_type=dict)
+
+
+def test_delete_task_field_headers():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.DeleteTaskRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        call.return_value = None
+        client.delete_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_task_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.DeleteTaskRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
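+    # FakeUnaryUnaryCall wraps the value in an awaitable, standing in for the
+    # call object a real async gRPC stub would return.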
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_task_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_task_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_task(
+            cloudtasks.DeleteTaskRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_task_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_task(
+            cloudtasks.DeleteTaskRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.LeaseTasksRequest,
+        dict,
+    ],
+)
+def test_lease_tasks(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.lease_tasks), "__call__") as call:
+        # Designate an appropriate return value for the call.
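+        # A bare LeaseTasksResponse is enough here: only the response type,
+        # not its payload, is asserted below.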
+        call.return_value = cloudtasks.LeaseTasksResponse()
+        response = client.lease_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.LeaseTasksRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cloudtasks.LeaseTasksResponse)
+
+
+def test_lease_tasks_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.lease_tasks), "__call__") as call:
+        client.lease_tasks()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.LeaseTasksRequest()
+
+
+@pytest.mark.asyncio
+async def test_lease_tasks_async(
+    transport: str = "grpc_asyncio", request_type=cloudtasks.LeaseTasksRequest
+):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.lease_tasks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.LeaseTasksResponse()
+        )
+        response = await client.lease_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.LeaseTasksRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cloudtasks.LeaseTasksResponse)
+
+
+@pytest.mark.asyncio
+async def test_lease_tasks_async_from_dict():
+    await test_lease_tasks_async(request_type=dict)
+
+
+def test_lease_tasks_field_headers():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.LeaseTasksRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.lease_tasks), "__call__") as call:
+        call.return_value = cloudtasks.LeaseTasksResponse()
+        client.lease_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_lease_tasks_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.LeaseTasksRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.lease_tasks), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.LeaseTasksResponse()
+        )
+        await client.lease_tasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_lease_tasks_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.lease_tasks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cloudtasks.LeaseTasksResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.lease_tasks(
+            parent="parent_value",
+            lease_duration=duration_pb2.Duration(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(
+            seconds=751
+        )
+
+
+def test_lease_tasks_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.lease_tasks(
+            cloudtasks.LeaseTasksRequest(),
+            parent="parent_value",
+            lease_duration=duration_pb2.Duration(seconds=751),
+        )
+
+
+@pytest.mark.asyncio
+async def test_lease_tasks_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.lease_tasks), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.LeaseTasksResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.lease_tasks(
+            parent="parent_value",
+            lease_duration=duration_pb2.Duration(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(
+            seconds=751
+        )
+
+
+@pytest.mark.asyncio
+async def test_lease_tasks_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
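+    # The ValueError is raised client-side, before any RPC is attempted, so
+    # no transport mock is needed.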
+    with pytest.raises(ValueError):
+        await client.lease_tasks(
+            cloudtasks.LeaseTasksRequest(),
+            parent="parent_value",
+            lease_duration=duration_pb2.Duration(seconds=751),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.AcknowledgeTaskRequest,
+        dict,
+    ],
+)
+def test_acknowledge_task(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.acknowledge_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.acknowledge_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.AcknowledgeTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_acknowledge_task_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.acknowledge_task), "__call__") as call:
+        client.acknowledge_task()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.AcknowledgeTaskRequest()
+
+
+@pytest.mark.asyncio
+async def test_acknowledge_task_async(
+    transport: str = "grpc_asyncio", request_type=cloudtasks.AcknowledgeTaskRequest
+):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.acknowledge_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.acknowledge_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.AcknowledgeTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_acknowledge_task_async_from_dict():
+    await test_acknowledge_task_async(request_type=dict)
+
+
+def test_acknowledge_task_field_headers():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.AcknowledgeTaskRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.acknowledge_task), "__call__") as call:
+        call.return_value = None
+        client.acknowledge_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_acknowledge_task_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.AcknowledgeTaskRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.acknowledge_task), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.acknowledge_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_acknowledge_task_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.acknowledge_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.acknowledge_task(
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        assert TimestampRule().to_proto(
+            args[0].schedule_time
+        ) == timestamp_pb2.Timestamp(seconds=751)
+
+
+def test_acknowledge_task_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.acknowledge_task(
+            cloudtasks.AcknowledgeTaskRequest(),
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+
+@pytest.mark.asyncio
+async def test_acknowledge_task_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.acknowledge_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
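+        # The assertions below normalize the sent schedule_time back to a raw
+        # protobuf Timestamp with TimestampRule().to_proto() before comparing.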
+        response = await client.acknowledge_task(
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        assert TimestampRule().to_proto(
+            args[0].schedule_time
+        ) == timestamp_pb2.Timestamp(seconds=751)
+
+
+@pytest.mark.asyncio
+async def test_acknowledge_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.acknowledge_task(
+            cloudtasks.AcknowledgeTaskRequest(),
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.RenewLeaseRequest,
+        dict,
+    ],
+)
+def test_renew_lease(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.renew_lease), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = task.Task(
+            name="name_value",
+            view=task.Task.View.BASIC,
+        )
+        response = client.renew_lease(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.RenewLeaseRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, task.Task)
+    assert response.name == "name_value"
+    assert response.view == task.Task.View.BASIC
+
+
+def test_renew_lease_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.renew_lease), "__call__") as call:
+        client.renew_lease()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.RenewLeaseRequest()
+
+
+@pytest.mark.asyncio
+async def test_renew_lease_async(
+    transport: str = "grpc_asyncio", request_type=cloudtasks.RenewLeaseRequest
+):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.renew_lease), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            task.Task(
+                name="name_value",
+                view=task.Task.View.BASIC,
+            )
+        )
+        response = await client.renew_lease(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.RenewLeaseRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, task.Task)
+    assert response.name == "name_value"
+    assert response.view == task.Task.View.BASIC
+
+
+@pytest.mark.asyncio
+async def test_renew_lease_async_from_dict():
+    await test_renew_lease_async(request_type=dict)
+
+
+def test_renew_lease_field_headers():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.RenewLeaseRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.renew_lease), "__call__") as call:
+        call.return_value = task.Task()
+        client.renew_lease(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_renew_lease_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.RenewLeaseRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.renew_lease), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
+        await client.renew_lease(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_renew_lease_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.renew_lease), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = task.Task()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.renew_lease(
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+            lease_duration=duration_pb2.Duration(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        assert TimestampRule().to_proto(
+            args[0].schedule_time
+        ) == timestamp_pb2.Timestamp(seconds=751)
+        assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(
+            seconds=751
+        )
+
+
+def test_renew_lease_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.renew_lease(
+            cloudtasks.RenewLeaseRequest(),
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+            lease_duration=duration_pb2.Duration(seconds=751),
+        )
+
+
+@pytest.mark.asyncio
+async def test_renew_lease_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.renew_lease), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.renew_lease(
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+            lease_duration=duration_pb2.Duration(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        assert TimestampRule().to_proto(
+            args[0].schedule_time
+        ) == timestamp_pb2.Timestamp(seconds=751)
+        assert DurationRule().to_proto(args[0].lease_duration) == duration_pb2.Duration(
+            seconds=751
+        )
+
+
+@pytest.mark.asyncio
+async def test_renew_lease_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.renew_lease(
+            cloudtasks.RenewLeaseRequest(),
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+            lease_duration=duration_pb2.Duration(seconds=751),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.CancelLeaseRequest,
+        dict,
+    ],
+)
+def test_cancel_lease(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_lease), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = task.Task(
+            name="name_value",
+            view=task.Task.View.BASIC,
+        )
+        response = client.cancel_lease(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.CancelLeaseRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, task.Task)
+    assert response.name == "name_value"
+    assert response.view == task.Task.View.BASIC
+
+
+def test_cancel_lease_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_lease), "__call__") as call:
+        client.cancel_lease()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.CancelLeaseRequest()
+
+
+@pytest.mark.asyncio
+async def test_cancel_lease_async(
+    transport: str = "grpc_asyncio", request_type=cloudtasks.CancelLeaseRequest
+):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_lease), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            task.Task(
+                name="name_value",
+                view=task.Task.View.BASIC,
+            )
+        )
+        response = await client.cancel_lease(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.CancelLeaseRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, task.Task)
+    assert response.name == "name_value"
+    assert response.view == task.Task.View.BASIC
+
+
+@pytest.mark.asyncio
+async def test_cancel_lease_async_from_dict():
+    await test_cancel_lease_async(request_type=dict)
+
+
+def test_cancel_lease_field_headers():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.CancelLeaseRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_lease), "__call__") as call:
+        call.return_value = task.Task()
+        client.cancel_lease(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_cancel_lease_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.CancelLeaseRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_lease), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
+        await client.cancel_lease(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_cancel_lease_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
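+    # Patching __call__ on the wrapped stub method intercepts the request at
+    # the transport boundary while client-side request construction still runs.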
+    with mock.patch.object(type(client.transport.cancel_lease), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = task.Task()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.cancel_lease(
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        assert TimestampRule().to_proto(
+            args[0].schedule_time
+        ) == timestamp_pb2.Timestamp(seconds=751)
+
+
+def test_cancel_lease_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.cancel_lease(
+            cloudtasks.CancelLeaseRequest(),
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+
+@pytest.mark.asyncio
+async def test_cancel_lease_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_lease), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.cancel_lease(
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        assert TimestampRule().to_proto(
+            args[0].schedule_time
+        ) == timestamp_pb2.Timestamp(seconds=751)
+
+
+@pytest.mark.asyncio
+async def test_cancel_lease_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.cancel_lease(
+            cloudtasks.CancelLeaseRequest(),
+            name="name_value",
+            schedule_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.RunTaskRequest,
+        dict,
+    ],
+)
+def test_run_task(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = task.Task(
+            name="name_value",
+            view=task.Task.View.BASIC,
+        )
+        response = client.run_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
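+        # mock_calls records (name, args, kwargs) triples; args[0] below is
+        # the request message the client actually handed to the stub.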
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.RunTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, task.Task)
+    assert response.name == "name_value"
+    assert response.view == task.Task.View.BASIC
+
+
+def test_run_task_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_task), "__call__") as call:
+        client.run_task()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.RunTaskRequest()
+
+
+@pytest.mark.asyncio
+async def test_run_task_async(
+    transport: str = "grpc_asyncio", request_type=cloudtasks.RunTaskRequest
+):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            task.Task(
+                name="name_value",
+                view=task.Task.View.BASIC,
+            )
+        )
+        response = await client.run_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.RunTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, task.Task)
+    assert response.name == "name_value"
+    assert response.view == task.Task.View.BASIC
+
+
+@pytest.mark.asyncio
+async def test_run_task_async_from_dict():
+    await test_run_task_async(request_type=dict)
+
+
+def test_run_task_field_headers():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.RunTaskRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_task), "__call__") as call:
+        call.return_value = task.Task()
+        client.run_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_run_task_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.RunTaskRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_task), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
+        await client.run_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_run_task_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = task.Task()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.run_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_run_task_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.run_task(
+            cloudtasks.RunTaskRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_run_task_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.run_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.run_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_run_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.run_task(
+            cloudtasks.RunTaskRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.BufferTaskRequest,
+        dict,
+    ],
+)
+def test_buffer_task(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
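+        # An empty BufferTaskResponse is sufficient; only the response type is
+        # asserted for this method.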
+        call.return_value = cloudtasks.BufferTaskResponse()
+        response = client.buffer_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.BufferTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cloudtasks.BufferTaskResponse)
+
+
+def test_buffer_task_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        client.buffer_task()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.BufferTaskRequest()
+
+
+@pytest.mark.asyncio
+async def test_buffer_task_async(
+    transport: str = "grpc_asyncio", request_type=cloudtasks.BufferTaskRequest
+):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.BufferTaskResponse()
+        )
+        response = await client.buffer_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.BufferTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cloudtasks.BufferTaskResponse)
+
+
+@pytest.mark.asyncio
+async def test_buffer_task_async_from_dict():
+    await test_buffer_task_async(request_type=dict)
+
+
+def test_buffer_task_field_headers():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.BufferTaskRequest()
+
+    request.queue = "queue_value"
+    request.task_id = "task_id_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        call.return_value = cloudtasks.BufferTaskResponse()
+        client.buffer_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "queue=queue_value&task_id=task_id_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_buffer_task_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
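+    # BufferTask routes on two fields, so the expected header value joins them
+    # with '&' as "queue=queue_value&task_id=task_id_value".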
+    request = cloudtasks.BufferTaskRequest()
+
+    request.queue = "queue_value"
+    request.task_id = "task_id_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.BufferTaskResponse()
+        )
+        await client.buffer_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "queue=queue_value&task_id=task_id_value",
+    ) in kw["metadata"]
+
+
+def test_buffer_task_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cloudtasks.BufferTaskResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.buffer_task(
+            queue="queue_value",
+            task_id="task_id_value",
+            body=httpbody_pb2.HttpBody(content_type="content_type_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].queue
+        mock_val = "queue_value"
+        assert arg == mock_val
+        arg = args[0].task_id
+        mock_val = "task_id_value"
+        assert arg == mock_val
+        arg = args[0].body
+        mock_val = httpbody_pb2.HttpBody(content_type="content_type_value")
+        assert arg == mock_val
+
+
+def test_buffer_task_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.buffer_task(
+            cloudtasks.BufferTaskRequest(),
+            queue="queue_value",
+            task_id="task_id_value",
+            body=httpbody_pb2.HttpBody(content_type="content_type_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_buffer_task_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.BufferTaskResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.buffer_task(
+            queue="queue_value",
+            task_id="task_id_value",
+            body=httpbody_pb2.HttpBody(content_type="content_type_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].queue
+        mock_val = "queue_value"
+        assert arg == mock_val
+        arg = args[0].task_id
+        mock_val = "task_id_value"
+        assert arg == mock_val
+        arg = args[0].body
+        mock_val = httpbody_pb2.HttpBody(content_type="content_type_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_buffer_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.buffer_task(
+            cloudtasks.BufferTaskRequest(),
+            queue="queue_value",
+            task_id="task_id_value",
+            body=httpbody_pb2.HttpBody(content_type="content_type_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.ListQueuesRequest,
+        dict,
+    ],
+)
+def test_list_queues_rest(request_type):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = cloudtasks.ListQueuesResponse(
+            next_page_token="next_page_token_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_queues(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListQueuesPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_queues_rest_required_fields(request_type=cloudtasks.ListQueuesRequest):
+    transport_class = transports.CloudTasksRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_queues._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = "parent_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).list_queues._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
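+    # Anything still unset at this point may only be an optional query
+    # parameter; any other field would indicate path or body leakage.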
+    assert not set(unset_fields) - set(
+        (
+            "filter",
+            "page_size",
+            "page_token",
+            "read_mask",
+        )
+    )
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == "parent_value"
+
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = cloudtasks.ListQueuesResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_queues(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_queues_rest_unset_required_fields():
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_queues._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "pageSize",
+                "pageToken",
+                "readMask",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_queues_rest_interceptors(null_interceptor):
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudTasksRestInterceptor(),
+    )
+    client = CloudTasksClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "post_list_queues"
+    ) as post, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "pre_list_queues"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudtasks.ListQueuesRequest.pb(cloudtasks.ListQueuesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = cloudtasks.ListQueuesResponse.to_json(
+            cloudtasks.ListQueuesResponse()
+        )
+
+        request = cloudtasks.ListQueuesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloudtasks.ListQueuesResponse()
+
+        client.list_queues(
+            request,
+            metadata=[
+                ("key", "val"),
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_queues_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.ListQueuesRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_queues(request) + + +def test_list_queues_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListQueuesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_queues(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{parent=projects/*/locations/*}/queues" + % client.transport._host, + args[1], + ) + + +def test_list_queues_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), + parent="parent_value", + ) + + +def test_list_queues_rest_pager(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token="def", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudtasks.ListQueuesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_queues(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, queue.Queue) for i in results) + + pages = list(client.list_queues(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetQueueRequest, + dict, + ], +) +def test_get_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_queue(request) + + # Establish that the response is the type that we expect. 
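+    # The REST transport deserializes the JSON payload back into a
+    # proto-plus message, so enum fields round-trip as enum members
+    # (queue.Queue.State.RUNNING) rather than raw integers.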
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_get_queue_rest_required_fields(request_type=cloudtasks.GetQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_queue._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("read_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
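+            # The dict below mimics transcode() output; "uri" and "method"
+            # are stand-in values, since the real transcode() would derive
+            # them from the method's http_options.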
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(("readMask",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_get_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_get_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.GetQueueRequest.pb(cloudtasks.GetQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.GetQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.get_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.GetQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_queue(request) + + +def test_get_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
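+    # Patching "request" on the session's type keeps the test hermetic:
+    # no real HTTP traffic is sent, and the canned Response built below is
+    # returned instead.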
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*}" + % client.transport._host, + args[1], + ) + + +def test_get_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +def test_get_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateQueueRequest, + dict, + ], +) +def test_create_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["queue"] = { + "name": "name_value", + "app_engine_http_target": { + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + } + }, + "pull_target": {}, + "http_target": { + "uri_override": { + "scheme": 1, + "host": "host_value", + "port": 453, + "path_override": {"path": "path_value"}, + "query_override": {"query_params": "query_params_value"}, + "uri_override_enforce_mode": 1, + }, + "http_method": 1, + "header_overrides": [ + {"header": {"key": "key_value", "value": "value_value"}} + ], + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "rate_limits": { + "max_tasks_dispatched_per_second": 0.32680000000000003, + "max_burst_size": 1519, + "max_concurrent_tasks": 2157, + }, + "retry_config": { + "max_attempts": 1303, + "unlimited_attempts": True, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "task_ttl": {}, + "tombstone_ttl": {}, + "stats": { + "tasks_count": 1198, + "oldest_estimated_arrival_time": {}, + "executed_last_minute_count": 2787, + "concurrent_dispatches_count": 2898, + "effective_execution_rate": 0.2543, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_queue(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_create_queue_rest_required_fields(request_type=cloudtasks.CreateQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_queue._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "queue", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_create_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_create_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.CreateQueueRequest.pb(cloudtasks.CreateQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gct_queue.Queue.to_json(gct_queue.Queue()) + + request = cloudtasks.CreateQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gct_queue.Queue() + + client.create_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.CreateQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["queue"] = { + "name": "name_value", + "app_engine_http_target": { + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + } + }, + "pull_target": {}, + "http_target": { + "uri_override": { + "scheme": 1, + "host": "host_value", + "port": 453, + "path_override": {"path": "path_value"}, + "query_override": {"query_params": "query_params_value"}, + "uri_override_enforce_mode": 1, + }, + "http_method": 1, + "header_overrides": [ + {"header": {"key": "key_value", "value": "value_value"}} + ], + "oauth_token": { + "service_account_email": 
"service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "rate_limits": { + "max_tasks_dispatched_per_second": 0.32680000000000003, + "max_burst_size": 1519, + "max_concurrent_tasks": 2157, + }, + "retry_config": { + "max_attempts": 1303, + "unlimited_attempts": True, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "task_ttl": {}, + "tombstone_ttl": {}, + "stats": { + "tasks_count": 1198, + "oldest_estimated_arrival_time": {}, + "executed_last_minute_count": 2787, + "concurrent_dispatches_count": 2898, + "effective_execution_rate": 0.2543, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_queue(request) + + +def test_create_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{parent=projects/*/locations/*}/queues" + % client.transport._host, + args[1], + ) + + +def test_create_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +def test_create_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.UpdateQueueRequest, + dict, + ], +) +def test_update_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + request_init["queue"] = { + "name": "projects/sample1/locations/sample2/queues/sample3", + "app_engine_http_target": { + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + } + }, + "pull_target": {}, + "http_target": { + "uri_override": { + "scheme": 1, + "host": "host_value", + "port": 453, + "path_override": {"path": "path_value"}, + "query_override": {"query_params": "query_params_value"}, + "uri_override_enforce_mode": 1, + }, + "http_method": 1, + "header_overrides": [ + {"header": {"key": "key_value", "value": "value_value"}} + ], + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "rate_limits": { + "max_tasks_dispatched_per_second": 0.32680000000000003, + "max_burst_size": 1519, + "max_concurrent_tasks": 2157, + }, + "retry_config": { + "max_attempts": 1303, + "unlimited_attempts": True, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "task_ttl": {}, + "tombstone_ttl": {}, + "stats": { + "tasks_count": 1198, + "oldest_estimated_arrival_time": {}, + "executed_last_minute_count": 2787, + "concurrent_dispatches_count": 2898, + "effective_execution_rate": 0.2543, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + + +def test_update_queue_rest_required_fields(request_type=cloudtasks.UpdateQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_queue._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("queue",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_update_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_update_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.UpdateQueueRequest.pb(cloudtasks.UpdateQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gct_queue.Queue.to_json(gct_queue.Queue()) + + request = cloudtasks.UpdateQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gct_queue.Queue() + + client.update_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.UpdateQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + request_init["queue"] = { + "name": "projects/sample1/locations/sample2/queues/sample3", + "app_engine_http_target": { + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + } + }, + "pull_target": {}, + "http_target": { + "uri_override": { + "scheme": 1, + "host": "host_value", + "port": 453, + "path_override": {"path": "path_value"}, + "query_override": {"query_params": "query_params_value"}, + "uri_override_enforce_mode": 1, + }, + "http_method": 1, + "header_overrides": [ + {"header": {"key": "key_value", "value": "value_value"}} + ], + "oauth_token": { 
+ "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "rate_limits": { + "max_tasks_dispatched_per_second": 0.32680000000000003, + "max_burst_size": 1519, + "max_concurrent_tasks": 2157, + }, + "retry_config": { + "max_attempts": 1303, + "unlimited_attempts": True, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "task_ttl": {}, + "tombstone_ttl": {}, + "stats": { + "tasks_count": 1198, + "oldest_estimated_arrival_time": {}, + "executed_last_minute_count": 2787, + "concurrent_dispatches_count": 2898, + "effective_execution_rate": 0.2543, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_queue(request) + + +def test_update_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{queue.name=projects/*/locations/*/queues/*}" + % client.transport._host, + args[1], + ) + + +def test_update_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteQueueRequest, + dict, + ], +) +def test_delete_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_queue(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_rest_required_fields(request_type=cloudtasks.DeleteQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_delete_queue" + ) as pre: + pre.assert_not_called() + pb_message = cloudtasks.DeleteQueueRequest.pb(cloudtasks.DeleteQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudtasks.DeleteQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_queue(request) + + +def test_delete_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
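+        # DeleteQueue has no response payload (presumably google.protobuf.Empty
+        # on the wire), so the faked body is an empty string and the client
+        # is expected to return None.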
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +def test_delete_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PurgeQueueRequest, + dict, + ], +) +def test_purge_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_purge_queue_rest_required_fields(request_type=cloudtasks.PurgeQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_purge_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_purge_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.PurgeQueueRequest.pb(cloudtasks.PurgeQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.PurgeQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.purge_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_queue(request) + + +def test_purge_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.purge_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*}:purge" + % client.transport._host, + args[1], + ) + + +def test_purge_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name="name_value", + ) + + +def test_purge_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PauseQueueRequest, + dict, + ], +) +def test_pause_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pause_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_pause_queue_rest_required_fields(request_type=cloudtasks.PauseQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.pause_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pause_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pause_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pause_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_pause_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_pause_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.PauseQueueRequest.pb(cloudtasks.PauseQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.PauseQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.pause_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_pause_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.pause_queue(request) + + +def test_pause_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.pause_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*}:pause" + % client.transport._host, + args[1], + ) + + +def test_pause_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), + name="name_value", + ) + + +def test_pause_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ResumeQueueRequest, + dict, + ], +) +def test_resume_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resume_queue(request) + + # Establish that the response is the type that we expect. 
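+ # The JSON payload round-trips back through the proto-plus wrapper,
+ # so the deserialized response compares field-for-field (including
+ # the State enum) against the Queue designated above.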
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + + +def test_resume_queue_rest_required_fields(request_type=cloudtasks.ResumeQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resume_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resume_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.resume_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_resume_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_resume_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.ResumeQueueRequest.pb(cloudtasks.ResumeQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.ResumeQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.resume_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume_queue(request) + + +def test_resume_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resume_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*}:resume" + % client.transport._host, + args[1], + ) + + +def test_resume_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name="name_value", + ) + + +def test_resume_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_upload_queue_yaml_rest_no_http_options(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = cloudtasks.UploadQueueYamlRequest() + with pytest.raises(RuntimeError): + client.upload_queue_yaml(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
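+ # IAM requests are plain protobuf messages (iam_policy_pb2), not
+ # proto-plus wrappers, so no request_type.pb() conversion is needed
+ # here; the request object is already a pb message.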
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_get_iam_policy" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_get_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.GetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + + request = iam_policy_pb2.GetIamPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_get_iam_policy_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_set_iam_policy" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_set_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.SetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + + request = iam_policy_pb2.SetIamPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_set_iam_policy_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_rest_required_fields( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + jsonified_request["permissions"] = "permissions_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == "permissions_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "permissions", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_test_iam_permissions" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_test_iam_permissions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + request = iam_policy_pb2.TestIamPermissionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
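+ # A mocked 400 status is enough to exercise the error path: the
+ # client is expected to surface it as core_exceptions.BadRequest
+ # instead of returning a response.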
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + permissions=["permissions_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +def test_test_iam_permissions_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListTasksRequest, + dict, + ], +) +def test_list_tasks_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_tasks(request) + + # Establish that the response is the type that we expect. 
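+ # list_tasks wraps the raw ListTasksResponse in a pager, so the
+ # assertions below go through the ListTasksPager rather than the
+ # response message itself.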
+ assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_rest_required_fields(request_type=cloudtasks.ListTasksRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tasks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tasks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "response_view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
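+ # ListTasks maps to an HTTP GET, so the transcode stub below carries
+ # no "body" key; everything, including the optional page_size,
+ # page_token and response_view parameters checked above, must travel
+ # in the query string.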
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_tasks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_tasks_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_tasks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "responseView", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tasks_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_list_tasks" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_list_tasks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.ListTasksRequest.pb(cloudtasks.ListTasksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudtasks.ListTasksResponse.to_json( + cloudtasks.ListTasksResponse() + ) + + request = cloudtasks.ListTasksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudtasks.ListTasksResponse() + + client.list_tasks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_tasks_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.ListTasksRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_tasks(request) + + +def test_list_tasks_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_tasks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks" + % client.transport._host, + args[1], + ) + + +def test_list_tasks_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), + parent="parent_value", + ) + + +def test_list_tasks_rest_pager(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
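+ # Pagination is faked by queuing a series of HTTP responses on
+ # req.side_effect; the final page carries an empty next_page_token,
+ # so iterating the pager should yield all six tasks and then stop,
+ # while .pages exposes each page's raw token.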
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudtasks.ListTasksResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + pager = client.list_tasks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + pages = list(client.list_tasks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetTaskRequest, + dict, + ], +) +def test_get_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task( + name="name_value", + view=task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.view == task.Task.View.BASIC + + +def test_get_task_rest_required_fields(request_type=cloudtasks.GetTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_task._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("response_view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(("responseView",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_get_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_get_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.GetTaskRequest.pb(cloudtasks.GetTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = task.Task.to_json(task.Task()) + + request = cloudtasks.GetTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = task.Task() + + client.get_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.GetTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_task(request) + + +def test_get_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}" + % client.transport._host, + args[1], + ) + + +def test_get_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), + name="name_value", + ) + + +def test_get_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateTaskRequest, + dict, + ], +) +def test_create_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_task.Task( + name="name_value", + view=gct_task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_rest_required_fields(request_type=cloudtasks.CreateTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gct_task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_task._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "task", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_create_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_create_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.CreateTaskRequest.pb(cloudtasks.CreateTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gct_task.Task.to_json(gct_task.Task()) + + request = cloudtasks.CreateTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gct_task.Task() + + client.create_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_task(request) + + +def test_create_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
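+    # Patching ``request`` on the session *type* intercepts the call at the
+    # requests.Session layer, so path transcoding and body serialization in
+    # the transport still run for real; only the HTTP send is faked, which is
+    # what lets the assertions below inspect the final request URI.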
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks" + % client.transport._host, + args[1], + ) + + +def test_create_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +def test_create_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteTaskRequest, + dict, + ], +) +def test_delete_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_task(request) + + # Establish that the response is the type that we expect. 
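+    # DeleteTask maps to google.protobuf.Empty on the wire, which the client
+    # surfaces as ``None``; hence the empty JSON body faked above.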
+ assert response is None + + +def test_delete_task_rest_required_fields(request_type=cloudtasks.DeleteTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_delete_task" + ) as pre: + pre.assert_not_called() + pb_message = cloudtasks.DeleteTaskRequest.pb(cloudtasks.DeleteTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudtasks.DeleteTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.DeleteTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_task(request) + + +def test_delete_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), + name="name_value", + ) + + +def test_delete_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.LeaseTasksRequest, + dict, + ], +) +def test_lease_tasks_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.LeaseTasksResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.LeaseTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.lease_tasks(request) + + # Establish that the response is the type that we expect. 
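+    # An empty LeaseTasksResponse() serializes to ``{}``, so only the
+    # response type is meaningful to assert here.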
+ assert isinstance(response, cloudtasks.LeaseTasksResponse) + + +def test_lease_tasks_rest_required_fields(request_type=cloudtasks.LeaseTasksRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lease_tasks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).lease_tasks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudtasks.LeaseTasksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudtasks.LeaseTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.lease_tasks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_lease_tasks_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.lease_tasks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "leaseDuration", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_lease_tasks_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_lease_tasks" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_lease_tasks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.LeaseTasksRequest.pb(cloudtasks.LeaseTasksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudtasks.LeaseTasksResponse.to_json( + cloudtasks.LeaseTasksResponse() + ) + + request = cloudtasks.LeaseTasksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudtasks.LeaseTasksResponse() + + client.lease_tasks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_lease_tasks_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.LeaseTasksRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
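+    # google.api_core translates HTTP error statuses into typed exceptions,
+    # mapping the 400 faked below to core_exceptions.BadRequest.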
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.lease_tasks(request) + + +def test_lease_tasks_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.LeaseTasksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + lease_duration=duration_pb2.Duration(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.LeaseTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.lease_tasks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease" + % client.transport._host, + args[1], + ) + + +def test_lease_tasks_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.lease_tasks( + cloudtasks.LeaseTasksRequest(), + parent="parent_value", + lease_duration=duration_pb2.Duration(seconds=751), + ) + + +def test_lease_tasks_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.AcknowledgeTaskRequest, + dict, + ], +) +def test_acknowledge_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.acknowledge_task(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_acknowledge_task_rest_required_fields( + request_type=cloudtasks.AcknowledgeTaskRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).acknowledge_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).acknowledge_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.acknowledge_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_acknowledge_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.acknowledge_task._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "scheduleTime", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_acknowledge_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_acknowledge_task" + ) as pre: + pre.assert_not_called() + pb_message = cloudtasks.AcknowledgeTaskRequest.pb( + cloudtasks.AcknowledgeTaskRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudtasks.AcknowledgeTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.acknowledge_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_acknowledge_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.AcknowledgeTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.acknowledge_task(request) + + +def test_acknowledge_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.acknowledge_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge" + % client.transport._host, + args[1], + ) + + +def test_acknowledge_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.acknowledge_task( + cloudtasks.AcknowledgeTaskRequest(), + name="name_value", + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_acknowledge_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.RenewLeaseRequest, + dict, + ], +) +def test_renew_lease_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task( + name="name_value", + view=task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.renew_lease(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.view == task.Task.View.BASIC + + +def test_renew_lease_rest_required_fields(request_type=cloudtasks.RenewLeaseRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).renew_lease._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).renew_lease._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.renew_lease(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_renew_lease_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.renew_lease._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "scheduleTime", + "leaseDuration", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_renew_lease_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_renew_lease" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_renew_lease" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.RenewLeaseRequest.pb(cloudtasks.RenewLeaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = task.Task.to_json(task.Task()) + + request = cloudtasks.RenewLeaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = task.Task() + + client.renew_lease( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_renew_lease_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.RenewLeaseRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.renew_lease(request) + + +def test_renew_lease_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + schedule_time=timestamp_pb2.Timestamp(seconds=751), + lease_duration=duration_pb2.Duration(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.renew_lease(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease" + % client.transport._host, + args[1], + ) + + +def test_renew_lease_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.renew_lease( + cloudtasks.RenewLeaseRequest(), + name="name_value", + schedule_time=timestamp_pb2.Timestamp(seconds=751), + lease_duration=duration_pb2.Duration(seconds=751), + ) + + +def test_renew_lease_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CancelLeaseRequest, + dict, + ], +) +def test_cancel_lease_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = task.Task( + name="name_value", + view=task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel_lease(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.view == task.Task.View.BASIC + + +def test_cancel_lease_rest_required_fields(request_type=cloudtasks.CancelLeaseRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_lease._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_lease._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_lease(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_lease_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_lease._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "scheduleTime", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_lease_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_cancel_lease" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_cancel_lease" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.CancelLeaseRequest.pb(cloudtasks.CancelLeaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = task.Task.to_json(task.Task()) + + request = cloudtasks.CancelLeaseRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = task.Task() + + client.cancel_lease( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_lease_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.CancelLeaseRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_lease(request) + + +def test_cancel_lease_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.cancel_lease(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease" + % client.transport._host, + args[1], + ) + + +def test_cancel_lease_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_lease( + cloudtasks.CancelLeaseRequest(), + name="name_value", + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_cancel_lease_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.RunTaskRequest, + dict, + ], +) +def test_run_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task( + name="name_value", + view=task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.view == task.Task.View.BASIC + + +def test_run_task_rest_required_fields(request_type=cloudtasks.RunTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.run_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_run_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_run_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.RunTaskRequest.pb(cloudtasks.RunTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = task.Task.to_json(task.Task()) + + request = cloudtasks.RunTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = task.Task() + + client.run_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.RunTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_task(request) + + +def test_run_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run" + % client.transport._host, + args[1], + ) + + +def test_run_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), + name="name_value", + ) + + +def test_run_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.BufferTaskRequest, + dict, + ], +) +def test_buffer_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "queue": "projects/sample1/locations/sample2/queues/sample3", + "task_id": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.BufferTaskResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.BufferTaskResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.buffer_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudtasks.BufferTaskResponse) + + +def test_buffer_task_rest_required_fields(request_type=cloudtasks.BufferTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["queue"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).buffer_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["queue"] = "queue_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).buffer_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "queue" in jsonified_request + assert jsonified_request["queue"] == "queue_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudtasks.BufferTaskResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = cloudtasks.BufferTaskResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.buffer_task(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_buffer_task_rest_unset_required_fields():
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.buffer_task._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("queue",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_buffer_task_rest_interceptors(null_interceptor):
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudTasksRestInterceptor(),
+    )
+    client = CloudTasksClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "post_buffer_task"
+    ) as post, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "pre_buffer_task"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudtasks.BufferTaskRequest.pb(cloudtasks.BufferTaskRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = cloudtasks.BufferTaskResponse.to_json(
+            cloudtasks.BufferTaskResponse()
+        )
+
+        request = cloudtasks.BufferTaskRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloudtasks.BufferTaskResponse()
+
+        client.buffer_task(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_buffer_task_rest_bad_request(
+    transport: str = "rest", request_type=cloudtasks.BufferTaskRequest
+):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "queue": "projects/sample1/locations/sample2/queues/sample3",
+        "task_id": "sample4",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
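+    # google-api-core maps an HTTP 400 response onto
+    # core_exceptions.BadRequest, which is what the client should surface.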
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.buffer_task(request) + + +def test_buffer_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.BufferTaskResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "queue": "projects/sample1/locations/sample2/queues/sample3", + "task_id": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + queue="queue_value", + task_id="task_id_value", + body=httpbody_pb2.HttpBody(content_type="content_type_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.BufferTaskResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.buffer_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta2/{queue=projects/*/locations/*/queues/*}/tasks/{task_id}:buffer" + % client.transport._host, + args[1], + ) + + +def test_buffer_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.buffer_task( + cloudtasks.BufferTaskRequest(), + queue="queue_value", + task_id="task_id_value", + body=httpbody_pb2.HttpBody(content_type="content_type_value"), + ) + + +def test_buffer_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_upload_queue_yaml_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.upload_queue_yaml({}) + assert "Method UploadQueueYaml is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + transports.CloudTasksRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudTasksClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudTasksGrpcTransport, + ) + + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. 
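+    # CloudTasksTransport is the abstract base class; it only defines the method
+    # surface, so every RPC should raise NotImplementedError until a concrete
+    # transport (gRPC or REST) overrides it.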
+ with mock.patch( + "google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_queues", + "get_queue", + "create_queue", + "update_queue", + "delete_queue", + "purge_queue", + "pause_queue", + "resume_queue", + "upload_queue_yaml", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + "list_tasks", + "get_task", + "create_task", + "delete_task", + "lease_tasks", + "acknowledge_task", + "renew_lease", + "cancel_lease", + "run_task", + "buffer_task", + "get_location", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.tasks_v2beta2.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport() + adc.assert_called_once() + + +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +def test_cloud_tasks_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
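+    # ADC (Application Default Credentials) is whatever google.auth.default()
+    # resolves to, e.g. GOOGLE_APPLICATION_CREDENTIALS, gcloud user credentials,
+    # or the GCE/GKE metadata server.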
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + transports.CloudTasksRestTransport, + ], +) +def test_cloud_tasks_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudTasksGrpcTransport, grpc_helpers), + (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cloud_tasks_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudTasksRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cloud_tasks_host_no_port(transport_name): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudtasks.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudtasks.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cloud_tasks_host_with_port(transport_name): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudtasks.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudtasks.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cloud_tasks_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudTasksClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudTasksClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_queues._session + session2 = client2.transport.list_queues._session + assert session1 != session2 + session1 = client1.transport.get_queue._session + session2 = client2.transport.get_queue._session + assert session1 != session2 + session1 = client1.transport.create_queue._session + session2 = client2.transport.create_queue._session + assert session1 != session2 + session1 = client1.transport.update_queue._session + session2 = client2.transport.update_queue._session + assert session1 != session2 + session1 = client1.transport.delete_queue._session + session2 = client2.transport.delete_queue._session + assert session1 != session2 + session1 = client1.transport.purge_queue._session + session2 = client2.transport.purge_queue._session + assert session1 != session2 + session1 = client1.transport.pause_queue._session + session2 = client2.transport.pause_queue._session + assert session1 != session2 + session1 = client1.transport.resume_queue._session + session2 = client2.transport.resume_queue._session + assert session1 != session2 + session1 = client1.transport.upload_queue_yaml._session + session2 = client2.transport.upload_queue_yaml._session + 
+    assert session1 != session2
+    session1 = client1.transport.get_iam_policy._session
+    session2 = client2.transport.get_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.set_iam_policy._session
+    session2 = client2.transport.set_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.test_iam_permissions._session
+    session2 = client2.transport.test_iam_permissions._session
+    assert session1 != session2
+    session1 = client1.transport.list_tasks._session
+    session2 = client2.transport.list_tasks._session
+    assert session1 != session2
+    session1 = client1.transport.get_task._session
+    session2 = client2.transport.get_task._session
+    assert session1 != session2
+    session1 = client1.transport.create_task._session
+    session2 = client2.transport.create_task._session
+    assert session1 != session2
+    session1 = client1.transport.delete_task._session
+    session2 = client2.transport.delete_task._session
+    assert session1 != session2
+    session1 = client1.transport.lease_tasks._session
+    session2 = client2.transport.lease_tasks._session
+    assert session1 != session2
+    session1 = client1.transport.acknowledge_task._session
+    session2 = client2.transport.acknowledge_task._session
+    assert session1 != session2
+    session1 = client1.transport.renew_lease._session
+    session2 = client2.transport.renew_lease._session
+    assert session1 != session2
+    session1 = client1.transport.cancel_lease._session
+    session2 = client2.transport.cancel_lease._session
+    assert session1 != session2
+    session1 = client1.transport.run_task._session
+    session2 = client2.transport.run_task._session
+    assert session1 != session2
+    session1 = client1.transport.buffer_task._session
+    session2 = client2.transport.buffer_task._session
+    assert session1 != session2
+
+
+def test_cloud_tasks_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudTasksGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_cloud_tasks_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudTasksGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
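+# Both arguments are superseded by ssl_channel_credentials /
+# client_cert_source_for_mtls, which the tests above already exercise.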
+@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + expected = "projects/{project}/locations/{location}/queues/{queue}".format( + project=project, + location=location, + queue=queue, + ) + actual = CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. 
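+    # e.g. "projects/octopus/locations/oyster/queues/nudibranch" should parse
+    # back into the expected dict above.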
+ actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + + +def test_task_path(): + project = "cuttlefish" + location = "mussel" + queue = "winkle" + task = "nautilus" + expected = ( + "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format( + project=project, + location=location, + queue=queue, + task=task, + ) + ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "scallop", + "location": "abalone", + "queue": "squid", + "task": "clam", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudTasksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = CloudTasksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CloudTasksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = CloudTasksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CloudTasksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = CloudTasksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = CloudTasksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = CloudTasksClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CloudTasksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = CloudTasksClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations(transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudTasksClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = CloudTasksAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
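+    # The x-goog-request-params header carries the resource name so the
+    # backend can route the request without parsing the URI.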
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = CloudTasksClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = CloudTasksClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (CloudTasksClient, transports.CloudTasksGrpcTransport),
+        (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/__init__.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/__init__.py
new file mode 100644
index 000000000000..89a37dc92c5a
--- /dev/null
+++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py new file mode 100644 index 000000000000..f0aa7de98f4c --- /dev/null +++ b/packages/google-cloud-tasks/tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py @@ -0,0 +1,11377 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api import httpbody_pb2 # type: ignore +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.tasks_v2beta3.services.cloud_tasks import ( + CloudTasksAsyncClient, + CloudTasksClient, + pagers, + transports, +) +from google.cloud.tasks_v2beta3.types import cloudtasks +from google.cloud.tasks_v2beta3.types import queue +from google.cloud.tasks_v2beta3.types import queue as gct_queue +from google.cloud.tasks_v2beta3.types import target +from google.cloud.tasks_v2beta3.types import task +from google.cloud.tasks_v2beta3.types import task as gct_task + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be 
+# the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert CloudTasksClient._get_default_mtls_endpoint(None) is None
+    assert (
+        CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    )
+    assert (
+        CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (CloudTasksClient, "grpc"),
+        (CloudTasksAsyncClient, "grpc_asyncio"),
+        (CloudTasksClient, "rest"),
+    ],
+)
+def test_cloud_tasks_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "cloudtasks.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://cloudtasks.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.CloudTasksGrpcTransport, "grpc"),
+        (transports.CloudTasksGrpcAsyncIOTransport, "grpc_asyncio"),
+        (transports.CloudTasksRestTransport, "rest"),
+    ],
+)
+def test_cloud_tasks_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (CloudTasksClient, "grpc"),
+        (CloudTasksAsyncClient, "grpc_asyncio"),
+        (CloudTasksClient, "rest"),
+    ],
+)
+def test_cloud_tasks_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "cloudtasks.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://cloudtasks.googleapis.com"
+        )
+
+
+def test_cloud_tasks_client_get_transport_class():
+    transport = CloudTasksClient.get_transport_class()
+    available_transports = [
+        transports.CloudTasksGrpcTransport,
+        transports.CloudTasksRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = CloudTasksClient.get_transport_class("grpc")
+    assert transport == transports.CloudTasksGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"),
+        (
+            CloudTasksAsyncClient,
+            transports.CloudTasksGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (CloudTasksClient, transports.CloudTasksRestTransport, "rest"),
+    ],
+)
+@mock.patch.object(
+    CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)
+)
+@mock.patch.object(
+    CloudTasksAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(CloudTasksAsyncClient),
+)
+def test_cloud_tasks_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
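+    # ("never" pins the plain endpoint, "always" pins the mTLS endpoint, and the
+    # default "auto" selects mTLS only when a client certificate is configured.)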
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "true"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", "false"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", "true"), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_tasks_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
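+    # Three scenarios are exercised below: an explicit client_cert_source
+    # callback, a cert discovered through ADC, and no cert at all, each crossed
+    # with GOOGLE_API_USE_CLIENT_CERTIFICATE true/false.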
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient]) +@mock.patch.object( + CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient) +) +@mock.patch.object( + CloudTasksAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudTasksAsyncClient), +) +def test_cloud_tasks_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
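+ # NOTE (hedged summary): get_mtls_endpoint_and_cert_source() resolves in
+ # this order: an explicit api_endpoint in ClientOptions wins outright;
+ # otherwise GOOGLE_API_USE_MTLS_ENDPOINT selects the endpoint, and
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE gates whether any cert source is
+ # returned at all. Each case below isolates one branch.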
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest"), + ], +) +def test_cloud_tasks_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
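+ # NOTE (illustrative): "1" and "2" below are placeholder scope strings; a
+ # real configuration would pass OAuth scopes, e.g. (hedged sketch):
+ #
+ #     options = client_options.ClientOptions(
+ #         scopes=["https://www.googleapis.com/auth/cloud-platform"]
+ #     )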
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", grpc_helpers), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CloudTasksClient, transports.CloudTasksRestTransport, "rest", None), + ], +) +def test_cloud_tasks_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cloud_tasks_client_client_options_from_dict(): + with mock.patch( + "google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CloudTasksClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc", grpc_helpers), + ( + CloudTasksAsyncClient, + transports.CloudTasksGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_tasks_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
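+ # NOTE (hedged summary): with ClientOptions(credentials_file=...) the
+ # transport loads credentials via google.auth.load_credentials_from_file()
+ # instead of ADC, so the file-derived credentials, not the ADC ones, must
+ # be the ones handed to create_channel. The patches below pin both loaders
+ # so the two can be told apart.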
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListQueuesRequest, + dict, + ], +) +def test_list_queues(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + client.list_queues() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + +@pytest.mark.asyncio +async def test_list_queues_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ListQueuesRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. 
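+ # NOTE: grpc_helpers_async.FakeUnaryUnaryCall above wraps the canned
+ # response in an awaitable, so the mocked stub behaves like a real
+ # async gRPC call.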
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListQueuesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_queues_async_from_dict(): + await test_list_queues_async(request_type=dict) + + +def test_list_queues_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + call.return_value = cloudtasks.ListQueuesResponse() + client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_queues_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListQueuesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListQueuesResponse() + ) + await client.list_queues(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_queues_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_queues), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListQueuesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_queues( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_queues_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
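+ # NOTE (illustrative, not part of the generated tests): callers pass
+ # either a request object or flattened keyword arguments, never both:
+ #
+ #     client.list_queues(request=cloudtasks.ListQueuesRequest(parent=p))
+ #     client.list_queues(parent="projects/my-proj/locations/us-central1")
+ #
+ # The parent string above is a hypothetical resource name.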
+ with pytest.raises(ValueError):
+ client.list_queues(
+ cloudtasks.ListQueuesRequest(),
+ parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_queues_flattened_async():
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_queues), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = cloudtasks.ListQueuesResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ cloudtasks.ListQueuesResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_queues(
+ parent="parent_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_queues_flattened_error_async():
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_queues(
+ cloudtasks.ListQueuesRequest(),
+ parent="parent_value",
+ )
+
+
+def test_list_queues_pager(transport_name: str = "grpc"):
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_queues), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ queue.Queue(),
+ queue.Queue(),
+ ],
+ next_page_token="abc",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[],
+ next_page_token="def",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ ],
+ next_page_token="ghi",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ queue.Queue(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_queues(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, queue.Queue) for i in results)
+
+
+def test_list_queues_pages(transport_name: str = "grpc"):
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_queues), "__call__") as call:
+ # Set the response to a series of pages.
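+ # NOTE: mock's side_effect yields one entry per stub call, so the pager
+ # sees the four pages in order; the trailing RuntimeError would only fire
+ # if the pager fetched past the final (token-less) page.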
+ call.side_effect = (
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ queue.Queue(),
+ queue.Queue(),
+ ],
+ next_page_token="abc",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[],
+ next_page_token="def",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ ],
+ next_page_token="ghi",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ queue.Queue(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_queues(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_queues_async_pager():
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_queues), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ queue.Queue(),
+ queue.Queue(),
+ ],
+ next_page_token="abc",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[],
+ next_page_token="def",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ ],
+ next_page_token="ghi",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ queue.Queue(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_queues(
+ request={},
+ )
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager: # pragma: no branch
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, queue.Queue) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_queues_async_pages():
+ client = CloudTasksAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_queues), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ queue.Queue(),
+ queue.Queue(),
+ ],
+ next_page_token="abc",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[],
+ next_page_token="def",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ ],
+ next_page_token="ghi",
+ ),
+ cloudtasks.ListQueuesResponse(
+ queues=[
+ queue.Queue(),
+ queue.Queue(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+ # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+ async for page_ in ( # pragma: no branch
+ await client.list_queues(request={})
+ ).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ cloudtasks.GetQueueRequest,
+ dict,
+ ],
+)
+def test_get_queue(request_type, transport: str = "grpc"):
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + response = client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_get_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + client.get_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + +@pytest.mark.asyncio +async def test_get_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.GetQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + ) + response = await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_get_queue_async_from_dict(): + await test_get_queue_async(request_type=dict) + + +def test_get_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + call.return_value = queue.Queue() + client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
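+ # NOTE: the "x-goog-request-params" metadata entry carries URI-bound
+ # fields so the backend can route the request; the assertion below only
+ # checks that the header is present in the outgoing metadata.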
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.get_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateQueueRequest, + dict, + ], +) +def test_create_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + ) + response = client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + assert response.type_ == gct_queue.Queue.Type.PULL + + +def test_create_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + client.create_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + +@pytest.mark.asyncio +async def test_create_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.CreateQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + ) + ) + response = await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + assert response.type_ == gct_queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_create_queue_async_from_dict(): + await test_create_queue_async(request_type=dict) + + +def test_create_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateQueueRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.create_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_queue( + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + + +def test_create_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_queue( + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.UpdateQueueRequest, + dict, + ], +) +def test_update_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + ) + response = client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + assert response.type_ == gct_queue.Queue.Type.PULL + + +def test_update_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + client.update_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + +@pytest.mark.asyncio +async def test_update_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.UpdateQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + ) + ) + response = await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.UpdateQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + assert response.type_ == gct_queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_update_queue_async_from_dict(): + await test_update_queue_async(request_type=dict) + + +def test_update_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + call.return_value = gct_queue.Queue() + client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "queue.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.UpdateQueueRequest() + + request.queue.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + await client.update_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
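+ # NOTE: for UpdateQueue the routing field is nested, so the header value
+ # asserted below is "queue.name=..." rather than "name=...".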
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "queue.name=name_value", + ) in kw["metadata"] + + +def test_update_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gct_queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_queue( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].queue + mock_val = gct_queue.Queue(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
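+ # NOTE (illustrative, hedged): update_mask picks which Queue fields an
+ # UpdateQueue call may modify; a real mask would name actual field paths,
+ # for example:
+ #
+ #     field_mask_pb2.FieldMask(paths=["rate_limits.max_dispatches_per_second"])
+ #
+ # "paths_value" in these tests is only a placeholder. The mixed
+ # request/flattened call below still raises ValueError as usual.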
+ with pytest.raises(ValueError): + await client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteQueueRequest, + dict, + ], +) +def test_delete_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + client.delete_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + +@pytest.mark.asyncio +async def test_delete_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteQueueRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_queue_async_from_dict(): + await test_delete_queue_async(request_type=dict) + + +def test_delete_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + call.return_value = None + client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. 
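+ # NOTE: DeleteQueue returns google.protobuf.Empty on the wire, which the
+ # generated client surfaces as None; hence the "assert response is None"
+ # checks in this group of tests.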
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PurgeQueueRequest, + dict, + ], +) +def test_purge_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + response = client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_purge_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + client.purge_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + +@pytest.mark.asyncio +async def test_purge_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + ) + response = await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PurgeQueueRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_purge_queue_async_from_dict(): + await test_purge_queue_async(request_type=dict) + + +def test_purge_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + call.return_value = queue.Queue() + client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PurgeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.purge_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_purge_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.purge_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_purge_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.purge_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.purge_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_purge_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PauseQueueRequest, + dict, + ], +) +def test_pause_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + response = client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_pause_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + client.pause_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + +@pytest.mark.asyncio +async def test_pause_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. 
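+        # FakeUnaryUnaryCall wraps a plain response message in an awaitable,
+        # so "await client.pause_queue(request)" resolves to the designated
+        # Queue. Conceptually it behaves like this minimal, illustrative
+        # sketch (not the real google-api-core implementation):
+        #
+        #     class FakeUnaryUnaryCall:
+        #         def __init__(self, response):
+        #             self._response = response
+        #
+        #         def __await__(self):
+        #             yield from ()  # make this method a generator
+        #             return self._response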
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + ) + response = await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.PauseQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_pause_queue_async_from_dict(): + await test_pause_queue_async(request_type=dict) + + +def test_pause_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + call.return_value = queue.Queue() + client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.PauseQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.pause_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_pause_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.pause_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
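+    # (The flattened name keyword is copied into a request message by the
+    # client, roughly cloudtasks.PauseQueueRequest(name="name_value"), which
+    # is why the assertions below read the value back off args[0].)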
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_pause_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pause_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_pause_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.pause_queue( + cloudtasks.PauseQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ResumeQueueRequest, + dict, + ], +) +def test_resume_queue(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + response = client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_resume_queue_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + client.resume_queue() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + +@pytest.mark.asyncio +async def test_resume_queue_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + ) + response = await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ResumeQueueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +@pytest.mark.asyncio +async def test_resume_queue_async_from_dict(): + await test_resume_queue_async(request_type=dict) + + +def test_resume_queue_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + call.return_value = queue.Queue() + client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_queue_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ResumeQueueRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + await client.resume_queue(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
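+    # The routing header travels as a plain ("key", "value") tuple inside
+    # the metadata sequence handed to the stub, e.g. roughly:
+    #
+    #     metadata = (..., ("x-goog-request-params", "name=name_value"))
+    #
+    # so a tuple-membership check on kw["metadata"] is sufficient. The
+    # service uses this header to route the request by resource name.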
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_resume_queue_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_resume_queue_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.resume_queue), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = queue.Queue() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_queue( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_resume_queue_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
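+    # (Each entry in call.mock_calls is a (name, args, kwargs) triple, which
+    # is why these tests unpack it as "_, args, _" and treat args[0] as the
+    # request message that reached the stub.)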
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
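+    # For the IAM mixin methods the routed field is "resource" rather than
+    # "name" or "parent", so that is the value the header assertion further
+    # down expects to find in x-goog-request-params.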
+ request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_get_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
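+    # (Unlike the Queue and Task messages above, Policy comes from the raw
+    # google.iam.v1 protobuf stubs, hence the _pb2 suffix rather than a
+    # proto-plus wrapper, and its etag field is raw bytes.)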
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +def test_set_iam_policy_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict_foreign(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource="resource_value", + permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].permissions + mock_val = ["permissions_value"] + assert arg == mock_val + + +def test_test_iam_permissions_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.test_iam_permissions( + resource="resource_value", + permissions=["permissions_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + arg = args[0].permissions + mock_val = ["permissions_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_test_iam_permissions_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
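+    # The ValueError is raised client-side, before any transport activity,
+    # which is why this test needs no mock of the underlying stub.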
+ with pytest.raises(ValueError): + await client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListTasksRequest, + dict, + ], +) +def test_list_tasks(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + client.list_tasks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + +@pytest.mark.asyncio +async def test_list_tasks_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.ListTasksRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.ListTasksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tasks_async_from_dict(): + await test_list_tasks_async(request_type=dict) + + +def test_list_tasks_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloudtasks.ListTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value = cloudtasks.ListTasksResponse() + client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_tasks_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.ListTasksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + await client.list_tasks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_tasks_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_tasks_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.ListTasksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.ListTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_tasks_flattened_error_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tasks( + cloudtasks.ListTasksRequest(), + parent="parent_value", + ) + + +def test_list_tasks_pager(transport_name: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_tasks(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + +def test_list_tasks_pages(transport_name: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_tasks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tasks_async_pager(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
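+        # With an iterable side_effect, each invocation of the mocked method
+        # returns the next item, and an exception class or instance in the
+        # sequence is raised instead of returned. The trailing RuntimeError
+        # is therefore a sentinel: if the pager requested one page more than
+        # the responses provide, the test would fail loudly.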
+ call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tasks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, task.Task) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tasks_async_pages(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tasks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetTaskRequest, + dict, + ], +) +def test_get_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + response = client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_get_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
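+    # When neither a request object nor flattened fields are supplied, the
+    # client builds a default request, roughly:
+    #
+    #     request = cloudtasks.GetTaskRequest()
+    #
+    # Proto3 treats every field as optional, so the empty message is valid.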
+ client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + client.get_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + +@pytest.mark.asyncio +async def test_get_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.GetTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + ) + response = await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.GetTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_get_task_async_from_dict(): + await test_get_task_async(request_type=dict) + + +def test_get_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = task.Task() + client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.GetTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.get_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_task_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = task.Task()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_task_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_task(
+            cloudtasks.GetTaskRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_task_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_task(
+            cloudtasks.GetTaskRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.CreateTaskRequest,
+        dict,
+    ],
+)
+def test_create_task(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gct_task.Task(
+            name="name_value",
+            dispatch_count=1496,
+            response_count=1527,
+            view=gct_task.Task.View.BASIC,
+        )
+        response = client.create_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.CreateTaskRequest()
+
+    # Establish that the response is the type that we expect.
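+    # Note: gct_task aliases the same Task message; the second import name
+    # avoids a collision where Task is both an input and an output type.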
+ assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + client.create_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + +@pytest.mark.asyncio +async def test_create_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + ) + ) + response = await client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.CreateTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_create_task_async_from_dict(): + await test_create_task_async(request_type=dict) + + +def test_create_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_task), "__call__") as call: + call.return_value = gct_task.Task() + client.create_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.CreateTaskRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
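+    # FakeUnaryUnaryCall wraps the canned response in an awaitable, so the
+    # mocked stub can be awaited like a real async unary-unary gRPC call.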
+    with mock.patch.object(type(client.transport.create_task), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task())
+        await client.create_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_task_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gct_task.Task()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_task(
+            parent="parent_value",
+            task=gct_task.Task(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].task
+        mock_val = gct_task.Task(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_task_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_task(
+            cloudtasks.CreateTaskRequest(),
+            parent="parent_value",
+            task=gct_task.Task(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_task_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_task(
+            parent="parent_value",
+            task=gct_task.Task(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].task
+        mock_val = gct_task.Task(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
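+    # The combination is ambiguous, so the client raises ValueError before any
+    # RPC is attempted.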
+ with pytest.raises(ValueError): + await client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteTaskRequest, + dict, + ], +) +def test_delete_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + client.delete_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + +@pytest.mark.asyncio +async def test_delete_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.DeleteTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.DeleteTaskRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_task_async_from_dict(): + await test_delete_task_async(request_type=dict) + + +def test_delete_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.DeleteTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_task), "__call__") as call: + call.return_value = None + client.delete_task(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_task_field_headers_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = cloudtasks.DeleteTaskRequest()
+
+    request.name = "name_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_delete_task_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_task_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_task(
+            cloudtasks.DeleteTaskRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_task_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_task( + cloudtasks.DeleteTaskRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.RunTaskRequest, + dict, + ], +) +def test_run_task(request_type, transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + response = client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_run_task_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + client.run_task() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + +@pytest.mark.asyncio +async def test_run_task_async( + transport: str = "grpc_asyncio", request_type=cloudtasks.RunTaskRequest +): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + ) + response = await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.RunTaskRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +@pytest.mark.asyncio +async def test_run_task_async_from_dict(): + await test_run_task_async(request_type=dict) + + +def test_run_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + call.return_value = task.Task() + client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_run_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.RunTaskRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) + await client.run_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_run_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.run_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = task.Task() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.run_task( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_run_task_flattened_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_run_task_flattened_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.run_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.run_task(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_run_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.run_task(
+            cloudtasks.RunTaskRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.BufferTaskRequest,
+        dict,
+    ],
+)
+def test_buffer_task(request_type, transport: str = "grpc"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = cloudtasks.BufferTaskResponse()
+        response = client.buffer_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.BufferTaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, cloudtasks.BufferTaskResponse)
+
+
+def test_buffer_task_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        client.buffer_task()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == cloudtasks.BufferTaskRequest()
+
+
+@pytest.mark.asyncio
+async def test_buffer_task_async(
+    transport: str = "grpc_asyncio", request_type=cloudtasks.BufferTaskRequest
+):
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.BufferTaskResponse()
+        )
+        response = await client.buffer_task(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloudtasks.BufferTaskRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudtasks.BufferTaskResponse) + + +@pytest.mark.asyncio +async def test_buffer_task_async_from_dict(): + await test_buffer_task_async(request_type=dict) + + +def test_buffer_task_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.BufferTaskRequest() + + request.queue = "queue_value" + request.task_id = "task_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: + call.return_value = cloudtasks.BufferTaskResponse() + client.buffer_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "queue=queue_value&task_id=task_id_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_buffer_task_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloudtasks.BufferTaskRequest() + + request.queue = "queue_value" + request.task_id = "task_id_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudtasks.BufferTaskResponse() + ) + await client.buffer_task(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "queue=queue_value&task_id=task_id_value", + ) in kw["metadata"] + + +def test_buffer_task_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.buffer_task), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloudtasks.BufferTaskResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.buffer_task( + queue="queue_value", + task_id="task_id_value", + body=httpbody_pb2.HttpBody(content_type="content_type_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
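+        # body is a google.api.HttpBody (an arbitrary payload plus its content
+        # type), which buffer_task forwards as the task body.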
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].queue
+        mock_val = "queue_value"
+        assert arg == mock_val
+        arg = args[0].task_id
+        mock_val = "task_id_value"
+        assert arg == mock_val
+        arg = args[0].body
+        mock_val = httpbody_pb2.HttpBody(content_type="content_type_value")
+        assert arg == mock_val
+
+
+def test_buffer_task_flattened_error():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.buffer_task(
+            cloudtasks.BufferTaskRequest(),
+            queue="queue_value",
+            task_id="task_id_value",
+            body=httpbody_pb2.HttpBody(content_type="content_type_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_buffer_task_flattened_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.buffer_task), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            cloudtasks.BufferTaskResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.buffer_task(
+            queue="queue_value",
+            task_id="task_id_value",
+            body=httpbody_pb2.HttpBody(content_type="content_type_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].queue
+        mock_val = "queue_value"
+        assert arg == mock_val
+        arg = args[0].task_id
+        mock_val = "task_id_value"
+        assert arg == mock_val
+        arg = args[0].body
+        mock_val = httpbody_pb2.HttpBody(content_type="content_type_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_buffer_task_flattened_error_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.buffer_task(
+            cloudtasks.BufferTaskRequest(),
+            queue="queue_value",
+            task_id="task_id_value",
+            body=httpbody_pb2.HttpBody(content_type="content_type_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        cloudtasks.ListQueuesRequest,
+        dict,
+    ],
+)
+def test_list_queues_rest(request_type):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
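+        # The REST tests fake the transport's HTTP session: the proto reply is
+        # serialized to JSON and planted as the response body below.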
+ return_value = cloudtasks.ListQueuesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_queues(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListQueuesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_queues_rest_required_fields(request_type=cloudtasks.ListQueuesRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_queues._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_queues._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "read_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListQueuesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
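+            # With transcoding stubbed out this way, the mocked session should
+            # only ever receive the default query params, which the "$alt"
+            # assertion further down verifies.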
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.list_queues(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_queues_rest_unset_required_fields():
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_queues._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "filter",
+                "pageSize",
+                "pageToken",
+                "readMask",
+            )
+        )
+        & set(("parent",))
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_queues_rest_interceptors(null_interceptor):
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudTasksRestInterceptor(),
+    )
+    client = CloudTasksClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "post_list_queues"
+    ) as post, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "pre_list_queues"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudtasks.ListQueuesRequest.pb(cloudtasks.ListQueuesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = cloudtasks.ListQueuesResponse.to_json(
+            cloudtasks.ListQueuesResponse()
+        )
+
+        request = cloudtasks.ListQueuesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloudtasks.ListQueuesResponse()
+
+        client.list_queues(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_queues_rest_bad_request(
+    transport: str = "rest", request_type=cloudtasks.ListQueuesRequest
+):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
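+    # A 400 status from the session should surface to the caller as
+    # google.api_core.exceptions.BadRequest.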
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_queues(request) + + +def test_list_queues_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListQueuesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListQueuesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_queues(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{parent=projects/*/locations/*}/queues" + % client.transport._host, + args[1], + ) + + +def test_list_queues_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_queues( + cloudtasks.ListQueuesRequest(), + parent="parent_value", + ) + + +def test_list_queues_rest_pager(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
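+        # The pager is exercised twice below (item iteration, then .pages), so
+        # four serialized pages are queued up two times over.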
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + queue.Queue(), + ], + next_page_token="abc", + ), + cloudtasks.ListQueuesResponse( + queues=[], + next_page_token="def", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + ], + next_page_token="ghi", + ), + cloudtasks.ListQueuesResponse( + queues=[ + queue.Queue(), + queue.Queue(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudtasks.ListQueuesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_queues(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, queue.Queue) for i in results) + + pages = list(client.list_queues(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetQueueRequest, + dict, + ], +) +def test_get_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_get_queue_rest_required_fields(request_type=cloudtasks.GetQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_queue._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("read_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = queue.Queue.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.get_queue(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_queue_rest_unset_required_fields():
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_queue._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("readMask",)) & set(("name",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_queue_rest_interceptors(null_interceptor):
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudTasksRestInterceptor(),
+    )
+    client = CloudTasksClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "post_get_queue"
+    ) as post, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "pre_get_queue"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudtasks.GetQueueRequest.pb(cloudtasks.GetQueueRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = queue.Queue.to_json(queue.Queue())
+
+        request = cloudtasks.GetQueueRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = queue.Queue()
+
+        client.get_queue(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_queue_rest_bad_request(
+    transport: str = "rest", request_type=cloudtasks.GetQueueRequest
+):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_queue(request)
+
+
+def test_get_queue_rest_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
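+    # The URL actually requested is checked below against the v2beta3 http
+    # rule for GetQueue via path_template.validate.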
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{name=projects/*/locations/*/queues/*}" + % client.transport._host, + args[1], + ) + + +def test_get_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_queue( + cloudtasks.GetQueueRequest(), + name="name_value", + ) + + +def test_get_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateQueueRequest, + dict, + ], +) +def test_create_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["queue"] = { + "name": "name_value", + "app_engine_http_queue": { + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + } + }, + "http_target": { + "uri_override": { + "scheme": 1, + "host": "host_value", + "port": 453, + "path_override": {"path": "path_value"}, + "query_override": {"query_params": "query_params_value"}, + "uri_override_enforce_mode": 1, + }, + "http_method": 1, + "header_overrides": [ + {"header": {"key": "key_value", "value": "value_value"}} + ], + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "rate_limits": { + "max_dispatches_per_second": 0.26380000000000003, + "max_burst_size": 1519, + "max_concurrent_dispatches": 2671, + }, + "retry_config": { + "max_attempts": 1303, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "task_ttl": {}, + "tombstone_ttl": {}, + "stackdriver_logging_config": {"sampling_ratio": 0.1497}, + "type_": 1, + "stats": { + "tasks_count": 1198, + "oldest_estimated_arrival_time": {}, + "executed_last_minute_count": 2787, + "concurrent_dispatches_count": 2898, + "effective_execution_rate": 0.2543, + }, + } + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_queue(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gct_queue.Queue) + assert response.name == "name_value" + assert response.state == gct_queue.Queue.State.RUNNING + assert response.type_ == gct_queue.Queue.Type.PULL + + +def test_create_queue_rest_required_fields(request_type=cloudtasks.CreateQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = gct_queue.Queue.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.create_queue(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_queue_rest_unset_required_fields():
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_queue._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "parent",
+                "queue",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_queue_rest_interceptors(null_interceptor):
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudTasksRestInterceptor(),
+    )
+    client = CloudTasksClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "post_create_queue"
+    ) as post, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "pre_create_queue"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudtasks.CreateQueueRequest.pb(cloudtasks.CreateQueueRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = gct_queue.Queue.to_json(gct_queue.Queue())
+
+        request = cloudtasks.CreateQueueRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = gct_queue.Queue()
+
+        client.create_queue(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_queue_rest_bad_request(
+    transport: str = "rest", request_type=cloudtasks.CreateQueueRequest
+):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request_init["queue"] = {
+        "name": "name_value",
+        "app_engine_http_queue": {
+            "app_engine_routing_override": {
+                "service": "service_value",
+                "version": "version_value",
+                "instance": "instance_value",
+                "host": "host_value",
+            }
+        },
+        "http_target": {
+            "uri_override": {
+                "scheme": 1,
+                "host": "host_value",
+                "port": 453,
+                "path_override": {"path": "path_value"},
+                "query_override": {"query_params": "query_params_value"},
+                "uri_override_enforce_mode": 1,
+            },
+            "http_method": 1,
+            "header_overrides": [
+                {"header": {"key": "key_value", "value": "value_value"}}
+            ],
+            "oauth_token": {
+                "service_account_email": "service_account_email_value",
+                "scope": "scope_value",
+            },
+            "oidc_token": {
+                "service_account_email": "service_account_email_value",
+                "audience": "audience_value",
+            },
+        },
+        "rate_limits": {
+            "max_dispatches_per_second": 0.26380000000000003,
+            "max_burst_size": 1519,
+            "max_concurrent_dispatches": 2671,
+        },
+        "retry_config": {
+            "max_attempts": 1303,
+            "max_retry_duration": {"seconds": 751, "nanos": 543},
+            "min_backoff": {},
+            "max_backoff": {},
+            "max_doublings": 1388,
+        },
+        "state": 1,
+        "purge_time": {"seconds": 751, "nanos": 543},
+        "task_ttl": {},
+        "tombstone_ttl": {},
+        "stackdriver_logging_config": {"sampling_ratio": 0.1497},
+        "type_": 1,
+        "stats": {
+            "tasks_count": 1198,
+            "oldest_estimated_arrival_time": {},
+            "executed_last_minute_count": 2787,
+            "concurrent_dispatches_count": 2898,
+            "effective_execution_rate": 0.2543,
+        },
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_queue(request)
+
+
+def test_create_queue_rest_flattened():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = gct_queue.Queue()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"parent": "projects/sample1/locations/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent="parent_value",
+            queue=gct_queue.Queue(name="name_value"),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = gct_queue.Queue.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.create_queue(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v2beta3/{parent=projects/*/locations/*}/queues"
+            % client.transport._host,
+            args[1],
+        )
+
+
+def test_create_queue_rest_flattened_error(transport: str = "rest"):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_queue( + cloudtasks.CreateQueueRequest(), + parent="parent_value", + queue=gct_queue.Queue(name="name_value"), + ) + + +def test_create_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.UpdateQueueRequest, + dict, + ], +) +def test_update_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + request_init["queue"] = { + "name": "projects/sample1/locations/sample2/queues/sample3", + "app_engine_http_queue": { + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + } + }, + "http_target": { + "uri_override": { + "scheme": 1, + "host": "host_value", + "port": 453, + "path_override": {"path": "path_value"}, + "query_override": {"query_params": "query_params_value"}, + "uri_override_enforce_mode": 1, + }, + "http_method": 1, + "header_overrides": [ + {"header": {"key": "key_value", "value": "value_value"}} + ], + "oauth_token": { + "service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "rate_limits": { + "max_dispatches_per_second": 0.26380000000000003, + "max_burst_size": 1519, + "max_concurrent_dispatches": 2671, + }, + "retry_config": { + "max_attempts": 1303, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "task_ttl": {}, + "tombstone_ttl": {}, + "stackdriver_logging_config": {"sampling_ratio": 0.1497}, + "type_": 1, + "stats": { + "tasks_count": 1198, + "oldest_estimated_arrival_time": {}, + "executed_last_minute_count": 2787, + "concurrent_dispatches_count": 2898, + "effective_execution_rate": 0.2543, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue( + name="name_value", + state=gct_queue.Queue.State.RUNNING, + type_=gct_queue.Queue.Type.PULL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gct_queue.Queue)
+ assert response.name == "name_value"
+ assert response.state == gct_queue.Queue.State.RUNNING
+ assert response.type_ == gct_queue.Queue.Type.PULL
+
+
+def test_update_queue_rest_required_fields(request_type=cloudtasks.UpdateQueueRequest):
+ transport_class = transports.CloudTasksRestTransport
+
+ request_init = {}
+ request = request_type(**request_init)
+ pb_request = request_type.pb(request)
+ jsonified_request = json.loads(
+ json_format.MessageToJson(
+ pb_request,
+ including_default_value_fields=False,
+ use_integers_for_enums=False,
+ )
+ )
+
+ # verify fields with default values are dropped
+
+ unset_fields = transport_class(
+ credentials=ga_credentials.AnonymousCredentials()
+ ).update_queue._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with default values are now present
+
+ unset_fields = transport_class(
+ credentials=ga_credentials.AnonymousCredentials()
+ ).update_queue._get_unset_required_fields(jsonified_request)
+ # Check that path parameters and body parameters are not mixed together.
+ assert not set(unset_fields) - set(("update_mask",))
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+
+ client = CloudTasksClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = gct_queue.Queue()
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, "transcode") as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
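+ # UpdateQueue transcodes to PATCH rather than POST; the stubbed result
+ # below reflects that, with the request message doubling as the body.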
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("queue",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_update_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_update_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.UpdateQueueRequest.pb(cloudtasks.UpdateQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gct_queue.Queue.to_json(gct_queue.Queue()) + + request = cloudtasks.UpdateQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gct_queue.Queue() + + client.update_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.UpdateQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + request_init["queue"] = { + "name": "projects/sample1/locations/sample2/queues/sample3", + "app_engine_http_queue": { + "app_engine_routing_override": { + "service": "service_value", + "version": "version_value", + "instance": "instance_value", + "host": "host_value", + } + }, + "http_target": { + "uri_override": { + "scheme": 1, + "host": "host_value", + "port": 453, + "path_override": {"path": "path_value"}, + "query_override": {"query_params": "query_params_value"}, + "uri_override_enforce_mode": 1, + }, + "http_method": 1, + "header_overrides": [ + {"header": {"key": "key_value", "value": "value_value"}} + ], + "oauth_token": { + 
"service_account_email": "service_account_email_value", + "scope": "scope_value", + }, + "oidc_token": { + "service_account_email": "service_account_email_value", + "audience": "audience_value", + }, + }, + "rate_limits": { + "max_dispatches_per_second": 0.26380000000000003, + "max_burst_size": 1519, + "max_concurrent_dispatches": 2671, + }, + "retry_config": { + "max_attempts": 1303, + "max_retry_duration": {"seconds": 751, "nanos": 543}, + "min_backoff": {}, + "max_backoff": {}, + "max_doublings": 1388, + }, + "state": 1, + "purge_time": {"seconds": 751, "nanos": 543}, + "task_ttl": {}, + "tombstone_ttl": {}, + "stackdriver_logging_config": {"sampling_ratio": 0.1497}, + "type_": 1, + "stats": { + "tasks_count": 1198, + "oldest_estimated_arrival_time": {}, + "executed_last_minute_count": 2787, + "concurrent_dispatches_count": 2898, + "effective_execution_rate": 0.2543, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_queue(request) + + +def test_update_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = { + "queue": {"name": "projects/sample1/locations/sample2/queues/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{queue.name=projects/*/locations/*/queues/*}" + % client.transport._host, + args[1], + ) + + +def test_update_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_queue( + cloudtasks.UpdateQueueRequest(), + queue=gct_queue.Queue(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteQueueRequest, + dict, + ], +) +def test_delete_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_queue(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_queue_rest_required_fields(request_type=cloudtasks.DeleteQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
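+ # DeleteQueue maps to HTTP DELETE with no request body, so the stubbed
+ # transcode result below has no "body" key.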
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_delete_queue" + ) as pre: + pre.assert_not_called() + pb_message = cloudtasks.DeleteQueueRequest.pb(cloudtasks.DeleteQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudtasks.DeleteQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.DeleteQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_queue(request) + + +def test_delete_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{name=projects/*/locations/*/queues/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_queue( + cloudtasks.DeleteQueueRequest(), + name="name_value", + ) + + +def test_delete_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PurgeQueueRequest, + dict, + ], +) +def test_purge_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_purge_queue_rest_required_fields(request_type=cloudtasks.PurgeQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
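+ # PurgeQueue is a custom-verb POST; the placeholder URI is sufficient
+ # here because transcode() is mocked.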
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_purge_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_purge_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.PurgeQueueRequest.pb(cloudtasks.PurgeQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.PurgeQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.purge_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.PurgeQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_queue(request) + + +def test_purge_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.purge_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{name=projects/*/locations/*/queues/*}:purge" + % client.transport._host, + args[1], + ) + + +def test_purge_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.purge_queue( + cloudtasks.PurgeQueueRequest(), + name="name_value", + ) + + +def test_purge_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.PauseQueueRequest, + dict, + ], +) +def test_pause_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pause_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_pause_queue_rest_required_fields(request_type=cloudtasks.PauseQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pause_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
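+ # Same pattern as PurgeQueue: a POST whose body mirrors the request;
+ # the real ":pause" URI is exercised in the flattened test further down.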
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.pause_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pause_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pause_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pause_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_pause_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_pause_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.PauseQueueRequest.pb(cloudtasks.PauseQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.PauseQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.pause_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_pause_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.PauseQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.pause_queue(request) + + +def test_pause_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.pause_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{name=projects/*/locations/*/queues/*}:pause" + % client.transport._host, + args[1], + ) + + +def test_pause_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_queue( + cloudtasks.PauseQueueRequest(), + name="name_value", + ) + + +def test_pause_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ResumeQueueRequest, + dict, + ], +) +def test_resume_queue_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue( + name="name_value", + state=queue.Queue.State.RUNNING, + type_=queue.Queue.Type.PULL, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.resume_queue(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, queue.Queue) + assert response.name == "name_value" + assert response.state == queue.Queue.State.RUNNING + assert response.type_ == queue.Queue.Type.PULL + + +def test_resume_queue_rest_required_fields(request_type=cloudtasks.ResumeQueueRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).resume_queue._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
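+ # ResumeQueue follows the same POST-with-body shape as pause and purge.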
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.resume_queue(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_resume_queue_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.resume_queue._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_resume_queue_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_resume_queue" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_resume_queue" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.ResumeQueueRequest.pb(cloudtasks.ResumeQueueRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = queue.Queue.to_json(queue.Queue()) + + request = cloudtasks.ResumeQueueRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = queue.Queue() + + client.resume_queue( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_resume_queue_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.ResumeQueueRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resume_queue(request) + + +def test_resume_queue_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = queue.Queue() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = queue.Queue.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.resume_queue(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{name=projects/*/locations/*/queues/*}:resume" + % client.transport._host, + args[1], + ) + + +def test_resume_queue_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_queue( + cloudtasks.ResumeQueueRequest(), + name="name_value", + ) + + +def test_resume_queue_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
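+ # IAM requests are plain protobuf messages (iam_policy_pb2) rather than
+ # proto-plus types, so no request_type.pb() conversion is needed here.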
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_get_iam_policy" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_get_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.GetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + + request = iam_policy_pb2.GetIamPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{resource=projects/*/locations/*/queues/*}:getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_get_iam_policy_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
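+            # This dict mirrors what path_template.transcode() produces: an
+            # HTTP method, a URI with no path parameters, and the request
+            # message reused for both query params and body.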
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_set_iam_policy" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_set_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.SetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(policy_pb2.Policy()) + + request = iam_policy_pb2.SetIamPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{resource=projects/*/locations/*/queues/*}:setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_set_iam_policy_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
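+    # The repeated permissions field is the only payload in
+    # TestIamPermissionsResponse, so the round trip is verified on the type
+    # and that single list.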
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_rest_required_fields( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + jsonified_request["permissions"] = "permissions_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == "permissions_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "permissions", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_test_iam_permissions" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_test_iam_permissions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + request = iam_policy_pb2.TestIamPermissionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
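+    # The REST transport maps 4xx/5xx statuses onto google.api_core
+    # exceptions; a mocked 400 therefore surfaces as
+    # core_exceptions.BadRequest.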
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/queues/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + permissions=["permissions_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = return_value + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{resource=projects/*/locations/*/queues/*}:testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource="resource_value", + permissions=["permissions_value"], + ) + + +def test_test_iam_permissions_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.ListTasksRequest, + dict, + ], +) +def test_list_tasks_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_tasks(request) + + # Establish that the response is the type that we expect. 
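+    # list_tasks wraps the decoded ListTasksResponse in a ListTasksPager,
+    # which forwards attribute lookups to the wrapped response; that is why
+    # next_page_token reads straight off the pager.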
+ assert isinstance(response, pagers.ListTasksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tasks_rest_required_fields(request_type=cloudtasks.ListTasksRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tasks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_tasks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "response_view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
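+            # ListTasks maps to HTTP GET, so this stubbed transcode result
+            # omits the "body" key; every request field has to travel as a
+            # query parameter instead.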
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_tasks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_tasks_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_tasks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "responseView", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tasks_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_list_tasks" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_list_tasks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.ListTasksRequest.pb(cloudtasks.ListTasksRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudtasks.ListTasksResponse.to_json( + cloudtasks.ListTasksResponse() + ) + + request = cloudtasks.ListTasksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudtasks.ListTasksResponse() + + client.list_tasks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_tasks_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.ListTasksRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_tasks(request) + + +def test_list_tasks_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.ListTasksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.ListTasksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_tasks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks" + % client.transport._host, + args[1], + ) + + +def test_list_tasks_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tasks( + cloudtasks.ListTasksRequest(), + parent="parent_value", + ) + + +def test_list_tasks_rest_pager(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + task.Task(), + ], + next_page_token="abc", + ), + cloudtasks.ListTasksResponse( + tasks=[], + next_page_token="def", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + ], + next_page_token="ghi", + ), + cloudtasks.ListTasksResponse( + tasks=[ + task.Task(), + task.Task(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudtasks.ListTasksResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + pager = client.list_tasks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, task.Task) for i in results) + + pages = list(client.list_tasks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.GetTaskRequest, + dict, + ], +) +def test_get_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_task(request) + + # Establish that the response is the type that we expect. 
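+    # protobuf's JSON form writes enums as their symbolic names, so getting
+    # task.Task.View.BASIC back confirms the enum survived the round trip
+    # along with the integer counters.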
+ assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_get_task_rest_required_fields(request_type=cloudtasks.GetTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_task._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("response_view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(("responseView",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_get_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_get_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.GetTaskRequest.pb(cloudtasks.GetTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = task.Task.to_json(task.Task()) + + request = cloudtasks.GetTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = task.Task() + + client.get_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.GetTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_task(request) + + +def test_get_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
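+    # As in the flattened IAM tests above, only session.request is faked;
+    # request construction and URL expansion run for real and feed the
+    # template check below.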
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}" + % client.transport._host, + args[1], + ) + + +def test_get_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_task( + cloudtasks.GetTaskRequest(), + name="name_value", + ) + + +def test_get_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.CreateTaskRequest, + dict, + ], +) +def test_create_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=gct_task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_task(request) + + # Establish that the response is the type that we expect. 
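+    # create_task is asserted against gct_task.Task; gct_task appears to be
+    # the generator's alias for the same task types module, imported under a
+    # second name to avoid clashing with the plain task import.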
+ assert isinstance(response, gct_task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == gct_task.Task.View.BASIC + + +def test_create_task_rest_required_fields(request_type=cloudtasks.CreateTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gct_task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_task._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "task", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_create_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_create_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.CreateTaskRequest.pb(cloudtasks.CreateTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gct_task.Task.to_json(gct_task.Task()) + + request = cloudtasks.CreateTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gct_task.Task() + + client.create_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.CreateTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_task(request) + + +def test_create_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gct_task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2/queues/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gct_task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks" + % client.transport._host, + args[1], + ) + + +def test_create_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_task( + cloudtasks.CreateTaskRequest(), + parent="parent_value", + task=gct_task.Task(name="name_value"), + ) + + +def test_create_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.DeleteTaskRequest, + dict, + ], +) +def test_delete_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_task(request) + + # Establish that the response is the type that we expect. 
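+    # DeleteTask returns google.protobuf.Empty, which the client surfaces as
+    # None; the empty-string body above stands in for that empty payload.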
+ assert response is None + + +def test_delete_task_rest_required_fields(request_type=cloudtasks.DeleteTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
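+            # DeleteTask maps to HTTP DELETE with no payload in either
+            # direction, hence no "body" key here and an empty string as the
+            # faked response content below.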
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_delete_task" + ) as pre: + pre.assert_not_called() + pb_message = cloudtasks.DeleteTaskRequest.pb(cloudtasks.DeleteTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudtasks.DeleteTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.DeleteTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_task(request) + + +def test_delete_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
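+        # (None, because DeleteTask produces no response payload.)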
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_task( + cloudtasks.DeleteTaskRequest(), + name="name_value", + ) + + +def test_delete_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.RunTaskRequest, + dict, + ], +) +def test_run_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task( + name="name_value", + dispatch_count=1496, + response_count=1527, + view=task.Task.View.BASIC, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, task.Task) + assert response.name == "name_value" + assert response.dispatch_count == 1496 + assert response.response_count == 1527 + assert response.view == task.Task.View.BASIC + + +def test_run_task_rest_required_fields(request_type=cloudtasks.RunTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = task.Task() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.run_task(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_task_rest_unset_required_fields(): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_task._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_task_rest_interceptors(null_interceptor): + transport = transports.CloudTasksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudTasksRestInterceptor(), + ) + client = CloudTasksClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudTasksRestInterceptor, "post_run_task" + ) as post, mock.patch.object( + transports.CloudTasksRestInterceptor, "pre_run_task" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudtasks.RunTaskRequest.pb(cloudtasks.RunTaskRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = task.Task.to_json(task.Task()) + + request = cloudtasks.RunTaskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = task.Task() + + client.run_task( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_task_rest_bad_request( + transport: str = "rest", request_type=cloudtasks.RunTaskRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_task(request) + + +def test_run_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
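+    # RunTask is a custom method, so the URL validated below carries the
+    # ":run" verb appended to the task resource path.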
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = task.Task() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/queues/sample3/tasks/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = task.Task.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}:run" + % client.transport._host, + args[1], + ) + + +def test_run_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.run_task( + cloudtasks.RunTaskRequest(), + name="name_value", + ) + + +def test_run_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudtasks.BufferTaskRequest, + dict, + ], +) +def test_buffer_task_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "queue": "projects/sample1/locations/sample2/queues/sample3", + "task_id": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.BufferTaskResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.BufferTaskResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.buffer_task(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudtasks.BufferTaskResponse) + + +def test_buffer_task_rest_required_fields(request_type=cloudtasks.BufferTaskRequest): + transport_class = transports.CloudTasksRestTransport + + request_init = {} + request_init["queue"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).buffer_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["queue"] = "queue_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).buffer_task._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "queue" in jsonified_request + assert jsonified_request["queue"] == "queue_value" + + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudtasks.BufferTaskResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = cloudtasks.BufferTaskResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.buffer_task(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_buffer_task_rest_unset_required_fields():
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.buffer_task._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("queue",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_buffer_task_rest_interceptors(null_interceptor):
+    transport = transports.CloudTasksRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.CloudTasksRestInterceptor(),
+    )
+    client = CloudTasksClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "post_buffer_task"
+    ) as post, mock.patch.object(
+        transports.CloudTasksRestInterceptor, "pre_buffer_task"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloudtasks.BufferTaskRequest.pb(cloudtasks.BufferTaskRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = cloudtasks.BufferTaskResponse.to_json(
+            cloudtasks.BufferTaskResponse()
+        )
+
+        request = cloudtasks.BufferTaskRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloudtasks.BufferTaskResponse()
+
+        client.buffer_task(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_buffer_task_rest_bad_request(
+    transport: str = "rest", request_type=cloudtasks.BufferTaskRequest
+):
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {
+        "queue": "projects/sample1/locations/sample2/queues/sample3",
+        "task_id": "sample4",
+    }
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.buffer_task(request) + + +def test_buffer_task_rest_flattened(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudtasks.BufferTaskResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "queue": "projects/sample1/locations/sample2/queues/sample3", + "task_id": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + queue="queue_value", + task_id="task_id_value", + body=httpbody_pb2.HttpBody(content_type="content_type_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudtasks.BufferTaskResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.buffer_task(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta3/{queue=projects/*/locations/*/queues/*}/tasks/{task_id}:buffer" + % client.transport._host, + args[1], + ) + + +def test_buffer_task_rest_flattened_error(transport: str = "rest"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.buffer_task( + cloudtasks.BufferTaskRequest(), + queue="queue_value", + task_id="task_id_value", + body=httpbody_pb2.HttpBody(content_type="content_type_value"), + ) + + +def test_buffer_task_rest_error(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudTasksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudTasksClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudTasksGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudTasksGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + transports.CloudTasksRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudTasksClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudTasksGrpcTransport, + ) + + +def test_cloud_tasks_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_tasks_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudTasksTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
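+    # A minimal sketch of the pattern asserted below (assuming the shape of
+    # the generated base class; concrete grpc/rest transports override each
+    # of these stubs):
+    #
+    #     @property
+    #     def run_task(self):
+    #         raise NotImplementedError()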
+ methods = ( + "list_queues", + "get_queue", + "create_queue", + "update_queue", + "delete_queue", + "purge_queue", + "pause_queue", + "resume_queue", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + "list_tasks", + "get_task", + "create_task", + "delete_task", + "run_task", + "buffer_task", + "get_location", + "list_locations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_tasks_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_cloud_tasks_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudTasksTransport() + adc.assert_called_once() + + +def test_cloud_tasks_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudTasksClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + ], +) +def test_cloud_tasks_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
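+    # google.auth.default() resolves Application Default Credentials and
+    # returns a (credentials, project_id) tuple; the mock below fakes exactly
+    # that contract with (AnonymousCredentials(), None).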
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudTasksGrpcTransport, + transports.CloudTasksGrpcAsyncIOTransport, + transports.CloudTasksRestTransport, + ], +) +def test_cloud_tasks_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudTasksGrpcTransport, grpc_helpers), + (transports.CloudTasksGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cloud_tasks_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudtasks.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudtasks.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
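+    # client_cert_source_for_mtls is a zero-argument callback returning a
+    # (cert_bytes, key_bytes) pair; the transport is expected to hand those
+    # bytes to grpc.ssl_channel_credentials, which the assertion below checks
+    # against a second invocation of the same callback.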
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cloud_tasks_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudTasksRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cloud_tasks_host_no_port(transport_name): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudtasks.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudtasks.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cloud_tasks_host_with_port(transport_name): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudtasks.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudtasks.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudtasks.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cloud_tasks_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudTasksClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudTasksClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_queues._session + session2 = client2.transport.list_queues._session + assert session1 != session2 + session1 = client1.transport.get_queue._session + session2 = client2.transport.get_queue._session + assert session1 != session2 + session1 = client1.transport.create_queue._session + session2 = client2.transport.create_queue._session + assert session1 != session2 + session1 = client1.transport.update_queue._session + session2 = client2.transport.update_queue._session + assert session1 != session2 + session1 = client1.transport.delete_queue._session + session2 = client2.transport.delete_queue._session + assert session1 != session2 + session1 = client1.transport.purge_queue._session + session2 = client2.transport.purge_queue._session + assert session1 != session2 + session1 = client1.transport.pause_queue._session + session2 = client2.transport.pause_queue._session + assert session1 != session2 + session1 = client1.transport.resume_queue._session + session2 = client2.transport.resume_queue._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert 
session1 != session2
+    session1 = client1.transport.set_iam_policy._session
+    session2 = client2.transport.set_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.test_iam_permissions._session
+    session2 = client2.transport.test_iam_permissions._session
+    assert session1 != session2
+    session1 = client1.transport.list_tasks._session
+    session2 = client2.transport.list_tasks._session
+    assert session1 != session2
+    session1 = client1.transport.get_task._session
+    session2 = client2.transport.get_task._session
+    assert session1 != session2
+    session1 = client1.transport.create_task._session
+    session2 = client2.transport.create_task._session
+    assert session1 != session2
+    session1 = client1.transport.delete_task._session
+    session2 = client2.transport.delete_task._session
+    assert session1 != session2
+    session1 = client1.transport.run_task._session
+    session2 = client2.transport.run_task._session
+    assert session1 != session2
+    session1 = client1.transport.buffer_task._session
+    session2 = client2.transport.buffer_task._session
+    assert session1 != session2
+
+
+def test_cloud_tasks_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudTasksGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_cloud_tasks_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.CloudTasksGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.CloudTasksGrpcTransport, transports.CloudTasksGrpcAsyncIOTransport], +) +def test_cloud_tasks_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_queue_path(): + project = "squid" + location = "clam" + queue = "whelk" + expected = "projects/{project}/locations/{location}/queues/{queue}".format( + project=project, + location=location, + queue=queue, + ) + actual = CloudTasksClient.queue_path(project, location, queue) + assert expected == actual + + +def test_parse_queue_path(): + expected = { + "project": "octopus", + "location": "oyster", + "queue": "nudibranch", + } + path = CloudTasksClient.queue_path(**expected) + + # Check that the path construction is reversible. 
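+    # e.g. path == "projects/octopus/locations/oyster/queues/nudibranch";
+    # parse_queue_path() should recover exactly the kwargs used above.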
+ actual = CloudTasksClient.parse_queue_path(path) + assert expected == actual + + +def test_task_path(): + project = "cuttlefish" + location = "mussel" + queue = "winkle" + task = "nautilus" + expected = ( + "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format( + project=project, + location=location, + queue=queue, + task=task, + ) + ) + actual = CloudTasksClient.task_path(project, location, queue, task) + assert expected == actual + + +def test_parse_task_path(): + expected = { + "project": "scallop", + "location": "abalone", + "queue": "squid", + "task": "clam", + } + path = CloudTasksClient.task_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_task_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudTasksClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = CloudTasksClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CloudTasksClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = CloudTasksClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CloudTasksClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = CloudTasksClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = CloudTasksClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = CloudTasksClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudTasksClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CloudTasksClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = CloudTasksClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudTasksClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CloudTasksTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CloudTasksClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations(transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudTasksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
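+    # Note: patching __call__ on type(...) swaps the behavior for every
+    # unary-unary callable of that class, so a single patch point covers
+    # the mixin method regardless of which stub attribute is named.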
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudTasksAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = CloudTasksClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = CloudTasksAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
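+    # The "x-goog-request-params" metadata entry carries the URI-derived
+    # routing parameters (here "name=locations/abc") so the backend can route
+    # the request correctly; the assertion below checks it was attached.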
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = CloudTasksClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudTasksAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = CloudTasksClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = CloudTasksClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (CloudTasksClient, transports.CloudTasksGrpcTransport),
+        (CloudTasksAsyncClient, transports.CloudTasksGrpcAsyncIOTransport),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/packages/google-cloud-texttospeech/.OwlBot.yaml b/packages/google-cloud-texttospeech/.OwlBot.yaml
new file mode 100644
index 000000000000..4618254e09b4
--- /dev/null
+++ b/packages/google-cloud-texttospeech/.OwlBot.yaml
@@ -0,0 +1,24 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/texttospeech/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-texttospeech/$1 + +begin-after-commit-hash: f92897093108f24c013e9c15c3be2709417ba5d6 + diff --git a/packages/google-cloud-texttospeech/.coveragerc b/packages/google-cloud-texttospeech/.coveragerc new file mode 100644 index 000000000000..8f3a0ab7f83b --- /dev/null +++ b/packages/google-cloud-texttospeech/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/texttospeech/__init__.py + google/cloud/texttospeech/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-texttospeech/.flake8 b/packages/google-cloud-texttospeech/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-texttospeech/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-texttospeech/.gitignore b/packages/google-cloud-texttospeech/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-texttospeech/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc
+pylintrc.test
diff --git a/packages/google-cloud-texttospeech/.repo-metadata.json b/packages/google-cloud-texttospeech/.repo-metadata.json
new file mode 100644
index 000000000000..b8f167867386
--- /dev/null
+++ b/packages/google-cloud-texttospeech/.repo-metadata.json
@@ -0,0 +1,18 @@
+{
+    "name": "texttospeech",
+    "name_pretty": "Google Cloud Text-to-Speech",
+    "product_documentation": "https://cloud.google.com/text-to-speech",
+    "client_documentation": "https://cloud.google.com/python/docs/reference/texttospeech/latest",
+    "issue_tracker": "https://issuetracker.google.com/savedsearches/5235428",
+    "release_level": "stable",
+    "language": "python",
+    "library_type": "GAPIC_AUTO",
+    "repo": "googleapis/google-cloud-python",
+    "distribution_name": "google-cloud-texttospeech",
+    "api_id": "texttospeech.googleapis.com",
+    "requires_billing": true,
+    "default_version": "v1",
+    "codeowner_team": "@googleapis/cdpe-cloudai",
+    "api_shortname": "texttospeech",
+    "api_description": "enables easy integration of Google text-to-speech synthesis technologies into developer applications. Send text and receive synthesized audio output from the Cloud Text-to-Speech API service."
+}
diff --git a/packages/google-cloud-texttospeech/CHANGELOG.md b/packages/google-cloud-texttospeech/CHANGELOG.md
new file mode 100644
index 000000000000..7f4792531cfc
--- /dev/null
+++ b/packages/google-cloud-texttospeech/CHANGELOG.md
@@ -0,0 +1,423 @@
+# Changelog
+
+[PyPI History][1]
+
+[1]: https://pypi.org/project/google-cloud-texttospeech/#history
+
+## [2.14.1](https://github.com/googleapis/python-texttospeech/compare/v2.14.0...v2.14.1) (2023-01-20)
+
+
+### Bug Fixes
+
+* Add context manager return types ([6415162](https://github.com/googleapis/python-texttospeech/commit/641516224405ac91e08d5fee161a5fcde4ed0ba0))
+
+
+### Documentation
+
+* Add documentation for enums ([6415162](https://github.com/googleapis/python-texttospeech/commit/641516224405ac91e08d5fee161a5fcde4ed0ba0))
+
+## [2.14.0](https://github.com/googleapis/python-texttospeech/compare/v2.13.0...v2.14.0) (2023-01-10)
+
+
+### Features
+
+* Add support for python 3.11 ([#353](https://github.com/googleapis/python-texttospeech/issues/353)) ([91e5500](https://github.com/googleapis/python-texttospeech/commit/91e5500cde7939e5c8f37e3134d21742c741892a))
+
+## [2.13.0](https://github.com/googleapis/python-texttospeech/compare/v2.12.3...v2.13.0) (2022-12-15)
+
+
+### Features
+
+* Add LRS API ([07227a0](https://github.com/googleapis/python-texttospeech/commit/07227a06b555d590fc35055ec229f22c6d8dd139))
+* Add support for `google.cloud.texttospeech.__version__` ([a2a534c](https://github.com/googleapis/python-texttospeech/commit/a2a534ceccbad9114cb30599129eb168ab60cb5f))
+* Add typing to proto.Message based class attributes ([a2a534c](https://github.com/googleapis/python-texttospeech/commit/a2a534ceccbad9114cb30599129eb168ab60cb5f))
+
+
+### Bug Fixes
+
+* Add dict typing for client_options ([a2a534c](https://github.com/googleapis/python-texttospeech/commit/a2a534ceccbad9114cb30599129eb168ab60cb5f))
+* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([07227a0](https://github.com/googleapis/python-texttospeech/commit/07227a06b555d590fc35055ec229f22c6d8dd139))
+* Drop usage of pkg_resources ([07227a0](https://github.com/googleapis/python-texttospeech/commit/07227a06b555d590fc35055ec229f22c6d8dd139))
+* Fix timeout default values
([07227a0](https://github.com/googleapis/python-texttospeech/commit/07227a06b555d590fc35055ec229f22c6d8dd139)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([a2a534c](https://github.com/googleapis/python-texttospeech/commit/a2a534ceccbad9114cb30599129eb168ab60cb5f)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([07227a0](https://github.com/googleapis/python-texttospeech/commit/07227a06b555d590fc35055ec229f22c6d8dd139)) + +## [2.12.3](https://github.com/googleapis/python-texttospeech/compare/v2.12.2...v2.12.3) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#338](https://github.com/googleapis/python-texttospeech/issues/338)) ([aa92121](https://github.com/googleapis/python-texttospeech/commit/aa921215c05588b6555b7b41381b30f8b0079d54)) + +## [2.12.2](https://github.com/googleapis/python-texttospeech/compare/v2.12.1...v2.12.2) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#335](https://github.com/googleapis/python-texttospeech/issues/335)) ([50b394d](https://github.com/googleapis/python-texttospeech/commit/50b394dc5684b2df6cf544ba7bb5296d384ee0f8)) + +## [2.12.1](https://github.com/googleapis/python-texttospeech/compare/v2.12.0...v2.12.1) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#316](https://github.com/googleapis/python-texttospeech/issues/316)) ([ec75f1e](https://github.com/googleapis/python-texttospeech/commit/ec75f1e5382bd527d17fb347a7ecc7cd35f57ab0)) +* **deps:** require proto-plus >= 1.22.0 ([ec75f1e](https://github.com/googleapis/python-texttospeech/commit/ec75f1e5382bd527d17fb347a7ecc7cd35f57ab0)) + +## [2.12.0](https://github.com/googleapis/python-texttospeech/compare/v2.11.1...v2.12.0) (2022-07-16) + + +### Features + +* add audience parameter ([f450551](https://github.com/googleapis/python-texttospeech/commit/f450551482e52c7f5564bf735b705e22255dc5d8)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#304](https://github.com/googleapis/python-texttospeech/issues/304)) ([f450551](https://github.com/googleapis/python-texttospeech/commit/f450551482e52c7f5564bf735b705e22255dc5d8)) +* require python 3.7+ ([#306](https://github.com/googleapis/python-texttospeech/issues/306)) ([192277b](https://github.com/googleapis/python-texttospeech/commit/192277b9b62338840b55fe8e24be582a19f390cd)) + +## [2.11.1](https://github.com/googleapis/python-texttospeech/compare/v2.11.0...v2.11.1) (2022-06-06) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#293](https://github.com/googleapis/python-texttospeech/issues/293)) ([e5ab0d6](https://github.com/googleapis/python-texttospeech/commit/e5ab0d6b69e008653b1500742ede3ead3a748f58)) + + +### Documentation + +* fix changelog header to consistent size ([#294](https://github.com/googleapis/python-texttospeech/issues/294)) ([8a8b65c](https://github.com/googleapis/python-texttospeech/commit/8a8b65c0e4c39a5671e9998848ebb89ea38d301d)) + +## [2.11.0](https://github.com/googleapis/python-texttospeech/compare/v2.10.2...v2.11.0) (2022-03-10) + + +### Features + +* promote CustomVoiceParams to v1 ([#266](https://github.com/googleapis/python-texttospeech/issues/266)) 
([f484e7f](https://github.com/googleapis/python-texttospeech/commit/f484e7fe036fe57a4f432bf30c6421a6541ea486)) + +## [2.10.2](https://github.com/googleapis/python-texttospeech/compare/v2.10.1...v2.10.2) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#261](https://github.com/googleapis/python-texttospeech/issues/261)) ([f993058](https://github.com/googleapis/python-texttospeech/commit/f993058b1535e8cc0c71877b8dca9beae3dffb1b)) +* **deps:** require proto-plus>=1.15.0 ([f993058](https://github.com/googleapis/python-texttospeech/commit/f993058b1535e8cc0c71877b8dca9beae3dffb1b)) + +## [2.10.1](https://github.com/googleapis/python-texttospeech/compare/v2.10.0...v2.10.1) (2022-02-26) + + +### Documentation + +* add generated snippets ([#249](https://github.com/googleapis/python-texttospeech/issues/249)) ([f918e82](https://github.com/googleapis/python-texttospeech/commit/f918e82df2e0c499356fa1e095b13f7ac2cd3b6b)) + +## [2.10.0](https://github.com/googleapis/python-texttospeech/compare/v2.9.1...v2.10.0) (2022-02-03) + + +### Features + +* add api key support ([#242](https://github.com/googleapis/python-texttospeech/issues/242)) ([3b4f0d0](https://github.com/googleapis/python-texttospeech/commit/3b4f0d0749529b04ed7fedec3c4b06b6d42c12cd)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([4c11b12](https://github.com/googleapis/python-texttospeech/commit/4c11b127ece0009082fe91062f3f36c8f18e8ffc)) + + +### Documentation + +* update comments for ListVoicesRequest ([#244](https://github.com/googleapis/python-texttospeech/issues/244)) ([bc5b73f](https://github.com/googleapis/python-texttospeech/commit/bc5b73fbc62900f89a01486c6e8d42d459c34fd6)) + +## [2.9.1](https://www.github.com/googleapis/python-texttospeech/compare/v2.9.0...v2.9.1) (2022-01-08) + + +### Documentation + +* Added virtualenv comment for clarity ([#225](https://www.github.com/googleapis/python-texttospeech/issues/225)) ([61a7fce](https://www.github.com/googleapis/python-texttospeech/commit/61a7fcec0611712cdb1692b830db4aaca4d411b0)) +* update comments for ListVoicesRequest ([#229](https://www.github.com/googleapis/python-texttospeech/issues/229)) ([9ea340c](https://www.github.com/googleapis/python-texttospeech/commit/9ea340cee20298630a6a15bed0ed2df9f69b3d13)) + +## [2.9.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.8.0...v2.9.0) (2021-11-16) + + +### Features + +* update v1 proto ([#221](https://www.github.com/googleapis/python-texttospeech/issues/221)) ([e8776d7](https://www.github.com/googleapis/python-texttospeech/commit/e8776d7a482f495ed5ae0e1235609e3e2d3e6067)) + +## [2.8.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.7.1...v2.8.0) (2021-11-13) + + +### Features + +* **v1beta1:** add CustomVoiceParams ([#215](https://www.github.com/googleapis/python-texttospeech/issues/215)) ([6a18d0f](https://www.github.com/googleapis/python-texttospeech/commit/6a18d0f097e992bd4d90eaf5032ce98aa4af004a)) + + +### Documentation + +* fix docstring formatting ([#218](https://www.github.com/googleapis/python-texttospeech/issues/218)) ([2c57f95](https://www.github.com/googleapis/python-texttospeech/commit/2c57f95a1af747b49dac41628bd43d485a68583e)) + +## [2.7.1](https://www.github.com/googleapis/python-texttospeech/compare/v2.7.0...v2.7.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency 
([99ac70b](https://www.github.com/googleapis/python-texttospeech/commit/99ac70ba45d7c500d0f19a30dead060c0db4453c)) +* **deps:** require google-api-core >= 1.28.0 ([99ac70b](https://www.github.com/googleapis/python-texttospeech/commit/99ac70ba45d7c500d0f19a30dead060c0db4453c)) + + +### Documentation + +* list oneofs in docstring ([99ac70b](https://www.github.com/googleapis/python-texttospeech/commit/99ac70ba45d7c500d0f19a30dead060c0db4453c)) + +## [2.7.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.6.0...v2.7.0) (2021-10-18) + + +### Features + +* add support for python 3.10 ([#202](https://www.github.com/googleapis/python-texttospeech/issues/202)) ([2ffa70b](https://www.github.com/googleapis/python-texttospeech/commit/2ffa70b6c35707142d66476b0ef5e3bd8d6d0052)) + +## [2.6.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.5.3...v2.6.0) (2021-10-07) + + +### Features + +* add context manager support in client ([#196](https://www.github.com/googleapis/python-texttospeech/issues/196)) ([73d9290](https://www.github.com/googleapis/python-texttospeech/commit/73d9290cdea69a00ba317ae017a1d07bcf734989)) + +## [2.5.3](https://www.github.com/googleapis/python-texttospeech/compare/v2.5.2...v2.5.3) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([e74b994](https://www.github.com/googleapis/python-texttospeech/commit/e74b9942d480bf7e360bc61ff183909873fc20a6)) + +## [2.5.2](https://www.github.com/googleapis/python-texttospeech/compare/v2.5.1...v2.5.2) (2021-07-28) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#171](https://www.github.com/googleapis/python-texttospeech/issues/171)) ([9c1c437](https://www.github.com/googleapis/python-texttospeech/commit/9c1c4371caa712a749b406257c09d98f6b428e7b)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#166](https://www.github.com/googleapis/python-texttospeech/issues/166)) ([053abe3](https://www.github.com/googleapis/python-texttospeech/commit/053abe3afc1107bdacd164e6ca4cd60b5ca07df7)) + + +### Miscellaneous Chores + +* release as 2.5.2 ([#172](https://www.github.com/googleapis/python-texttospeech/issues/172)) ([3804727](https://www.github.com/googleapis/python-texttospeech/commit/3804727995d0357fa0c4c5c246210768e0ce7124)) + +## [2.5.1](https://www.github.com/googleapis/python-texttospeech/compare/v2.5.0...v2.5.1) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#165](https://www.github.com/googleapis/python-texttospeech/issues/165)) ([d78b384](https://www.github.com/googleapis/python-texttospeech/commit/d78b384d302a1976682f35875ce2d4f7b60d7a6c)) + +## [2.5.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.4.0...v2.5.0) (2021-07-01) + + +### Features + +* add always_use_jwt_access ([#155](https://www.github.com/googleapis/python-texttospeech/issues/155)) ([cd10c37](https://www.github.com/googleapis/python-texttospeech/commit/cd10c3704db610f2abf65c9142cfdaa867d8490a)) + + +### Bug Fixes + +* disable always_use_jwt_access ([#159](https://www.github.com/googleapis/python-texttospeech/issues/159)) ([d109303](https://www.github.com/googleapis/python-texttospeech/commit/d109303898facc663a6e7fe9212440831c1eeb75)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-texttospeech/issues/1127)) ([#150](https://www.github.com/googleapis/python-texttospeech/issues/150)) 
([d2954ba](https://www.github.com/googleapis/python-texttospeech/commit/d2954ba91385db6d581f75154fb11c969f6ca0e2)), closes [#1126](https://www.github.com/googleapis/python-texttospeech/issues/1126)
+
+## [2.4.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.3.0...v2.4.0) (2021-05-28)
+
+
+### Features
+
+* support self-signed JWT flow for service accounts ([8a08836](https://www.github.com/googleapis/python-texttospeech/commit/8a08836487c1b7e4e58d3c07a4e26005d40793f0))
+
+
+### Bug Fixes
+
+* add async client to %name_%version/init.py ([8a08836](https://www.github.com/googleapis/python-texttospeech/commit/8a08836487c1b7e4e58d3c07a4e26005d40793f0))
+
+## [2.3.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.2.0...v2.3.0) (2021-03-31)
+
+
+### Features
+
+* add `from_service_account_info` to clients ([139e6e8](https://www.github.com/googleapis/python-texttospeech/commit/139e6e8511cdce4c0be7983520f7efc47092f3b1))
+* Add ALAW support on client library and improve the ListVoiceRequest message's documentation ([#113](https://www.github.com/googleapis/python-texttospeech/issues/113)) ([8bbd380](https://www.github.com/googleapis/python-texttospeech/commit/8bbd38014fe796b30f4b12ae9432d3a05c130063))
+* add common resource helper methods ([139e6e8](https://www.github.com/googleapis/python-texttospeech/commit/139e6e8511cdce4c0be7983520f7efc47092f3b1))
+* support custom client info ([#82](https://www.github.com/googleapis/python-texttospeech/issues/82)) ([0612793](https://www.github.com/googleapis/python-texttospeech/commit/06127932a920f6318db8f25d6430755b35d09bb5))
+
+
+### Bug Fixes
+
+* change default retry and timeout settings ([139e6e8](https://www.github.com/googleapis/python-texttospeech/commit/139e6e8511cdce4c0be7983520f7efc47092f3b1))
+
+
+### Documentation
+
+* use sphinx-1.5.5 for sphinx-docfx-yaml ([#89](https://www.github.com/googleapis/python-texttospeech/issues/89)) ([feb04c5](https://www.github.com/googleapis/python-texttospeech/commit/feb04c50b56c2c3359b92d8b5887c8ee50be2b95))
+
+## [2.2.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.1.0...v2.2.0) (2020-08-10)
+
+
+### Features
+
+* incorporate upstream changes ([#73](https://www.github.com/googleapis/python-texttospeech/issues/73)) ([8ee5447](https://www.github.com/googleapis/python-texttospeech/commit/8ee544740f18497d9925bcf77e5ab96695503589))
+  * support MULAW audio encoding
+  * support MP3_64_KBPS audio encoding
+  * support timepointing via SSML `<mark>` tag
+  * support quota_project_id
+
+## [2.1.0](https://www.github.com/googleapis/python-texttospeech/compare/v2.0.0...v2.1.0) (2020-06-20)
+
+
+### Features
+
+* add async client ([#53](https://www.github.com/googleapis/python-texttospeech/issues/53)) ([887d8d5](https://www.github.com/googleapis/python-texttospeech/commit/887d8d501ce9255fee44170b5fc40ebfb1ea953d))
+
+
+### Documentation
+
+* change relative URLs to absolute URLs to fix broken links ([#40](https://www.github.com/googleapis/python-texttospeech/issues/40)) ([b68df44](https://www.github.com/googleapis/python-texttospeech/commit/b68df446daa7983cad1d31553ece6df569c932b2))
+
+## [2.0.0](https://www.github.com/googleapis/python-texttospeech/compare/v1.0.1...v2.0.0) (2020-06-01)
+
+
+### ⚠ BREAKING CHANGES
+
+* This release has breaking changes. See the [2.0.0 Migration Guide](https://github.com/googleapis/python-texttospeech/blob/main/UPGRADING.md#200-migration-guide) for details.
+ +### Features + +* regenerate with microgenerator ([#30](https://www.github.com/googleapis/python-texttospeech/issues/30)) ([3181b55](https://www.github.com/googleapis/python-texttospeech/commit/3181b55733da7aecde37009a0dd77117434deceb)) + + +### Bug Fixes + +* address PR comments ([65f903b](https://www.github.com/googleapis/python-texttospeech/commit/65f903b00395716fad272ca4fc973755735e1e20)) + +## [1.0.1](https://www.github.com/googleapis/python-texttospeech/compare/v1.0.0...v1.0.1) (2020-02-28) + + +### Bug Fixes + +* update url in setup.py ([#13](https://www.github.com/googleapis/python-texttospeech/issues/13)) ([dc17707](https://www.github.com/googleapis/python-texttospeech/commit/dc17707c41feb885d94f1045ef14e4b9e0898716)) + +## [1.0.0](https://www.github.com/googleapis/python-texttospeech/compare/v0.5.0...v1.0.0) (2020-02-28) + + +### Features + +* bump release status to GA ([#9](https://www.github.com/googleapis/python-texttospeech/issues/9)) ([03a639e](https://www.github.com/googleapis/python-texttospeech/commit/03a639e16256d9211c1fe88991440f2cc22eccd6)) + +## 0.5.0 + +07-24-2019 17:48 PDT + + +### Implementation Changes +- Allow kwargs to be passed to create_channel (via synth). ([#8407](https://github.com/googleapis/google-cloud-python/pull/8407)) +- Reformat protos, update nox session docs (via synth). ([#7941](https://github.com/googleapis/google-cloud-python/pull/7941)) +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) + +### New Features +- Add 'client_options' support (via synth). ([#8525](https://github.com/googleapis/google-cloud-python/pull/8525)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) +- Repair top-level API reference page. ([#8435](https://github.com/googleapis/google-cloud-python/pull/8435)) +- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) + +### Internal / Testing Changes +- Pin black version (via synth). ([#8599](https://github.com/googleapis/google-cloud-python/pull/8599)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Declare encoding as utf-8 in pb2 files (via synth). ([#8367](https://github.com/googleapis/google-cloud-python/pull/8367)) +- Add disclaimer to auto-generated template files (via synth). ([#8331](https://github.com/googleapis/google-cloud-python/pull/8331)) +- Blacken (via synth). ([#8281](https://github.com/googleapis/google-cloud-python/pull/8281)) + +## 0.4.0 + +02-07-2019 15:21 PST + + +### Implementation Changes +- Pick up stub docstring fix in GAPIC generator. ([#6984](https://github.com/googleapis/google-cloud-python/pull/6984)) + +### New Features +- Protoc updates to include effects_profile_id. 
([#7097](https://github.com/googleapis/google-cloud-python/pull/7097)) + +### Documentation +- Fix `Client Library Documentation` link ([#7109](https://github.com/googleapis/google-cloud-python/pull/7109)) + +### Internal / Testing Changes +- Copy proto files alongside protoc versions. +- Add protos as an artifact to library ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) +- Update copyright headers and docstring quoting + +## 0.3.0 + +12-18-2018 09:54 PST + + +### Implementation Changes +- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) +- Pick up fixes to GAPIC generator. ([#6510](https://github.com/googleapis/google-cloud-python/pull/6510)) +- Fix `client_info` bug, update docstrings. ([#6423](https://github.com/googleapis/google-cloud-python/pull/6423)) +- Re-generate library using texttospeech/synth.py ([#5981](https://github.com/googleapis/google-cloud-python/pull/5981)) +- Add gRPC Transport layer. ([#5959](https://github.com/googleapis/google-cloud-python/pull/5959)) + +### Dependencies +- Bump minimum `api_core` version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) +- Avoid broken 'google-common-apis 1.5.4' release. ([#6355](https://github.com/googleapis/google-cloud-python/pull/6355)) + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Docs: normalize use of support level badges ([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) +- Docs: Replace links to `/stable/` with `/latest/`. ([#5901](https://github.com/googleapis/google-cloud-python/pull/5901)) +- Fix docs links for TTS. ([#5483](https://github.com/googleapis/google-cloud-python/pull/5483)) + +### Internal / Testing Changes +- Add synth.metadata. ([#6870](https://github.com/googleapis/google-cloud-python/pull/6870)) +- Update noxfile. +- blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) +- Add 'mock' to unit test dependencies for autogen libs. ([#6402](https://github.com/googleapis/google-cloud-python/pull/6402)) +- Add / fix badges for PyPI / versions. ([#6158](https://github.com/googleapis/google-cloud-python/pull/6158)) +- Use new Nox ([#6175](https://github.com/googleapis/google-cloud-python/pull/6175)) + +## 0.2.0 + +### New Features + +- Add the text-to-speech v1 API surface. (#5468) +- Re-generate the text-to-speech v1beta1 API surface. (#5468) + +### Documentation + +- Rename releases to changelog and include from CHANGELOG.md (#5191) + +### Internal / Testing Changes + +- Add Test runs for Python 3.7 and remove 3.4 (#5295) + +## 0.1.0 + +### Interface additions + +- Added text-to-speech v1beta1. 
(#5049) diff --git a/packages/google-cloud-texttospeech/CODE_OF_CONDUCT.md b/packages/google-cloud-texttospeech/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-texttospeech/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. 
+ + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-texttospeech/CONTRIBUTING.rst b/packages/google-cloud-texttospeech/CONTRIBUTING.rst new file mode 100644 index 000000000000..4e877bec5d65 --- /dev/null +++ b/packages/google-cloud-texttospeech/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. 
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
+    For expediency, we do not run them in older versions of Python 3.
+
+    This alone will not run the tests. You'll need to change some local
+    auth settings and change some configuration in your project to
+    run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_environments>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
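For orientation, the ``noxfile.py`` files referenced above declare their sessions with the ``nox`` decorator API. A minimal sketch of a unit-test session (illustrative only, not the generated noxfile shipped in this package) might look like::

    # noxfile.py -- illustrative sketch; the generated noxfiles in this
    # repo define many more sessions (lint, blacken, docs, system, ...).
    import nox


    @nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"])
    def unit(session):
        # Install test dependencies and the package itself into the
        # session's isolated virtualenv.
        session.install("pytest")
        session.install("-e", ".")
        # Run the unit tests; extra command-line arguments such as
        # ``-k <name of test>`` are passed through via posargs.
        session.run("pytest", "tests/unit", *session.posargs)

Running ``nox -s unit-3.11`` then selects just the Python 3.11 variant of that session.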
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` folder. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-texttospeech
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-texttospeech/noxfile.py
+
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom).
After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-texttospeech/LICENSE b/packages/google-cloud-texttospeech/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-texttospeech/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/packages/google-cloud-texttospeech/MANIFEST.in b/packages/google-cloud-texttospeech/MANIFEST.in
new file mode 100644
index 000000000000..e0a66705318e
--- /dev/null
+++ b/packages/google-cloud-texttospeech/MANIFEST.in
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/packages/google-cloud-texttospeech/README.rst b/packages/google-cloud-texttospeech/README.rst
new file mode 100644
index 000000000000..bd8b21dc55f6
--- /dev/null
+++ b/packages/google-cloud-texttospeech/README.rst
@@ -0,0 +1,108 @@
+Python Client for Google Cloud Text-to-Speech
+=============================================
+
+|stable| |pypi| |versions|
+
+`Google Cloud Text-to-Speech`_: enables easy integration of Google speech synthesis technologies into developer applications. Send text and receive synthesized audio output from the Cloud Text-to-Speech API service.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg
+   :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-texttospeech.svg
+   :target: https://pypi.org/project/google-cloud-texttospeech/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-texttospeech.svg
+   :target: https://pypi.org/project/google-cloud-texttospeech/
+.. _Google Cloud Text-to-Speech: https://cloud.google.com/text-to-speech
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/texttospeech/latest
+.. _Product Documentation: https://cloud.google.com/text-to-speech
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Google Cloud Text-to-Speech.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Google Cloud Text-to-Speech.: https://cloud.google.com/text-to-speech
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-texttospeech
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-texttospeech
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Google Cloud Text-to-Speech
+  to see other available methods on the client.
+- Read the `Google Cloud Text-to-Speech Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Google Cloud Text-to-Speech Product documentation: https://cloud.google.com/text-to-speech
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-texttospeech/SECURITY.md b/packages/google-cloud-texttospeech/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-texttospeech/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
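With the package installed and authentication configured, basic usage follows the library's published quickstart. A minimal synthesis call looks roughly like the sketch below (the output filename and voice settings are illustrative)::

    from google.cloud import texttospeech

    # Instantiate a client; credentials are picked up from the environment
    # (e.g. GOOGLE_APPLICATION_CREDENTIALS or local gcloud credentials).
    client = texttospeech.TextToSpeechClient()

    # The text to synthesize, the voice to use, and the audio format to return.
    synthesis_input = texttospeech.SynthesisInput(text="Hello, world!")
    voice = texttospeech.VoiceSelectionParams(
        language_code="en-US",
        ssml_gender=texttospeech.SsmlVoiceGender.NEUTRAL,
    )
    audio_config = texttospeech.AudioConfig(
        audio_encoding=texttospeech.AudioEncoding.MP3
    )

    # Call the API and write the returned audio bytes to disk.
    response = client.synthesize_speech(
        input=synthesis_input, voice=voice, audio_config=audio_config
    )
    with open("output.mp3", "wb") as out:
        out.write(response.audio_content)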
diff --git a/packages/google-cloud-texttospeech/docs/CHANGELOG.md b/packages/google-cloud-texttospeech/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-texttospeech/docs/README.rst b/packages/google-cloud-texttospeech/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-texttospeech/docs/_static/custom.css b/packages/google-cloud-texttospeech/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-texttospeech/docs/_templates/layout.html b/packages/google-cloud-texttospeech/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-texttospeech/docs/conf.py b/packages/google-cloud-texttospeech/docs/conf.py new file mode 100644 index 000000000000..88477d10c2e8 --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-texttospeech documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-texttospeech" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = [
+    "_build",
+    "**/.nox/**/*",
+    "samples/AUTHORING_GUIDE.md",
+    "samples/CONTRIBUTING.md",
+    "samples/snippets/README.rst",
+]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for google-cloud-texttospeech",
+    "github_user": "googleapis",
+    "github_repo": "google-cloud-python",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-texttospeech-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-texttospeech.tex", + "google-cloud-texttospeech Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links.
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-texttospeech", + "google-cloud-texttospeech Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-texttospeech", + "google-cloud-texttospeech Documentation", + author, + "google-cloud-texttospeech", + "google-cloud-texttospeech Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-texttospeech/docs/index.rst b/packages/google-cloud-texttospeech/docs/index.rst new file mode 100644 index 000000000000..6c1f116934b9 --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/index.rst @@ -0,0 +1,34 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of Google Cloud Text-to-Speech. +By default, you will get version ``texttospeech_v1``. + + +API Reference (v1) +------------------ +.. toctree:: + :maxdepth: 2 + + texttospeech_v1/services + texttospeech_v1/types + +API Reference (v1beta1) +----------------------- +.. toctree:: + :maxdepth: 2 + + texttospeech_v1beta1/services + texttospeech_v1beta1/types + + +Changelog +--------- + +For a list of all ``google-cloud-texttospeech`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-texttospeech/docs/multiprocessing.rst b/packages/google-cloud-texttospeech/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +..
note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-texttospeech/docs/texttospeech_v1/services.rst b/packages/google-cloud-texttospeech/docs/texttospeech_v1/services.rst new file mode 100644 index 000000000000..71958842bafc --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/texttospeech_v1/services.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Texttospeech v1 API +============================================= +.. toctree:: + :maxdepth: 2 + + text_to_speech + text_to_speech_long_audio_synthesize diff --git a/packages/google-cloud-texttospeech/docs/texttospeech_v1/text_to_speech.rst b/packages/google-cloud-texttospeech/docs/texttospeech_v1/text_to_speech.rst new file mode 100644 index 000000000000..2ad3a8e92df5 --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/texttospeech_v1/text_to_speech.rst @@ -0,0 +1,6 @@ +TextToSpeech +------------------------------ + +.. automodule:: google.cloud.texttospeech_v1.services.text_to_speech + :members: + :inherited-members: diff --git a/packages/google-cloud-texttospeech/docs/texttospeech_v1/text_to_speech_long_audio_synthesize.rst b/packages/google-cloud-texttospeech/docs/texttospeech_v1/text_to_speech_long_audio_synthesize.rst new file mode 100644 index 000000000000..2a202504277c --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/texttospeech_v1/text_to_speech_long_audio_synthesize.rst @@ -0,0 +1,6 @@ +TextToSpeechLongAudioSynthesize +------------------------------------------------- + +.. automodule:: google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize + :members: + :inherited-members: diff --git a/packages/google-cloud-texttospeech/docs/texttospeech_v1/types.rst b/packages/google-cloud-texttospeech/docs/texttospeech_v1/types.rst new file mode 100644 index 000000000000..bfe321928736 --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/texttospeech_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Texttospeech v1 API +========================================== + +.. automodule:: google.cloud.texttospeech_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/services.rst b/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/services.rst new file mode 100644 index 000000000000..338598a60eb8 --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/services.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Texttospeech v1beta1 API +================================================== +.. toctree:: + :maxdepth: 2 + + text_to_speech + text_to_speech_long_audio_synthesize diff --git a/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/text_to_speech.rst b/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/text_to_speech.rst new file mode 100644 index 000000000000..8521daf8070a --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/text_to_speech.rst @@ -0,0 +1,6 @@ +TextToSpeech +------------------------------ + +..
automodule:: google.cloud.texttospeech_v1beta1.services.text_to_speech + :members: + :inherited-members: diff --git a/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/text_to_speech_long_audio_synthesize.rst b/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/text_to_speech_long_audio_synthesize.rst new file mode 100644 index 000000000000..50ef34c6c59f --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/text_to_speech_long_audio_synthesize.rst @@ -0,0 +1,6 @@ +TextToSpeechLongAudioSynthesize +------------------------------------------------- + +.. automodule:: google.cloud.texttospeech_v1beta1.services.text_to_speech_long_audio_synthesize + :members: + :inherited-members: diff --git a/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/types.rst b/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/types.rst new file mode 100644 index 000000000000..57ba4be821d0 --- /dev/null +++ b/packages/google-cloud-texttospeech/docs/texttospeech_v1beta1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Texttospeech v1beta1 API +=============================================== + +.. automodule:: google.cloud.texttospeech_v1beta1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/__init__.py new file mode 100644 index 000000000000..b297b9a35f6d --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/__init__.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
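The ``__init__.py`` that begins above re-exports the ``texttospeech_v1`` surface, which is what index.rst means by "by default, you will get version ``texttospeech_v1``" — the unversioned and versioned imports resolve to the same classes. A minimal sketch, assuming the package is installed:

.. code-block:: python

    # The top-level package re-exports the v1 clients, so these two
    # imports refer to the same class objects.
    from google.cloud import texttospeech
    from google.cloud import texttospeech_v1

    assert texttospeech.TextToSpeechClient is texttospeech_v1.TextToSpeechClient
    assert (
        texttospeech.TextToSpeechAsyncClient
        is texttospeech_v1.TextToSpeechAsyncClient
    )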
+# +from google.cloud.texttospeech import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.texttospeech_v1.services.text_to_speech.async_client import ( + TextToSpeechAsyncClient, +) +from google.cloud.texttospeech_v1.services.text_to_speech.client import ( + TextToSpeechClient, +) +from google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize.async_client import ( + TextToSpeechLongAudioSynthesizeAsyncClient, +) +from google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize.client import ( + TextToSpeechLongAudioSynthesizeClient, +) +from google.cloud.texttospeech_v1.types.cloud_tts import ( + AudioConfig, + AudioEncoding, + CustomVoiceParams, + ListVoicesRequest, + ListVoicesResponse, + SsmlVoiceGender, + SynthesisInput, + SynthesizeSpeechRequest, + SynthesizeSpeechResponse, + Voice, + VoiceSelectionParams, +) +from google.cloud.texttospeech_v1.types.cloud_tts_lrs import ( + SynthesizeLongAudioMetadata, + SynthesizeLongAudioRequest, + SynthesizeLongAudioResponse, +) + +__all__ = ( + "TextToSpeechClient", + "TextToSpeechAsyncClient", + "TextToSpeechLongAudioSynthesizeClient", + "TextToSpeechLongAudioSynthesizeAsyncClient", + "AudioConfig", + "CustomVoiceParams", + "ListVoicesRequest", + "ListVoicesResponse", + "SynthesisInput", + "SynthesizeSpeechRequest", + "SynthesizeSpeechResponse", + "Voice", + "VoiceSelectionParams", + "AudioEncoding", + "SsmlVoiceGender", + "SynthesizeLongAudioMetadata", + "SynthesizeLongAudioRequest", + "SynthesizeLongAudioResponse", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py new file mode 100644 index 000000000000..ef7c50064e79 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech/py.typed b/packages/google-cloud-texttospeech/google/cloud/texttospeech/py.typed new file mode 100644 index 000000000000..9b87c1e1cbf9 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-texttospeech package uses inline types. diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/__init__.py new file mode 100644 index 000000000000..699d871c8539 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/__init__.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.texttospeech_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.text_to_speech import TextToSpeechAsyncClient, TextToSpeechClient +from .services.text_to_speech_long_audio_synthesize import ( + TextToSpeechLongAudioSynthesizeAsyncClient, + TextToSpeechLongAudioSynthesizeClient, +) +from .types.cloud_tts import ( + AudioConfig, + AudioEncoding, + CustomVoiceParams, + ListVoicesRequest, + ListVoicesResponse, + SsmlVoiceGender, + SynthesisInput, + SynthesizeSpeechRequest, + SynthesizeSpeechResponse, + Voice, + VoiceSelectionParams, +) +from .types.cloud_tts_lrs import ( + SynthesizeLongAudioMetadata, + SynthesizeLongAudioRequest, + SynthesizeLongAudioResponse, +) + +__all__ = ( + "TextToSpeechAsyncClient", + "TextToSpeechLongAudioSynthesizeAsyncClient", + "AudioConfig", + "AudioEncoding", + "CustomVoiceParams", + "ListVoicesRequest", + "ListVoicesResponse", + "SsmlVoiceGender", + "SynthesisInput", + "SynthesizeLongAudioMetadata", + "SynthesizeLongAudioRequest", + "SynthesizeLongAudioResponse", + "SynthesizeSpeechRequest", + "SynthesizeSpeechResponse", + "TextToSpeechClient", + "TextToSpeechLongAudioSynthesizeClient", + "Voice", + "VoiceSelectionParams", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_metadata.json b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_metadata.json new file mode 100644 index 000000000000..f8570d743151 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_metadata.json @@ -0,0 +1,92 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.texttospeech_v1", + "protoPackage": "google.cloud.texttospeech.v1", + "schema": "1.0", + "services": { + "TextToSpeech": { + "clients": { + "grpc": { + "libraryClient": "TextToSpeechClient", + "rpcs": { + "ListVoices": { + "methods": [ + "list_voices" + ] + }, + "SynthesizeSpeech": { + "methods": [ + "synthesize_speech" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TextToSpeechAsyncClient", + "rpcs": { + "ListVoices": { + "methods": [ + "list_voices" + ] + }, + "SynthesizeSpeech": { + "methods": [ + "synthesize_speech" + ] + } + } + }, + "rest": { + "libraryClient": "TextToSpeechClient", + "rpcs": { + "ListVoices": { + "methods": [ + "list_voices" + ] + }, + "SynthesizeSpeech": { + "methods": [ + "synthesize_speech" + ] + } + } + } + } + }, + "TextToSpeechLongAudioSynthesize": { + "clients": { + "grpc": { + "libraryClient": "TextToSpeechLongAudioSynthesizeClient", + "rpcs": { + "SynthesizeLongAudio": { + "methods": [ + "synthesize_long_audio" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TextToSpeechLongAudioSynthesizeAsyncClient", + "rpcs": { + "SynthesizeLongAudio": { + "methods": [ + "synthesize_long_audio" + ] + } + } + }, + "rest": { + "libraryClient": "TextToSpeechLongAudioSynthesizeClient", + "rpcs": { + "SynthesizeLongAudio": { + "methods": [ + "synthesize_long_audio" + ] + } + } + } + } + } + } +} diff --git 
a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py new file mode 100644 index 000000000000..ef7c50064e79 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/py.typed b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/py.typed new file mode 100644 index 000000000000..9b87c1e1cbf9 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-texttospeech package uses inline types. diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/__init__.py new file mode 100644 index 000000000000..028dfdc17414 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
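The ``py.typed`` marker files above opt the distribution into PEP 561, so type checkers read the package's inline annotations directly. A small sketch of the effect, assuming ``mypy`` is installed (the file name ``check.py`` is illustrative):

.. code-block:: python

    # check.py -- illustrative only; because the installed package ships a
    # py.typed marker, mypy consumes its inline annotations and should reject
    # the int argument below (language_code expects Optional[str]).
    from google.cloud import texttospeech_v1

    client = texttospeech_v1.TextToSpeechClient()
    client.list_voices(language_code=123)  # mypy: incompatible argument type

Run as ``mypy check.py``.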
+# +from .async_client import TextToSpeechAsyncClient +from .client import TextToSpeechClient + +__all__ = ( + "TextToSpeechClient", + "TextToSpeechAsyncClient", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py new file mode 100644 index 000000000000..020b29c670ec --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/async_client.py @@ -0,0 +1,456 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts + +from .client import TextToSpeechClient +from .transports.base import DEFAULT_CLIENT_INFO, TextToSpeechTransport +from .transports.grpc_asyncio import TextToSpeechGrpcAsyncIOTransport + + +class TextToSpeechAsyncClient: + """Service that implements Google Cloud Text-to-Speech API.""" + + _client: TextToSpeechClient + + DEFAULT_ENDPOINT = TextToSpeechClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TextToSpeechClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(TextToSpeechClient.model_path) + parse_model_path = staticmethod(TextToSpeechClient.parse_model_path) + common_billing_account_path = staticmethod( + TextToSpeechClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TextToSpeechClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TextToSpeechClient.common_folder_path) + parse_common_folder_path = staticmethod(TextToSpeechClient.parse_common_folder_path) + common_organization_path = staticmethod(TextToSpeechClient.common_organization_path) + parse_common_organization_path = staticmethod( + TextToSpeechClient.parse_common_organization_path + ) + common_project_path = staticmethod(TextToSpeechClient.common_project_path) + parse_common_project_path = staticmethod( + TextToSpeechClient.parse_common_project_path + ) + common_location_path = staticmethod(TextToSpeechClient.common_location_path) + parse_common_location_path = staticmethod( + 
TextToSpeechClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextToSpeechAsyncClient: The constructed client. + """ + return TextToSpeechClient.from_service_account_info.__func__(TextToSpeechAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextToSpeechAsyncClient: The constructed client. + """ + return TextToSpeechClient.from_service_account_file.__func__(TextToSpeechAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TextToSpeechClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TextToSpeechTransport: + """Returns the transport used by the client instance. + + Returns: + TextToSpeechTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(TextToSpeechClient).get_transport_class, type(TextToSpeechClient) + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, TextToSpeechTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text to speech client.
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TextToSpeechTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TextToSpeechClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def list_voices( + self, + request: Optional[Union[cloud_tts.ListVoicesRequest, dict]] = None, + *, + language_code: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_tts.ListVoicesResponse: + r"""Returns a list of Voices supported for synthesis. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1 + + async def sample_list_voices(): + # Create a client + client = texttospeech_v1.TextToSpeechAsyncClient() + + # Initialize request argument(s) + request = texttospeech_v1.ListVoicesRequest( + ) + + # Make the request + response = await client.list_voices(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.texttospeech_v1.types.ListVoicesRequest, dict]]): + The request object. The top-level message sent by the client for the + ``ListVoices`` method. + language_code (:class:`str`): + Optional. Recommended. + `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__ + language tag. If not specified, the API will return all + supported voices. If specified, the ListVoices call will + only return voices that can be used to synthesize this + language_code. For example, if you specify ``"en-NZ"``, + all ``"en-NZ"`` voices will be returned. If you specify + ``"no"``, both ``"no-\*"`` (Norwegian) and ``"nb-\*"`` + (Norwegian Bokmal) voices will be returned.
+ + This corresponds to the ``language_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.texttospeech_v1.types.ListVoicesResponse: + The message returned to the client by the ListVoices + method. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([language_code]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_tts.ListVoicesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if language_code is not None: + request.language_code = language_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_voices, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def synthesize_speech( + self, + request: Optional[Union[cloud_tts.SynthesizeSpeechRequest, dict]] = None, + *, + input: Optional[cloud_tts.SynthesisInput] = None, + voice: Optional[cloud_tts.VoiceSelectionParams] = None, + audio_config: Optional[cloud_tts.AudioConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_tts.SynthesizeSpeechResponse: + r"""Synthesizes speech synchronously: receive results + after all text input has been processed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1 + + async def sample_synthesize_speech(): + # Create a client + client = texttospeech_v1.TextToSpeechAsyncClient() + + # Initialize request argument(s) + input = texttospeech_v1.SynthesisInput() + input.text = "text_value" + + voice = texttospeech_v1.VoiceSelectionParams() + voice.language_code = "language_code_value" + + audio_config = texttospeech_v1.AudioConfig() + audio_config.audio_encoding = "ALAW" + + request = texttospeech_v1.SynthesizeSpeechRequest( + input=input, + voice=voice, + audio_config=audio_config, + ) + + # Make the request + response = await client.synthesize_speech(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.texttospeech_v1.types.SynthesizeSpeechRequest, dict]]): + The request object. The top-level message sent by the client for the + ``SynthesizeSpeech`` method. + input (:class:`google.cloud.texttospeech_v1.types.SynthesisInput`): + Required. 
The Synthesizer requires + either plain text or SSML as input. + + This corresponds to the ``input`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + voice (:class:`google.cloud.texttospeech_v1.types.VoiceSelectionParams`): + Required. The desired voice of the + synthesized audio. + + This corresponds to the ``voice`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audio_config (:class:`google.cloud.texttospeech_v1.types.AudioConfig`): + Required. The configuration of the + synthesized audio. + + This corresponds to the ``audio_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.texttospeech_v1.types.SynthesizeSpeechResponse: + The message returned to the client by the + SynthesizeSpeech method. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input, voice, audio_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_tts.SynthesizeSpeechRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if input is not None: + request.input = input + if voice is not None: + request.voice = voice + if audio_config is not None: + request.audio_config = audio_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.synthesize_speech, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "TextToSpeechAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextToSpeechAsyncClient",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py new file mode 100644 index 000000000000..8fcfc6b4de35 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/client.py @@ -0,0 +1,697 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
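Since ``TextToSpeechAsyncClient`` (completed above) defines ``__aenter__``/``__aexit__``, it can be driven as an async context manager that closes its transport on exit. A minimal usage sketch, assuming Application Default Credentials are available:

.. code-block:: python

    import asyncio

    from google.cloud import texttospeech_v1


    async def main() -> None:
        # Exiting the block closes the underlying gRPC transport.
        async with texttospeech_v1.TextToSpeechAsyncClient() as client:
            response = await client.list_voices(language_code="en-US")
            for voice in response.voices:
                print(voice.name)


    asyncio.run(main())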
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts + +from .transports.base import DEFAULT_CLIENT_INFO, TextToSpeechTransport +from .transports.grpc import TextToSpeechGrpcTransport +from .transports.grpc_asyncio import TextToSpeechGrpcAsyncIOTransport +from .transports.rest import TextToSpeechRestTransport + + +class TextToSpeechClientMeta(type): + """Metaclass for the TextToSpeech client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[TextToSpeechTransport]] + _transport_registry["grpc"] = TextToSpeechGrpcTransport + _transport_registry["grpc_asyncio"] = TextToSpeechGrpcAsyncIOTransport + _transport_registry["rest"] = TextToSpeechRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TextToSpeechTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TextToSpeechClient(metaclass=TextToSpeechClientMeta): + """Service that implements Google Cloud Text-to-Speech API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "texttospeech.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextToSpeechClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextToSpeechClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TextToSpeechTransport: + """Returns the transport used by the client instance. + + Returns: + TextToSpeechTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + project: str, + location: str, + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "projects/{project}/locations/{location}/models/{model}".format( + project=project, + location=location, + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/models/(?P<model>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TextToSpeechTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text to speech client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TextToSpeechTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TextToSpeechTransport): + # transport is a TextToSpeechTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_voices( + self, + request: Optional[Union[cloud_tts.ListVoicesRequest, dict]] = None, + *, + language_code: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_tts.ListVoicesResponse: + r"""Returns a list of Voices supported for synthesis. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1 + + def sample_list_voices(): + # Create a client + client = texttospeech_v1.TextToSpeechClient() + + # Initialize request argument(s) + request = texttospeech_v1.ListVoicesRequest( + ) + + # Make the request + response = client.list_voices(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.texttospeech_v1.types.ListVoicesRequest, dict]): + The request object. The top-level message sent by the client for the + ``ListVoices`` method. + language_code (str): + Optional. Recommended. + `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__ + language tag. If not specified, the API will return all + supported voices. If specified, the ListVoices call will + only return voices that can be used to synthesize this + language_code. For example, if you specify ``"en-NZ"``, + all ``"en-NZ"`` voices will be returned. If you specify + ``"no"``, both ``"no-\*"`` (Norwegian) and ``"nb-\*"`` + (Norwegian Bokmal) voices will be returned. + + This corresponds to the ``language_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.texttospeech_v1.types.ListVoicesResponse: + The message returned to the client by the ListVoices + method. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([language_code]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_tts.ListVoicesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_tts.ListVoicesRequest): + request = cloud_tts.ListVoicesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if language_code is not None: + request.language_code = language_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_voices] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
+ return response + + def synthesize_speech( + self, + request: Optional[Union[cloud_tts.SynthesizeSpeechRequest, dict]] = None, + *, + input: Optional[cloud_tts.SynthesisInput] = None, + voice: Optional[cloud_tts.VoiceSelectionParams] = None, + audio_config: Optional[cloud_tts.AudioConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_tts.SynthesizeSpeechResponse: + r"""Synthesizes speech synchronously: receive results + after all text input has been processed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1 + + def sample_synthesize_speech(): + # Create a client + client = texttospeech_v1.TextToSpeechClient() + + # Initialize request argument(s) + input = texttospeech_v1.SynthesisInput() + input.text = "text_value" + + voice = texttospeech_v1.VoiceSelectionParams() + voice.language_code = "language_code_value" + + audio_config = texttospeech_v1.AudioConfig() + audio_config.audio_encoding = "ALAW" + + request = texttospeech_v1.SynthesizeSpeechRequest( + input=input, + voice=voice, + audio_config=audio_config, + ) + + # Make the request + response = client.synthesize_speech(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.texttospeech_v1.types.SynthesizeSpeechRequest, dict]): + The request object. The top-level message sent by the client for the + ``SynthesizeSpeech`` method. + input (google.cloud.texttospeech_v1.types.SynthesisInput): + Required. The Synthesizer requires + either plain text or SSML as input. + + This corresponds to the ``input`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + voice (google.cloud.texttospeech_v1.types.VoiceSelectionParams): + Required. The desired voice of the + synthesized audio. + + This corresponds to the ``voice`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audio_config (google.cloud.texttospeech_v1.types.AudioConfig): + Required. The configuration of the + synthesized audio. + + This corresponds to the ``audio_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.texttospeech_v1.types.SynthesizeSpeechResponse: + The message returned to the client by the + SynthesizeSpeech method. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input, voice, audio_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_tts.SynthesizeSpeechRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_tts.SynthesizeSpeechRequest): + request = cloud_tts.SynthesizeSpeechRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if input is not None: + request.input = input + if voice is not None: + request.voice = voice + if audio_config is not None: + request.audio_config = audio_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.synthesize_speech] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TextToSpeechClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextToSpeechClient",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/__init__.py new file mode 100644 index 000000000000..4695cd402827 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TextToSpeechTransport +from .grpc import TextToSpeechGrpcTransport +from .grpc_asyncio import TextToSpeechGrpcAsyncIOTransport +from .rest import TextToSpeechRestInterceptor, TextToSpeechRestTransport + +# Compile a registry of transports. 
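+# Keys are the names accepted by the client's ``transport`` argument
+# ("grpc", "grpc_asyncio", "rest"); values are the classes implementing them.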
+_transport_registry = OrderedDict() # type: Dict[str, Type[TextToSpeechTransport]] +_transport_registry["grpc"] = TextToSpeechGrpcTransport +_transport_registry["grpc_asyncio"] = TextToSpeechGrpcAsyncIOTransport +_transport_registry["rest"] = TextToSpeechRestTransport + +__all__ = ( + "TextToSpeechTransport", + "TextToSpeechGrpcTransport", + "TextToSpeechGrpcAsyncIOTransport", + "TextToSpeechRestTransport", + "TextToSpeechRestInterceptor", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py new file mode 100644 index 000000000000..5b4fc7f865e0 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/base.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1 import gapic_version as package_version +from google.cloud.texttospeech_v1.types import cloud_tts + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TextToSpeechTransport(abc.ABC): + """Abstract transport class for TextToSpeech.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "texttospeech.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_voices: gapic_v1.method.wrap_method( + self.list_voices, + default_timeout=None, + client_info=client_info, + ), + self.synthesize_speech: gapic_v1.method.wrap_method( + self.synthesize_speech, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_voices( + self, + ) -> Callable[ + [cloud_tts.ListVoicesRequest], + Union[cloud_tts.ListVoicesResponse, Awaitable[cloud_tts.ListVoicesResponse]], + ]: + raise NotImplementedError() + + @property + def synthesize_speech( + self, + ) -> Callable[ + [cloud_tts.SynthesizeSpeechRequest], + Union[ + cloud_tts.SynthesizeSpeechResponse, + Awaitable[cloud_tts.SynthesizeSpeechResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TextToSpeechTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py new file mode 100644 index 000000000000..c2642c2ac8ab --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts + +from .base import DEFAULT_CLIENT_INFO, TextToSpeechTransport + + +class TextToSpeechGrpcTransport(TextToSpeechTransport): + """gRPC backend transport for TextToSpeech. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
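+                # The (deprecated) callback must return a PEM-encoded
+                # (certificate_chain, private_key) pair of bytes.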
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_voices( + self, + ) -> Callable[[cloud_tts.ListVoicesRequest], cloud_tts.ListVoicesResponse]: + r"""Return a callable for the list voices method over gRPC. + + Returns a list of Voice supported for synthesis. 
+ + Returns: + Callable[[~.ListVoicesRequest], + ~.ListVoicesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_voices" not in self._stubs: + self._stubs["list_voices"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1.TextToSpeech/ListVoices", + request_serializer=cloud_tts.ListVoicesRequest.serialize, + response_deserializer=cloud_tts.ListVoicesResponse.deserialize, + ) + return self._stubs["list_voices"] + + @property + def synthesize_speech( + self, + ) -> Callable[ + [cloud_tts.SynthesizeSpeechRequest], cloud_tts.SynthesizeSpeechResponse + ]: + r"""Return a callable for the synthesize speech method over gRPC. + + Synthesizes speech synchronously: receive results + after all text input has been processed. + + Returns: + Callable[[~.SynthesizeSpeechRequest], + ~.SynthesizeSpeechResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "synthesize_speech" not in self._stubs: + self._stubs["synthesize_speech"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1.TextToSpeech/SynthesizeSpeech", + request_serializer=cloud_tts.SynthesizeSpeechRequest.serialize, + response_deserializer=cloud_tts.SynthesizeSpeechResponse.deserialize, + ) + return self._stubs["synthesize_speech"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TextToSpeechGrpcTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py new file mode 100644 index 000000000000..ae4e13695141 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/grpc_asyncio.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts + +from .base import DEFAULT_CLIENT_INFO, TextToSpeechTransport +from .grpc import TextToSpeechGrpcTransport + + +class TextToSpeechGrpcAsyncIOTransport(TextToSpeechTransport): + """gRPC AsyncIO backend transport for TextToSpeech. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
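+                # As in the sync transport, the callback returns PEM-encoded
+                # certificate-chain and private-key bytes.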
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_voices( + self, + ) -> Callable[ + [cloud_tts.ListVoicesRequest], Awaitable[cloud_tts.ListVoicesResponse] + ]: + r"""Return a callable for the list voices method over gRPC. + + Returns a list of Voice supported for synthesis. + + Returns: + Callable[[~.ListVoicesRequest], + Awaitable[~.ListVoicesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_voices" not in self._stubs: + self._stubs["list_voices"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1.TextToSpeech/ListVoices", + request_serializer=cloud_tts.ListVoicesRequest.serialize, + response_deserializer=cloud_tts.ListVoicesResponse.deserialize, + ) + return self._stubs["list_voices"] + + @property + def synthesize_speech( + self, + ) -> Callable[ + [cloud_tts.SynthesizeSpeechRequest], + Awaitable[cloud_tts.SynthesizeSpeechResponse], + ]: + r"""Return a callable for the synthesize speech method over gRPC. + + Synthesizes speech synchronously: receive results + after all text input has been processed. + + Returns: + Callable[[~.SynthesizeSpeechRequest], + Awaitable[~.SynthesizeSpeechResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
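+        # The stub is created lazily on first property access and cached in
+        # ``self._stubs``, so later calls reuse the same channel-bound callable.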
+ if "synthesize_speech" not in self._stubs: + self._stubs["synthesize_speech"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1.TextToSpeech/SynthesizeSpeech", + request_serializer=cloud_tts.SynthesizeSpeechRequest.serialize, + response_deserializer=cloud_tts.SynthesizeSpeechResponse.deserialize, + ) + return self._stubs["synthesize_speech"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TextToSpeechGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py new file mode 100644 index 000000000000..c24a1e4ef72f --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech/transports/rest.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TextToSpeechTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TextToSpeechRestInterceptor: + """Interceptor for TextToSpeech. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TextToSpeechRestTransport. + + .. 
code-block:: python + class MyCustomTextToSpeechInterceptor(TextToSpeechRestInterceptor): + def pre_list_voices(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_voices(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_synthesize_speech(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_synthesize_speech(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TextToSpeechRestTransport(interceptor=MyCustomTextToSpeechInterceptor()) + client = TextToSpeechClient(transport=transport) + + + """ + + def pre_list_voices( + self, request: cloud_tts.ListVoicesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloud_tts.ListVoicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_voices + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeech server. + """ + return request, metadata + + def post_list_voices( + self, response: cloud_tts.ListVoicesResponse + ) -> cloud_tts.ListVoicesResponse: + """Post-rpc interceptor for list_voices + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeech server but before + it is returned to user code. + """ + return response + + def pre_synthesize_speech( + self, + request: cloud_tts.SynthesizeSpeechRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_tts.SynthesizeSpeechRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for synthesize_speech + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeech server. + """ + return request, metadata + + def post_synthesize_speech( + self, response: cloud_tts.SynthesizeSpeechResponse + ) -> cloud_tts.SynthesizeSpeechResponse: + """Post-rpc interceptor for synthesize_speech + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeech server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TextToSpeechRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TextToSpeechRestInterceptor + + +class TextToSpeechRestTransport(TextToSpeechTransport): + """REST backend transport for TextToSpeech. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TextToSpeechRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+
+        NOTE: This REST transport functionality is currently in a beta
+        state (preview). We welcome your feedback via a GitHub issue in
+        this library's repository. Thank you!
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TextToSpeechRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _ListVoices(TextToSpeechRestStub):
+        def __hash__(self):
+            return hash("ListVoices")
+
+        def __call__(
+            self,
+            request: cloud_tts.ListVoicesRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> cloud_tts.ListVoicesResponse:
+            r"""Call the list voices method over HTTP.
+
+            Args:
+                request (~.cloud_tts.ListVoicesRequest):
+                    The request object. The top-level message sent by the client for the
+                    ``ListVoices`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.cloud_tts.ListVoicesResponse:
+                    The message returned to the client by the ``ListVoices``
+                    method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/voices", + }, + ] + request, metadata = self._interceptor.pre_list_voices(request, metadata) + pb_request = cloud_tts.ListVoicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_tts.ListVoicesResponse() + pb_resp = cloud_tts.ListVoicesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_voices(resp) + return resp + + class _SynthesizeSpeech(TextToSpeechRestStub): + def __hash__(self): + return hash("SynthesizeSpeech") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_tts.SynthesizeSpeechRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_tts.SynthesizeSpeechResponse: + r"""Call the synthesize speech method over HTTP. + + Args: + request (~.cloud_tts.SynthesizeSpeechRequest): + The request object. The top-level message sent by the client for the + ``SynthesizeSpeech`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_tts.SynthesizeSpeechResponse: + The message returned to the client by the + ``SynthesizeSpeech`` method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/text:synthesize", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_synthesize_speech( + request, metadata + ) + pb_request = cloud_tts.SynthesizeSpeechRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_tts.SynthesizeSpeechResponse() + pb_resp = cloud_tts.SynthesizeSpeechResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_synthesize_speech(resp) + return resp + + @property + def list_voices( + self, + ) -> Callable[[cloud_tts.ListVoicesRequest], cloud_tts.ListVoicesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListVoices(self._session, self._host, self._interceptor) # type: ignore + + @property + def synthesize_speech( + self, + ) -> Callable[ + [cloud_tts.SynthesizeSpeechRequest], cloud_tts.SynthesizeSpeechResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SynthesizeSpeech(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TextToSpeechRestTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/__init__.py new file mode 100644 index 000000000000..d46ede2765ca --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import TextToSpeechLongAudioSynthesizeAsyncClient +from .client import TextToSpeechLongAudioSynthesizeClient + +__all__ = ( + "TextToSpeechLongAudioSynthesizeClient", + "TextToSpeechLongAudioSynthesizeAsyncClient", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py new file mode 100644 index 000000000000..d4b72ec3f345 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/async_client.py @@ -0,0 +1,354 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts_lrs + +from .client import TextToSpeechLongAudioSynthesizeClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + TextToSpeechLongAudioSynthesizeTransport, +) +from .transports.grpc_asyncio import TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport + + +class TextToSpeechLongAudioSynthesizeAsyncClient: + """Service that implements Google Cloud Text-to-Speech API.""" + + _client: TextToSpeechLongAudioSynthesizeClient + + DEFAULT_ENDPOINT = TextToSpeechLongAudioSynthesizeClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TextToSpeechLongAudioSynthesizeClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(TextToSpeechLongAudioSynthesizeClient.model_path) + parse_model_path = staticmethod( + TextToSpeechLongAudioSynthesizeClient.parse_model_path + ) + common_billing_account_path = staticmethod( + TextToSpeechLongAudioSynthesizeClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TextToSpeechLongAudioSynthesizeClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + 
TextToSpeechLongAudioSynthesizeClient.common_folder_path
+    )
+    parse_common_folder_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.parse_common_folder_path
+    )
+    common_organization_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.common_organization_path
+    )
+    parse_common_organization_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.common_project_path
+    )
+    parse_common_project_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.common_location_path
+    )
+    parse_common_location_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeAsyncClient: The constructed client.
+        """
+        return TextToSpeechLongAudioSynthesizeClient.from_service_account_info.__func__(TextToSpeechLongAudioSynthesizeAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeAsyncClient: The constructed client.
+        """
+        return TextToSpeechLongAudioSynthesizeClient.from_service_account_file.__func__(TextToSpeechLongAudioSynthesizeAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TextToSpeechLongAudioSynthesizeClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TextToSpeechLongAudioSynthesizeTransport: + """Returns the transport used by the client instance. + + Returns: + TextToSpeechLongAudioSynthesizeTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(TextToSpeechLongAudioSynthesizeClient).get_transport_class, + type(TextToSpeechLongAudioSynthesizeClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[ + str, TextToSpeechLongAudioSynthesizeTransport + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text to speech long audio synthesize client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TextToSpeechLongAudioSynthesizeTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TextToSpeechLongAudioSynthesizeClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def synthesize_long_audio( + self, + request: Optional[Union[cloud_tts_lrs.SynthesizeLongAudioRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Synthesizes long form text asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import texttospeech_v1
+
+            async def sample_synthesize_long_audio():
+                # Create a client
+                client = texttospeech_v1.TextToSpeechLongAudioSynthesizeAsyncClient()
+
+                # Initialize request argument(s)
+                input = texttospeech_v1.SynthesisInput()
+                input.text = "text_value"
+
+                audio_config = texttospeech_v1.AudioConfig()
+                audio_config.audio_encoding = "ALAW"
+
+                voice = texttospeech_v1.VoiceSelectionParams()
+                voice.language_code = "language_code_value"
+
+                request = texttospeech_v1.SynthesizeLongAudioRequest(
+                    input=input,
+                    audio_config=audio_config,
+                    output_gcs_uri="output_gcs_uri_value",
+                    voice=voice,
+                )
+
+                # Make the request
+                operation = client.synthesize_long_audio(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.texttospeech_v1.types.SynthesizeLongAudioRequest, dict]]):
+                The request object. The top-level message sent by the client for the
+                ``SynthesizeLongAudio`` method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.texttospeech_v1.types.SynthesizeLongAudioResponse`
+                The message returned to the client by the
+                SynthesizeLongAudio method.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = cloud_tts_lrs.SynthesizeLongAudioRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.synthesize_long_audio,
+            default_timeout=5000.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            cloud_tts_lrs.SynthesizeLongAudioResponse,
+            metadata_type=cloud_tts_lrs.SynthesizeLongAudioMetadata,
+        )
+
+        # Done; return the response.
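# Illustrative usage (a minimal sketch): driving the coroutine above end to
# end. Project, location, bucket, and text values are hypothetical.
import asyncio

from google.cloud import texttospeech_v1


async def run_long_audio_synthesis() -> None:
    client = texttospeech_v1.TextToSpeechLongAudioSynthesizeAsyncClient()
    request = texttospeech_v1.SynthesizeLongAudioRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical
        input=texttospeech_v1.SynthesisInput(text="Hello, long audio."),
        audio_config=texttospeech_v1.AudioConfig(audio_encoding="LINEAR16"),
        voice=texttospeech_v1.VoiceSelectionParams(language_code="en-US"),
        output_gcs_uri="gs://my-bucket/output.wav",  # hypothetical bucket
    )
    # The method returns an AsyncOperation; awaiting .result() blocks until
    # the long-running operation finishes.
    operation = await client.synthesize_long_audio(request=request)
    response = await operation.result()
    print(response)


asyncio.run(run_long_audio_synthesis())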
+ return response + + async def __aenter__(self) -> "TextToSpeechLongAudioSynthesizeAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextToSpeechLongAudioSynthesizeAsyncClient",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py new file mode 100644 index 000000000000..94c606df3dba --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/client.py @@ -0,0 +1,589 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts_lrs + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + TextToSpeechLongAudioSynthesizeTransport, +) +from .transports.grpc import TextToSpeechLongAudioSynthesizeGrpcTransport +from .transports.grpc_asyncio import TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport +from .transports.rest import TextToSpeechLongAudioSynthesizeRestTransport + + +class TextToSpeechLongAudioSynthesizeClientMeta(type): + """Metaclass for the TextToSpeechLongAudioSynthesize client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[TextToSpeechLongAudioSynthesizeTransport]] + _transport_registry["grpc"] = TextToSpeechLongAudioSynthesizeGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport + _transport_registry["rest"] = TextToSpeechLongAudioSynthesizeRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TextToSpeechLongAudioSynthesizeTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TextToSpeechLongAudioSynthesizeClient( + metaclass=TextToSpeechLongAudioSynthesizeClientMeta +): + """Service that implements Google Cloud Text-to-Speech API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "texttospeech.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextToSpeechLongAudioSynthesizeClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextToSpeechLongAudioSynthesizeClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TextToSpeechLongAudioSynthesizeTransport: + """Returns the transport used by the client instance. 
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
+        """Returns a fully-qualified model string."""
+        return "projects/{project}/locations/{location}/models/{model}".format(
+            project=project,
+            location=location,
+            model=model,
+        )
+
+    @staticmethod
+    def parse_model_path(path: str) -> Dict[str, str]:
+        """Parses a model path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/models/(?P<model>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
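# Illustrative usage (a minimal sketch): round-tripping a resource name
# through the path helpers above. The IDs are hypothetical.
from google.cloud import texttospeech_v1

Client = texttospeech_v1.TextToSpeechLongAudioSynthesizeClient
path = Client.model_path("my-project", "us-central1", "my-model")
# path == "projects/my-project/locations/us-central1/models/my-model"
assert Client.parse_model_path(path) == {
    "project": "my-project",
    "location": "us-central1",
    "model": "my-model",
}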
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[
+            Union[str, TextToSpeechLongAudioSynthesizeTransport]
+        ] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the text to speech long audio synthesize client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, TextToSpeechLongAudioSynthesizeTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+                NOTE: "rest" transport functionality is currently in a
+                beta state (preview). We welcome your feedback via an
+                issue in this library's source repository.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TextToSpeechLongAudioSynthesizeTransport): + # transport is a TextToSpeechLongAudioSynthesizeTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def synthesize_long_audio( + self, + request: Optional[Union[cloud_tts_lrs.SynthesizeLongAudioRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Synthesizes long form text asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1 + + def sample_synthesize_long_audio(): + # Create a client + client = texttospeech_v1.TextToSpeechLongAudioSynthesizeClient() + + # Initialize request argument(s) + input = texttospeech_v1.SynthesisInput() + input.text = "text_value" + + audio_config = texttospeech_v1.AudioConfig() + audio_config.audio_encoding = "ALAW" + + voice = texttospeech_v1.VoiceSelectionParams() + voice.language_code = "language_code_value" + + request = texttospeech_v1.SynthesizeLongAudioRequest( + input=input, + audio_config=audio_config, + output_gcs_uri="output_gcs_uri_value", + voice=voice, + ) + + # Make the request + operation = client.synthesize_long_audio(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.texttospeech_v1.types.SynthesizeLongAudioRequest, dict]): + The request object. The top-level message sent by the client for the + ``SynthesizeLongAudio`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.texttospeech_v1.types.SynthesizeLongAudioResponse` + The message returned to the client by the + SynthesizeLongAudio method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cloud_tts_lrs.SynthesizeLongAudioRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_tts_lrs.SynthesizeLongAudioRequest): + request = cloud_tts_lrs.SynthesizeLongAudioRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.synthesize_long_audio] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_tts_lrs.SynthesizeLongAudioResponse, + metadata_type=cloud_tts_lrs.SynthesizeLongAudioMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TextToSpeechLongAudioSynthesizeClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextToSpeechLongAudioSynthesizeClient",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/__init__.py new file mode 100644 index 000000000000..4132045dfc54 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TextToSpeechLongAudioSynthesizeTransport +from .grpc import TextToSpeechLongAudioSynthesizeGrpcTransport +from .grpc_asyncio import TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport +from .rest import ( + TextToSpeechLongAudioSynthesizeRestInterceptor, + TextToSpeechLongAudioSynthesizeRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[TextToSpeechLongAudioSynthesizeTransport]] +_transport_registry["grpc"] = TextToSpeechLongAudioSynthesizeGrpcTransport +_transport_registry[ + "grpc_asyncio" +] = TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport +_transport_registry["rest"] = TextToSpeechLongAudioSynthesizeRestTransport + +__all__ = ( + "TextToSpeechLongAudioSynthesizeTransport", + "TextToSpeechLongAudioSynthesizeGrpcTransport", + "TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport", + "TextToSpeechLongAudioSynthesizeRestTransport", + "TextToSpeechLongAudioSynthesizeRestInterceptor", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/base.py new file mode 100644 index 000000000000..7f7afe86ce28 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/base.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1 import gapic_version as package_version +from google.cloud.texttospeech_v1.types import cloud_tts_lrs + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TextToSpeechLongAudioSynthesizeTransport(abc.ABC): + """Abstract transport class for TextToSpeechLongAudioSynthesize.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "texttospeech.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
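# Illustrative sketch: the Application Default Credentials fallback the base
# transport performs above when neither ``credentials`` nor
# ``credentials_file`` is supplied.
import google.auth

credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)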
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.synthesize_long_audio: gapic_v1.method.wrap_method( + self.synthesize_long_audio, + default_timeout=5000.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def synthesize_long_audio( + self, + ) -> Callable[ + [cloud_tts_lrs.SynthesizeLongAudioRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TextToSpeechLongAudioSynthesizeTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc.py new file mode 100644 index 000000000000..af0c0a1c4a75 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts_lrs + +from .base import DEFAULT_CLIENT_INFO, TextToSpeechLongAudioSynthesizeTransport + + +class TextToSpeechLongAudioSynthesizeGrpcTransport( + TextToSpeechLongAudioSynthesizeTransport +): + """gRPC backend transport for TextToSpeechLongAudioSynthesize. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def synthesize_long_audio( + self, + ) -> Callable[[cloud_tts_lrs.SynthesizeLongAudioRequest], operations_pb2.Operation]: + r"""Return a callable for the synthesize long audio method over gRPC. + + Synthesizes long form text asynchronously. + + Returns: + Callable[[~.SynthesizeLongAudioRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "synthesize_long_audio" not in self._stubs: + self._stubs["synthesize_long_audio"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1.TextToSpeechLongAudioSynthesize/SynthesizeLongAudio", + request_serializer=cloud_tts_lrs.SynthesizeLongAudioRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["synthesize_long_audio"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TextToSpeechLongAudioSynthesizeGrpcTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py new file mode 100644 index 000000000000..fcf193c4d040 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py @@ -0,0 +1,286 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
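# Illustrative usage (a minimal sketch): handing a pre-built channel to the
# gRPC transport defined above. create_channel() applies the default host,
# scopes, and Application Default Credentials; a transport constructed with an
# explicit channel skips its own channel creation.
from google.cloud import texttospeech_v1
from google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize.transports import (
    TextToSpeechLongAudioSynthesizeGrpcTransport,
)

channel = TextToSpeechLongAudioSynthesizeGrpcTransport.create_channel()
transport = TextToSpeechLongAudioSynthesizeGrpcTransport(channel=channel)
client = texttospeech_v1.TextToSpeechLongAudioSynthesizeClient(transport=transport)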
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts_lrs + +from .base import DEFAULT_CLIENT_INFO, TextToSpeechLongAudioSynthesizeTransport +from .grpc import TextToSpeechLongAudioSynthesizeGrpcTransport + + +class TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport( + TextToSpeechLongAudioSynthesizeTransport +): + """gRPC AsyncIO backend transport for TextToSpeechLongAudioSynthesize. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def synthesize_long_audio( + self, + ) -> Callable[ + [cloud_tts_lrs.SynthesizeLongAudioRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the synthesize long audio method over gRPC. + + Synthesizes long form text asynchronously. + + Returns: + Callable[[~.SynthesizeLongAudioRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "synthesize_long_audio" not in self._stubs: + self._stubs["synthesize_long_audio"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1.TextToSpeechLongAudioSynthesize/SynthesizeLongAudio", + request_serializer=cloud_tts_lrs.SynthesizeLongAudioRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["synthesize_long_audio"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/rest.py new file mode 100644 index 000000000000..541e8ddfda69 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/services/text_to_speech_long_audio_synthesize/transports/rest.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1.types import cloud_tts_lrs + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TextToSpeechLongAudioSynthesizeTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TextToSpeechLongAudioSynthesizeRestInterceptor: + """Interceptor for TextToSpeechLongAudioSynthesize. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the TextToSpeechLongAudioSynthesizeRestTransport.
+
+    .. code-block:: python
+
+        class MyCustomTextToSpeechLongAudioSynthesizeInterceptor(TextToSpeechLongAudioSynthesizeRestInterceptor):
+            def pre_synthesize_long_audio(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_synthesize_long_audio(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = TextToSpeechLongAudioSynthesizeRestTransport(interceptor=MyCustomTextToSpeechLongAudioSynthesizeInterceptor())
+        client = TextToSpeechLongAudioSynthesizeClient(transport=transport)
+
+
+    """
+
+    def pre_synthesize_long_audio(
+        self,
+        request: cloud_tts_lrs.SynthesizeLongAudioRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[cloud_tts_lrs.SynthesizeLongAudioRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for synthesize_long_audio
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the TextToSpeechLongAudioSynthesize server.
+        """
+        return request, metadata
+
+    def post_synthesize_long_audio(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for synthesize_long_audio
+
+        Override in a subclass to manipulate the response
+        after it is returned by the TextToSpeechLongAudioSynthesize server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class TextToSpeechLongAudioSynthesizeRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: TextToSpeechLongAudioSynthesizeRestInterceptor
+
+
+class TextToSpeechLongAudioSynthesizeRestTransport(
+    TextToSpeechLongAudioSynthesizeTransport
+):
+    """REST backend transport for TextToSpeechLongAudioSynthesize.
+
+    Service that implements Google Cloud Text-to-Speech API.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    NOTE: This REST transport functionality is currently in a beta
+    state (preview). We welcome your feedback via an issue in this
+    library's source repository. Thank you!
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "texttospeech.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[TextToSpeechLongAudioSynthesizeRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        NOTE: This REST transport functionality is currently in a beta
+        state (preview). We welcome your feedback via a GitHub issue in
+        this library's repository. Thank you!
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests.
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = (
+            interceptor or TextToSpeechLongAudioSynthesizeRestInterceptor()
+        )
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {}
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
+ return self._operations_client + + class _SynthesizeLongAudio(TextToSpeechLongAudioSynthesizeRestStub): + def __hash__(self): + return hash("SynthesizeLongAudio") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_tts_lrs.SynthesizeLongAudioRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the synthesize long audio method over HTTP. + + Args: + request (~.cloud_tts_lrs.SynthesizeLongAudioRequest): + The request object. The top-level message sent by the client for the + ``SynthesizeLongAudio`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/voices/*}:SynthesizeLongAudio", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_synthesize_long_audio( + request, metadata + ) + pb_request = cloud_tts_lrs.SynthesizeLongAudioRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_synthesize_long_audio(resp) + return resp + + @property + def synthesize_long_audio( + self, + ) -> Callable[[cloud_tts_lrs.SynthesizeLongAudioRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+        # In C++ this would require a dynamic_cast
+        return self._SynthesizeLongAudio(self._session, self._host, self._interceptor)  # type: ignore
+
+    @property
+    def kind(self) -> str:
+        return "rest"
+
+    def close(self):
+        self._session.close()
+
+
+__all__ = ("TextToSpeechLongAudioSynthesizeRestTransport",)
diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/__init__.py
new file mode 100644
index 000000000000..ff1065fcfc6f
--- /dev/null
+++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/__init__.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .cloud_tts import (
+    AudioConfig,
+    AudioEncoding,
+    CustomVoiceParams,
+    ListVoicesRequest,
+    ListVoicesResponse,
+    SsmlVoiceGender,
+    SynthesisInput,
+    SynthesizeSpeechRequest,
+    SynthesizeSpeechResponse,
+    Voice,
+    VoiceSelectionParams,
+)
+from .cloud_tts_lrs import (
+    SynthesizeLongAudioMetadata,
+    SynthesizeLongAudioRequest,
+    SynthesizeLongAudioResponse,
+)
+
+__all__ = (
+    "AudioConfig",
+    "CustomVoiceParams",
+    "ListVoicesRequest",
+    "ListVoicesResponse",
+    "SynthesisInput",
+    "SynthesizeSpeechRequest",
+    "SynthesizeSpeechResponse",
+    "Voice",
+    "VoiceSelectionParams",
+    "AudioEncoding",
+    "SsmlVoiceGender",
+    "SynthesizeLongAudioMetadata",
+    "SynthesizeLongAudioRequest",
+    "SynthesizeLongAudioResponse",
+)
diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py
new file mode 100644
index 000000000000..82cd02c11168
--- /dev/null
+++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.texttospeech.v1",
+    manifest={
+        "SsmlVoiceGender",
+        "AudioEncoding",
+        "ListVoicesRequest",
+        "ListVoicesResponse",
+        "Voice",
+        "SynthesizeSpeechRequest",
+        "SynthesisInput",
+        "VoiceSelectionParams",
+        "AudioConfig",
+        "CustomVoiceParams",
+        "SynthesizeSpeechResponse",
+    },
+)
+
+
+class SsmlVoiceGender(proto.Enum):
+    r"""Gender of the voice as described in `SSML voice
+    element <https://www.w3.org/TR/speech-synthesis11/#edef_voice>`__.
+
+    Values:
+        SSML_VOICE_GENDER_UNSPECIFIED (0):
+            An unspecified gender.
+            In VoiceSelectionParams, this means that the
+            client doesn't care which gender the selected
+            voice will have. In the Voice field of
+            ListVoicesResponse, this may mean that the voice
+            doesn't fit any of the other categories in this
+            enum, or that the gender of the voice isn't
+            known.
+        MALE (1):
+            A male voice.
+        FEMALE (2):
+            A female voice.
+        NEUTRAL (3):
+            A gender-neutral voice. This voice is not yet
+            supported.
+    """
+    SSML_VOICE_GENDER_UNSPECIFIED = 0
+    MALE = 1
+    FEMALE = 2
+    NEUTRAL = 3
+
+
+class AudioEncoding(proto.Enum):
+    r"""Configuration to set up audio encoder. The encoding
+    determines the output audio format that we'd like.
+
+    Values:
+        AUDIO_ENCODING_UNSPECIFIED (0):
+            Not specified. Will return result
+            [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT].
+        LINEAR16 (1):
+            Uncompressed 16-bit signed little-endian
+            samples (Linear PCM). Audio content returned as
+            LINEAR16 also contains a WAV header.
+        MP3 (2):
+            MP3 audio at 32kbps.
+        OGG_OPUS (3):
+            Opus encoded audio wrapped in an ogg
+            container. The result will be a file which can
+            be played natively on Android, and in browsers
+            (at least Chrome and Firefox). The quality of
+            the encoding is considerably higher than MP3
+            while using approximately the same bitrate.
+        MULAW (5):
+            8-bit samples that compand 14-bit audio
+            samples using G.711 PCMU/mu-law. Audio content
+            returned as MULAW also contains a WAV header.
+        ALAW (6):
+            8-bit samples that compand 14-bit audio
+            samples using G.711 PCMU/A-law. Audio content
+            returned as ALAW also contains a WAV header.
+    """
+    AUDIO_ENCODING_UNSPECIFIED = 0
+    LINEAR16 = 1
+    MP3 = 2
+    OGG_OPUS = 3
+    MULAW = 5
+    ALAW = 6
+
+
+class ListVoicesRequest(proto.Message):
+    r"""The top-level message sent by the client for the ``ListVoices``
+    method.
+
+    Attributes:
+        language_code (str):
+            Optional. Recommended.
+            `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
+            language tag. If not specified, the API will return all
+            supported voices. If specified, the ListVoices call will
+            only return voices that can be used to synthesize this
+            language_code. For example, if you specify ``"en-NZ"``, all
+            ``"en-NZ"`` voices will be returned. If you specify
+            ``"no"``, both ``"no-\*"`` (Norwegian) and ``"nb-\*"``
+            (Norwegian Bokmal) voices will be returned.
+    """
+
+    language_code: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListVoicesResponse(proto.Message):
+    r"""The message returned to the client by the ``ListVoices`` method.
+
+    Attributes:
+        voices (MutableSequence[google.cloud.texttospeech_v1.types.Voice]):
+            The list of voices.
+    """
+
+    voices: MutableSequence["Voice"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Voice",
+    )
+
+
+class Voice(proto.Message):
+    r"""Description of a voice supported by the TTS service.
+
+    Attributes:
+        language_codes (MutableSequence[str]):
+            The languages that this voice supports, expressed as
+            `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
+            language tags (e.g. "en-US", "es-419", "cmn-tw").
+        name (str):
+            The name of this voice. Each distinct voice
+            has a unique name.
+        ssml_gender (google.cloud.texttospeech_v1.types.SsmlVoiceGender):
+            The gender of this voice.
+        natural_sample_rate_hertz (int):
+            The natural sample rate (in hertz) for this
+            voice.
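+            For example, ``24000`` Hz is a common value (an
+            illustrative figure; the authoritative value is whatever
+            ``ListVoices`` returns for a given voice).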
+ """ + + language_codes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + ssml_gender: "SsmlVoiceGender" = proto.Field( + proto.ENUM, + number=3, + enum="SsmlVoiceGender", + ) + natural_sample_rate_hertz: int = proto.Field( + proto.INT32, + number=4, + ) + + +class SynthesizeSpeechRequest(proto.Message): + r"""The top-level message sent by the client for the + ``SynthesizeSpeech`` method. + + Attributes: + input (google.cloud.texttospeech_v1.types.SynthesisInput): + Required. The Synthesizer requires either + plain text or SSML as input. + voice (google.cloud.texttospeech_v1.types.VoiceSelectionParams): + Required. The desired voice of the + synthesized audio. + audio_config (google.cloud.texttospeech_v1.types.AudioConfig): + Required. The configuration of the + synthesized audio. + """ + + input: "SynthesisInput" = proto.Field( + proto.MESSAGE, + number=1, + message="SynthesisInput", + ) + voice: "VoiceSelectionParams" = proto.Field( + proto.MESSAGE, + number=2, + message="VoiceSelectionParams", + ) + audio_config: "AudioConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="AudioConfig", + ) + + +class SynthesisInput(proto.Message): + r"""Contains text input to be synthesized. Either ``text`` or ``ssml`` + must be supplied. Supplying both or neither returns + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. + The input size is limited to 5000 bytes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + The raw text to be synthesized. + + This field is a member of `oneof`_ ``input_source``. + ssml (str): + The SSML document to be synthesized. The SSML document must + be valid and well-formed. Otherwise the RPC will fail and + return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. + For more information, see + `SSML `__. + + This field is a member of `oneof`_ ``input_source``. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + oneof="input_source", + ) + ssml: str = proto.Field( + proto.STRING, + number=2, + oneof="input_source", + ) + + +class VoiceSelectionParams(proto.Message): + r"""Description of which voice to use for a synthesis request. + + Attributes: + language_code (str): + Required. The language (and potentially also the region) of + the voice expressed as a + `BCP-47 `__ + language tag, e.g. "en-US". This should not include a script + tag (e.g. use "cmn-cn" rather than "cmn-Hant-cn"), because + the script will be inferred from the input provided in the + SynthesisInput. The TTS service will use this parameter to + help choose an appropriate voice. Note that the TTS service + may choose a voice with a slightly different language code + than the one selected; it may substitute a different region + (e.g. using en-US rather than en-CA if there isn't a + Canadian voice available), or even a different language, + e.g. using "nb" (Norwegian Bokmal) instead of "no" + (Norwegian)". + name (str): + The name of the voice. If not set, the service will choose a + voice based on the other parameters such as language_code + and gender. + ssml_gender (google.cloud.texttospeech_v1.types.SsmlVoiceGender): + The preferred gender of the voice. 
If not set, the service
+            will choose a voice based on the other parameters such as
+            language_code and name. Note that this is only a preference,
+            not requirement; if a voice of the appropriate gender is not
+            available, the synthesizer should substitute a voice with a
+            different gender rather than failing the request.
+        custom_voice (google.cloud.texttospeech_v1.types.CustomVoiceParams):
+            The configuration for a custom voice. If
+            [CustomVoiceParams.model] is set, the service will choose
+            the custom voice matching the specified configuration.
+    """
+
+    language_code: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    ssml_gender: "SsmlVoiceGender" = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum="SsmlVoiceGender",
+    )
+    custom_voice: "CustomVoiceParams" = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message="CustomVoiceParams",
+    )
+
+
+class AudioConfig(proto.Message):
+    r"""Description of audio data to be synthesized.
+
+    Attributes:
+        audio_encoding (google.cloud.texttospeech_v1.types.AudioEncoding):
+            Required. The format of the audio byte
+            stream.
+        speaking_rate (float):
+            Optional. Input only. Speaking rate/speed, in the range
+            [0.25, 4.0]. 1.0 is the normal native speed supported by the
+            specific voice. 2.0 is twice as fast, and 0.5 is half as
+            fast. If unset (0.0), defaults to the native 1.0 speed. Any
+            other values < 0.25 or > 4.0 will return an error.
+        pitch (float):
+            Optional. Input only. Speaking pitch, in the range [-20.0,
+            20.0]. 20 means increase 20 semitones from the original
+            pitch. -20 means decrease 20 semitones from the original
+            pitch.
+        volume_gain_db (float):
+            Optional. Input only. Volume gain (in dB) of the normal
+            native volume supported by the specific voice, in the range
+            [-96.0, 16.0]. If unset, or set to a value of 0.0 (dB), will
+            play at normal native signal amplitude. A value of -6.0 (dB)
+            will play at approximately half the amplitude of the normal
+            native signal amplitude. A value of +6.0 (dB) will play at
+            approximately twice the amplitude of the normal native
+            signal amplitude. Strongly recommend not to exceed +10 (dB)
+            as there's usually no effective increase in loudness for any
+            value greater than that.
+        sample_rate_hertz (int):
+            Optional. The synthesis sample rate (in hertz) for this
+            audio. When this is specified in SynthesizeSpeechRequest, if
+            this is different from the voice's natural sample rate, then
+            the synthesizer will honor this request by converting to the
+            desired sample rate (which might result in worse audio
+            quality), unless the specified sample rate is not supported
+            for the encoding chosen, in which case it will fail the
+            request and return
+            [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT].
+        effects_profile_id (MutableSequence[str]):
+            Optional. Input only. An identifier which selects 'audio
+            effects' profiles that are applied on (post synthesized)
+            text to speech. Effects are applied on top of each other in
+            the order they are given. See `audio
+            profiles <https://cloud.google.com/text-to-speech/docs/audio-profiles>`__
+            for current supported profile ids.
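+            For example, ``["telephony-class-application"]`` is one
+            documented profile id (shown here for illustration; consult
+            the linked page for the authoritative list).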
+ """ + + audio_encoding: "AudioEncoding" = proto.Field( + proto.ENUM, + number=1, + enum="AudioEncoding", + ) + speaking_rate: float = proto.Field( + proto.DOUBLE, + number=2, + ) + pitch: float = proto.Field( + proto.DOUBLE, + number=3, + ) + volume_gain_db: float = proto.Field( + proto.DOUBLE, + number=4, + ) + sample_rate_hertz: int = proto.Field( + proto.INT32, + number=5, + ) + effects_profile_id: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + +class CustomVoiceParams(proto.Message): + r"""Description of the custom voice to be synthesized. + + Attributes: + model (str): + Required. The name of the AutoML model that + synthesizes the custom voice. + reported_usage (google.cloud.texttospeech_v1.types.CustomVoiceParams.ReportedUsage): + Optional. The usage of the synthesized audio + to be reported. + """ + + class ReportedUsage(proto.Enum): + r"""The usage of the synthesized audio. You must report your + honest and correct usage of the service as it's regulated by + contract and will cause significant difference in billing. + + Values: + REPORTED_USAGE_UNSPECIFIED (0): + Request with reported usage unspecified will + be rejected. + REALTIME (1): + For scenarios where the synthesized audio is + not downloadable and can only be used once. For + example, real-time request in IVR system. + OFFLINE (2): + For scenarios where the synthesized audio is + downloadable and can be reused. For example, the + synthesized audio is downloaded, stored in + customer service system and played repeatedly. + """ + REPORTED_USAGE_UNSPECIFIED = 0 + REALTIME = 1 + OFFLINE = 2 + + model: str = proto.Field( + proto.STRING, + number=1, + ) + reported_usage: ReportedUsage = proto.Field( + proto.ENUM, + number=3, + enum=ReportedUsage, + ) + + +class SynthesizeSpeechResponse(proto.Message): + r"""The message returned to the client by the ``SynthesizeSpeech`` + method. + + Attributes: + audio_content (bytes): + The audio data bytes encoded as specified in the request, + including the header for encodings that are wrapped in + containers (e.g. MP3, OGG_OPUS). For LINEAR16 audio, we + include the WAV header. Note: as with all bytes fields, + protobuffers use a pure binary representation, whereas JSON + representations use base64. + """ + + audio_content: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py new file mode 100644 index 000000000000..b23db84f3acf --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1/types/cloud_tts_lrs.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.texttospeech_v1.types import cloud_tts
+
+__protobuf__ = proto.module(
+    package="google.cloud.texttospeech.v1",
+    manifest={
+        "SynthesizeLongAudioRequest",
+        "SynthesizeLongAudioResponse",
+        "SynthesizeLongAudioMetadata",
+    },
+)
+
+
+class SynthesizeLongAudioRequest(proto.Message):
+    r"""The top-level message sent by the client for the
+    ``SynthesizeLongAudio`` method.
+
+    Attributes:
+        parent (str):
+            The resource states of the request in the form of
+            ``projects/*/locations/*``.
+        input (google.cloud.texttospeech_v1.types.SynthesisInput):
+            Required. The Synthesizer requires either
+            plain text or SSML as input. While Long Audio is
+            in preview, SSML is temporarily unsupported.
+        audio_config (google.cloud.texttospeech_v1.types.AudioConfig):
+            Required. The configuration of the
+            synthesized audio.
+        output_gcs_uri (str):
+            Required. Specifies a Cloud Storage URI for the synthesis
+            results. Must be specified in the format:
+            ``gs://bucket_name/object_name``, and the bucket must
+            already exist.
+        voice (google.cloud.texttospeech_v1.types.VoiceSelectionParams):
+            Required. The desired voice of the
+            synthesized audio.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    input: cloud_tts.SynthesisInput = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=cloud_tts.SynthesisInput,
+    )
+    audio_config: cloud_tts.AudioConfig = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=cloud_tts.AudioConfig,
+    )
+    output_gcs_uri: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    voice: cloud_tts.VoiceSelectionParams = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=cloud_tts.VoiceSelectionParams,
+    )
+
+
+class SynthesizeLongAudioResponse(proto.Message):
+    r"""The message returned to the client by the ``SynthesizeLongAudio``
+    method.
+
+    """
+
+
+class SynthesizeLongAudioMetadata(proto.Message):
+    r"""Metadata for response returned by the ``SynthesizeLongAudio``
+    method.
+
+    Attributes:
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time when the request was received.
+        last_update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time of the most recent processing update.
+        progress_percentage (float):
+            The progress of the most recent processing
+            update in percentage, i.e. 70.0%.
+    """
+
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=timestamp_pb2.Timestamp,
+    )
+    last_update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    progress_percentage: float = proto.Field(
+        proto.DOUBLE,
+        number=3,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/__init__.py
new file mode 100644
index 000000000000..795c8a4c2e7f
--- /dev/null
+++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/__init__.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.texttospeech_v1beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.text_to_speech import TextToSpeechAsyncClient, TextToSpeechClient +from .services.text_to_speech_long_audio_synthesize import ( + TextToSpeechLongAudioSynthesizeAsyncClient, + TextToSpeechLongAudioSynthesizeClient, +) +from .types.cloud_tts import ( + AudioConfig, + AudioEncoding, + CustomVoiceParams, + ListVoicesRequest, + ListVoicesResponse, + SsmlVoiceGender, + SynthesisInput, + SynthesizeSpeechRequest, + SynthesizeSpeechResponse, + Timepoint, + Voice, + VoiceSelectionParams, +) +from .types.cloud_tts_lrs import ( + SynthesizeLongAudioMetadata, + SynthesizeLongAudioRequest, + SynthesizeLongAudioResponse, +) + +__all__ = ( + "TextToSpeechAsyncClient", + "TextToSpeechLongAudioSynthesizeAsyncClient", + "AudioConfig", + "AudioEncoding", + "CustomVoiceParams", + "ListVoicesRequest", + "ListVoicesResponse", + "SsmlVoiceGender", + "SynthesisInput", + "SynthesizeLongAudioMetadata", + "SynthesizeLongAudioRequest", + "SynthesizeLongAudioResponse", + "SynthesizeSpeechRequest", + "SynthesizeSpeechResponse", + "TextToSpeechClient", + "TextToSpeechLongAudioSynthesizeClient", + "Timepoint", + "Voice", + "VoiceSelectionParams", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_metadata.json b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_metadata.json new file mode 100644 index 000000000000..668c7494623a --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_metadata.json @@ -0,0 +1,92 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.texttospeech_v1beta1", + "protoPackage": "google.cloud.texttospeech.v1beta1", + "schema": "1.0", + "services": { + "TextToSpeech": { + "clients": { + "grpc": { + "libraryClient": "TextToSpeechClient", + "rpcs": { + "ListVoices": { + "methods": [ + "list_voices" + ] + }, + "SynthesizeSpeech": { + "methods": [ + "synthesize_speech" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TextToSpeechAsyncClient", + "rpcs": { + "ListVoices": { + "methods": [ + "list_voices" + ] + }, + "SynthesizeSpeech": { + "methods": [ + "synthesize_speech" + ] + } + } + }, + "rest": { + "libraryClient": "TextToSpeechClient", + "rpcs": { + "ListVoices": { + "methods": [ + "list_voices" + ] + }, + "SynthesizeSpeech": { + "methods": [ + "synthesize_speech" + ] + } + } + } + } + }, + "TextToSpeechLongAudioSynthesize": { + "clients": { + "grpc": { + "libraryClient": "TextToSpeechLongAudioSynthesizeClient", + "rpcs": { + "SynthesizeLongAudio": { + "methods": [ + "synthesize_long_audio" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TextToSpeechLongAudioSynthesizeAsyncClient", + "rpcs": { + "SynthesizeLongAudio": { + "methods": [ + "synthesize_long_audio" + ] + } + } + }, + "rest": { + "libraryClient": "TextToSpeechLongAudioSynthesizeClient", + "rpcs": { + "SynthesizeLongAudio": { + "methods": [ + 
"synthesize_long_audio" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py new file mode 100644 index 000000000000..ef7c50064e79 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/py.typed b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/py.typed new file mode 100644 index 000000000000..9b87c1e1cbf9 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-texttospeech package uses inline types. diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/__init__.py new file mode 100644 index 000000000000..028dfdc17414 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import TextToSpeechAsyncClient +from .client import TextToSpeechClient + +__all__ = ( + "TextToSpeechClient", + "TextToSpeechAsyncClient", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py new file mode 100644 index 000000000000..c4ad47508384 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/async_client.py @@ -0,0 +1,456 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts + +from .client import TextToSpeechClient +from .transports.base import DEFAULT_CLIENT_INFO, TextToSpeechTransport +from .transports.grpc_asyncio import TextToSpeechGrpcAsyncIOTransport + + +class TextToSpeechAsyncClient: + """Service that implements Google Cloud Text-to-Speech API.""" + + _client: TextToSpeechClient + + DEFAULT_ENDPOINT = TextToSpeechClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TextToSpeechClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(TextToSpeechClient.model_path) + parse_model_path = staticmethod(TextToSpeechClient.parse_model_path) + common_billing_account_path = staticmethod( + TextToSpeechClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TextToSpeechClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TextToSpeechClient.common_folder_path) + parse_common_folder_path = staticmethod(TextToSpeechClient.parse_common_folder_path) + common_organization_path = staticmethod(TextToSpeechClient.common_organization_path) + parse_common_organization_path = staticmethod( + TextToSpeechClient.parse_common_organization_path + ) + common_project_path = staticmethod(TextToSpeechClient.common_project_path) + parse_common_project_path = staticmethod( + TextToSpeechClient.parse_common_project_path + ) + common_location_path = staticmethod(TextToSpeechClient.common_location_path) + parse_common_location_path 
= staticmethod(
+        TextToSpeechClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TextToSpeechAsyncClient: The constructed client.
+        """
+        return TextToSpeechClient.from_service_account_info.__func__(TextToSpeechAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TextToSpeechAsyncClient: The constructed client.
+        """
+        return TextToSpeechClient.from_service_account_file.__func__(TextToSpeechAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return TextToSpeechClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> TextToSpeechTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            TextToSpeechTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(TextToSpeechClient).get_transport_class, type(TextToSpeechClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, TextToSpeechTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the text to speech client.
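+
+        Example (an illustrative sketch in the style of the generated
+        code samples; it assumes application default credentials are
+        available in the environment):
+
+        .. code-block:: python
+
+            from google.cloud import texttospeech_v1beta1
+
+            client = texttospeech_v1beta1.TextToSpeechAsyncClient()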
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.TextToSpeechTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = TextToSpeechClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def list_voices(
+        self,
+        request: Optional[Union[cloud_tts.ListVoicesRequest, dict]] = None,
+        *,
+        language_code: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cloud_tts.ListVoicesResponse:
+        r"""Returns a list of voices supported for synthesis.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import texttospeech_v1beta1
+
+            async def sample_list_voices():
+                # Create a client
+                client = texttospeech_v1beta1.TextToSpeechAsyncClient()
+
+                # Initialize request argument(s)
+                request = texttospeech_v1beta1.ListVoicesRequest(
+                )
+
+                # Make the request
+                response = await client.list_voices(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.texttospeech_v1beta1.types.ListVoicesRequest, dict]]):
+                The request object. The top-level message sent by the client for the
+                ``ListVoices`` method.
+            language_code (:class:`str`):
+                Optional. Recommended.
+                `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
+                language tag. If not specified, the API will return all
+                supported voices. If specified, the ListVoices call will
+                only return voices that can be used to synthesize this
+                language_code. For example, if you specify ``"en-NZ"``, all
+                ``"en-NZ"`` voices will be returned. If you specify
+                ``"no"``, both ``"no-\*"`` (Norwegian) and ``"nb-\*"``
+                (Norwegian Bokmal) voices will be returned.
+ + This corresponds to the ``language_code`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.texttospeech_v1beta1.types.ListVoicesResponse: + The message returned to the client by the ListVoices + method. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([language_code]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_tts.ListVoicesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if language_code is not None: + request.language_code = language_code + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_voices, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def synthesize_speech( + self, + request: Optional[Union[cloud_tts.SynthesizeSpeechRequest, dict]] = None, + *, + input: Optional[cloud_tts.SynthesisInput] = None, + voice: Optional[cloud_tts.VoiceSelectionParams] = None, + audio_config: Optional[cloud_tts.AudioConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_tts.SynthesizeSpeechResponse: + r"""Synthesizes speech synchronously: receive results + after all text input has been processed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1beta1 + + async def sample_synthesize_speech(): + # Create a client + client = texttospeech_v1beta1.TextToSpeechAsyncClient() + + # Initialize request argument(s) + input = texttospeech_v1beta1.SynthesisInput() + input.text = "text_value" + + voice = texttospeech_v1beta1.VoiceSelectionParams() + voice.language_code = "language_code_value" + + audio_config = texttospeech_v1beta1.AudioConfig() + audio_config.audio_encoding = "ALAW" + + request = texttospeech_v1beta1.SynthesizeSpeechRequest( + input=input, + voice=voice, + audio_config=audio_config, + ) + + # Make the request + response = await client.synthesize_speech(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.texttospeech_v1beta1.types.SynthesizeSpeechRequest, dict]]): + The request object. The top-level message sent by the client for the + ``SynthesizeSpeech`` method. 
+ input (:class:`google.cloud.texttospeech_v1beta1.types.SynthesisInput`): + Required. The Synthesizer requires + either plain text or SSML as input. + + This corresponds to the ``input`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + voice (:class:`google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams`): + Required. The desired voice of the + synthesized audio. + + This corresponds to the ``voice`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audio_config (:class:`google.cloud.texttospeech_v1beta1.types.AudioConfig`): + Required. The configuration of the + synthesized audio. + + This corresponds to the ``audio_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.texttospeech_v1beta1.types.SynthesizeSpeechResponse: + The message returned to the client by the + SynthesizeSpeech method. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input, voice, audio_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = cloud_tts.SynthesizeSpeechRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if input is not None: + request.input = input + if voice is not None: + request.voice = voice + if audio_config is not None: + request.audio_config = audio_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.synthesize_speech, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "TextToSpeechAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextToSpeechAsyncClient",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py new file mode 100644 index 000000000000..18deb40699d7 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/client.py @@ -0,0 +1,697 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts + +from .transports.base import DEFAULT_CLIENT_INFO, TextToSpeechTransport +from .transports.grpc import TextToSpeechGrpcTransport +from .transports.grpc_asyncio import TextToSpeechGrpcAsyncIOTransport +from .transports.rest import TextToSpeechRestTransport + + +class TextToSpeechClientMeta(type): + """Metaclass for the TextToSpeech client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[TextToSpeechTransport]] + _transport_registry["grpc"] = TextToSpeechGrpcTransport + _transport_registry["grpc_asyncio"] = TextToSpeechGrpcAsyncIOTransport + _transport_registry["rest"] = TextToSpeechRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TextToSpeechTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TextToSpeechClient(metaclass=TextToSpeechClientMeta): + """Service that implements Google Cloud Text-to-Speech API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "texttospeech.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextToSpeechClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextToSpeechClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TextToSpeechTransport: + """Returns the transport used by the client instance. + + Returns: + TextToSpeechTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + project: str, + location: str, + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "projects/{project}/locations/{location}/models/{model}".format( + project=project, + location=location, + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/models/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, TextToSpeechTransport]] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the text to speech client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, TextToSpeechTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+                NOTE: "rest" transport functionality is currently in a
+                beta state (preview). We welcome your feedback via an
+                issue in this library's source repository.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TextToSpeechTransport): + # transport is a TextToSpeechTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_voices( + self, + request: Optional[Union[cloud_tts.ListVoicesRequest, dict]] = None, + *, + language_code: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_tts.ListVoicesResponse: + r"""Returns a list of Voice supported for synthesis. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import texttospeech_v1beta1
+
+            def sample_list_voices():
+                # Create a client
+                client = texttospeech_v1beta1.TextToSpeechClient()
+
+                # Initialize request argument(s)
+                request = texttospeech_v1beta1.ListVoicesRequest(
+                )
+
+                # Make the request
+                response = client.list_voices(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.texttospeech_v1beta1.types.ListVoicesRequest, dict]):
+                The request object. The top-level message sent by the client for the
+                ``ListVoices`` method.
+            language_code (str):
+                Optional. Recommended.
+                `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
+                language tag. If not specified, the API will return all
+                supported voices. If specified, the ListVoices call will
+                only return voices that can be used to synthesize this
+                language_code. For example, if you specify ``"en-NZ"``,
+                all ``"en-NZ"`` voices will be returned. If you specify
+                ``"no"``, both ``"no-\*"`` (Norwegian) and ``"nb-\*"``
+                (Norwegian Bokmal) voices will be returned.
+
+                This corresponds to the ``language_code`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.texttospeech_v1beta1.types.ListVoicesResponse:
+                The message returned to the client by the ListVoices
+                method.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([language_code])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloud_tts.ListVoicesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloud_tts.ListVoicesRequest):
+            request = cloud_tts.ListVoicesRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if language_code is not None:
+                request.language_code = language_code
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_voices]
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
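[Editorial aside: as the docstring above notes, ``language_code`` is a flattened convenience parameter, so callers pass either it or a full request object, never both. A short sketch; the language tag is only an example.]

    client = texttospeech_v1beta1.TextToSpeechClient()

    # Flattened form: the client builds the ListVoicesRequest internally.
    response = client.list_voices(language_code="en-NZ")

    # Equivalent request-object form; mixing the two raises ValueError.
    request = texttospeech_v1beta1.ListVoicesRequest(language_code="en-NZ")
    response = client.list_voices(request=request)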
+
+    def synthesize_speech(
+        self,
+        request: Optional[Union[cloud_tts.SynthesizeSpeechRequest, dict]] = None,
+        *,
+        input: Optional[cloud_tts.SynthesisInput] = None,
+        voice: Optional[cloud_tts.VoiceSelectionParams] = None,
+        audio_config: Optional[cloud_tts.AudioConfig] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> cloud_tts.SynthesizeSpeechResponse:
+        r"""Synthesizes speech synchronously: receive results
+        after all text input has been processed.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import texttospeech_v1beta1
+
+            def sample_synthesize_speech():
+                # Create a client
+                client = texttospeech_v1beta1.TextToSpeechClient()
+
+                # Initialize request argument(s)
+                input = texttospeech_v1beta1.SynthesisInput()
+                input.text = "text_value"
+
+                voice = texttospeech_v1beta1.VoiceSelectionParams()
+                voice.language_code = "language_code_value"
+
+                audio_config = texttospeech_v1beta1.AudioConfig()
+                audio_config.audio_encoding = "ALAW"
+
+                request = texttospeech_v1beta1.SynthesizeSpeechRequest(
+                    input=input,
+                    voice=voice,
+                    audio_config=audio_config,
+                )
+
+                # Make the request
+                response = client.synthesize_speech(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.texttospeech_v1beta1.types.SynthesizeSpeechRequest, dict]):
+                The request object. The top-level message sent by the client for the
+                ``SynthesizeSpeech`` method.
+            input (google.cloud.texttospeech_v1beta1.types.SynthesisInput):
+                Required. The Synthesizer requires
+                either plain text or SSML as input.
+
+                This corresponds to the ``input`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            voice (google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams):
+                Required. The desired voice of the
+                synthesized audio.
+
+                This corresponds to the ``voice`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            audio_config (google.cloud.texttospeech_v1beta1.types.AudioConfig):
+                Required. The configuration of the
+                synthesized audio.
+
+                This corresponds to the ``audio_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.texttospeech_v1beta1.types.SynthesizeSpeechResponse:
+                The message returned to the client by the
+                SynthesizeSpeech method.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([input, voice, audio_config])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a cloud_tts.SynthesizeSpeechRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, cloud_tts.SynthesizeSpeechRequest):
+            request = cloud_tts.SynthesizeSpeechRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if input is not None:
+                request.input = input
+            if voice is not None:
+                request.voice = voice
+            if audio_config is not None:
+                request.audio_config = audio_config
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.synthesize_speech]
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def __enter__(self) -> "TextToSpeechClient":
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Releases underlying transport's resources.
+
+        .. warning::
+            ONLY use as a context manager if the transport is NOT shared
+            with other clients! Exiting the with block will CLOSE the transport
+            and may cause errors in other clients!
+        """
+        self.transport.close()
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+__all__ = ("TextToSpeechClient",)
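[Editorial aside: the ``.. warning::`` in ``__exit__`` above is worth taking literally; the context-manager form is only safe when the client owns its transport. A sketch of the intended usage:]

    # The transport is closed on exit, so do not reuse this client
    # (or share its transport with another client) afterwards.
    with texttospeech_v1beta1.TextToSpeechClient() as client:
        voices = client.list_voices()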
diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/__init__.py
new file mode 100644
index 000000000000..4695cd402827
--- /dev/null
+++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/__init__.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import TextToSpeechTransport
+from .grpc import TextToSpeechGrpcTransport
+from .grpc_asyncio import TextToSpeechGrpcAsyncIOTransport
+from .rest import TextToSpeechRestInterceptor, TextToSpeechRestTransport
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[TextToSpeechTransport]]
+_transport_registry["grpc"] = TextToSpeechGrpcTransport
+_transport_registry["grpc_asyncio"] = TextToSpeechGrpcAsyncIOTransport
+_transport_registry["rest"] = TextToSpeechRestTransport
+
+__all__ = (
+    "TextToSpeechTransport",
+    "TextToSpeechGrpcTransport",
+    "TextToSpeechGrpcAsyncIOTransport",
+    "TextToSpeechRestTransport",
+    "TextToSpeechRestInterceptor",
+)
diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py
new file mode 100644
index 000000000000..8cac0d61316b
--- /dev/null
+++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/base.py
@@ -0,0 +1,173 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.texttospeech_v1beta1 import gapic_version as package_version
+from google.cloud.texttospeech_v1beta1.types import cloud_tts
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+class TextToSpeechTransport(abc.ABC):
+    """Abstract transport class for TextToSpeech."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    DEFAULT_HOST: str = "texttospeech.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_voices: gapic_v1.method.wrap_method( + self.list_voices, + default_timeout=None, + client_info=client_info, + ), + self.synthesize_speech: gapic_v1.method.wrap_method( + self.synthesize_speech, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_voices( + self, + ) -> Callable[ + [cloud_tts.ListVoicesRequest], + Union[cloud_tts.ListVoicesResponse, Awaitable[cloud_tts.ListVoicesResponse]], + ]: + raise NotImplementedError() + + @property + def synthesize_speech( + self, + ) -> Callable[ + [cloud_tts.SynthesizeSpeechRequest], + Union[ + cloud_tts.SynthesizeSpeechResponse, + Awaitable[cloud_tts.SynthesizeSpeechResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TextToSpeechTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py new file mode 100644 index 000000000000..cfd0258047b3 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts + +from .base import DEFAULT_CLIENT_INFO, TextToSpeechTransport + + +class TextToSpeechGrpcTransport(TextToSpeechTransport): + """gRPC backend transport for TextToSpeech. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_voices( + self, + ) -> Callable[[cloud_tts.ListVoicesRequest], cloud_tts.ListVoicesResponse]: + r"""Return a callable for the list voices method over gRPC. + + Returns a list of Voice supported for synthesis. 
+ + Returns: + Callable[[~.ListVoicesRequest], + ~.ListVoicesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_voices" not in self._stubs: + self._stubs["list_voices"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1beta1.TextToSpeech/ListVoices", + request_serializer=cloud_tts.ListVoicesRequest.serialize, + response_deserializer=cloud_tts.ListVoicesResponse.deserialize, + ) + return self._stubs["list_voices"] + + @property + def synthesize_speech( + self, + ) -> Callable[ + [cloud_tts.SynthesizeSpeechRequest], cloud_tts.SynthesizeSpeechResponse + ]: + r"""Return a callable for the synthesize speech method over gRPC. + + Synthesizes speech synchronously: receive results + after all text input has been processed. + + Returns: + Callable[[~.SynthesizeSpeechRequest], + ~.SynthesizeSpeechResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "synthesize_speech" not in self._stubs: + self._stubs["synthesize_speech"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1beta1.TextToSpeech/SynthesizeSpeech", + request_serializer=cloud_tts.SynthesizeSpeechRequest.serialize, + response_deserializer=cloud_tts.SynthesizeSpeechResponse.deserialize, + ) + return self._stubs["synthesize_speech"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TextToSpeechGrpcTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py new file mode 100644 index 000000000000..086d094b33f1 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/grpc_asyncio.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts + +from .base import DEFAULT_CLIENT_INFO, TextToSpeechTransport +from .grpc import TextToSpeechGrpcTransport + + +class TextToSpeechGrpcAsyncIOTransport(TextToSpeechTransport): + """gRPC AsyncIO backend transport for TextToSpeech. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_voices( + self, + ) -> Callable[ + [cloud_tts.ListVoicesRequest], Awaitable[cloud_tts.ListVoicesResponse] + ]: + r"""Return a callable for the list voices method over gRPC. + + Returns a list of Voice supported for synthesis. + + Returns: + Callable[[~.ListVoicesRequest], + Awaitable[~.ListVoicesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_voices" not in self._stubs: + self._stubs["list_voices"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1beta1.TextToSpeech/ListVoices", + request_serializer=cloud_tts.ListVoicesRequest.serialize, + response_deserializer=cloud_tts.ListVoicesResponse.deserialize, + ) + return self._stubs["list_voices"] + + @property + def synthesize_speech( + self, + ) -> Callable[ + [cloud_tts.SynthesizeSpeechRequest], + Awaitable[cloud_tts.SynthesizeSpeechResponse], + ]: + r"""Return a callable for the synthesize speech method over gRPC. + + Synthesizes speech synchronously: receive results + after all text input has been processed. + + Returns: + Callable[[~.SynthesizeSpeechRequest], + Awaitable[~.SynthesizeSpeechResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "synthesize_speech" not in self._stubs: + self._stubs["synthesize_speech"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1beta1.TextToSpeech/SynthesizeSpeech", + request_serializer=cloud_tts.SynthesizeSpeechRequest.serialize, + response_deserializer=cloud_tts.SynthesizeSpeechResponse.deserialize, + ) + return self._stubs["synthesize_speech"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TextToSpeechGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py new file mode 100644 index 000000000000..cf1f517b709e --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech/transports/rest.py @@ -0,0 +1,438 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TextToSpeechTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TextToSpeechRestInterceptor: + """Interceptor for TextToSpeech. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TextToSpeechRestTransport. + + .. 
code-block:: python + class MyCustomTextToSpeechInterceptor(TextToSpeechRestInterceptor): + def pre_list_voices(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_voices(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_synthesize_speech(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_synthesize_speech(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TextToSpeechRestTransport(interceptor=MyCustomTextToSpeechInterceptor()) + client = TextToSpeechClient(transport=transport) + + + """ + + def pre_list_voices( + self, request: cloud_tts.ListVoicesRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloud_tts.ListVoicesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_voices + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeech server. + """ + return request, metadata + + def post_list_voices( + self, response: cloud_tts.ListVoicesResponse + ) -> cloud_tts.ListVoicesResponse: + """Post-rpc interceptor for list_voices + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeech server but before + it is returned to user code. + """ + return response + + def pre_synthesize_speech( + self, + request: cloud_tts.SynthesizeSpeechRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_tts.SynthesizeSpeechRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for synthesize_speech + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeech server. + """ + return request, metadata + + def post_synthesize_speech( + self, response: cloud_tts.SynthesizeSpeechResponse + ) -> cloud_tts.SynthesizeSpeechResponse: + """Post-rpc interceptor for synthesize_speech + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeech server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TextToSpeechRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TextToSpeechRestInterceptor + + +class TextToSpeechRestTransport(TextToSpeechTransport): + """REST backend transport for TextToSpeech. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TextToSpeechRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+
+        NOTE: This REST transport functionality is currently in a beta
+        state (preview). We welcome your feedback via a GitHub issue in
+        this library's repository. Thank you!
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional(Sequence[str])): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TextToSpeechRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _ListVoices(TextToSpeechRestStub):
+        def __hash__(self):
+            return hash("ListVoices")
+
+        def __call__(
+            self,
+            request: cloud_tts.ListVoicesRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> cloud_tts.ListVoicesResponse:
+            r"""Call the list voices method over HTTP.
+
+            Args:
+                request (~.cloud_tts.ListVoicesRequest):
+                    The request object. The top-level message sent by the client for the
+                    ``ListVoices`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.cloud_tts.ListVoicesResponse:
+                    The message returned to the client by the ``ListVoices``
+                    method.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/voices", + }, + ] + request, metadata = self._interceptor.pre_list_voices(request, metadata) + pb_request = cloud_tts.ListVoicesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_tts.ListVoicesResponse() + pb_resp = cloud_tts.ListVoicesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_voices(resp) + return resp + + class _SynthesizeSpeech(TextToSpeechRestStub): + def __hash__(self): + return hash("SynthesizeSpeech") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_tts.SynthesizeSpeechRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_tts.SynthesizeSpeechResponse: + r"""Call the synthesize speech method over HTTP. + + Args: + request (~.cloud_tts.SynthesizeSpeechRequest): + The request object. The top-level message sent by the client for the + ``SynthesizeSpeech`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_tts.SynthesizeSpeechResponse: + The message returned to the client by the + ``SynthesizeSpeech`` method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/text:synthesize", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_synthesize_speech( + request, metadata + ) + pb_request = cloud_tts.SynthesizeSpeechRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_tts.SynthesizeSpeechResponse() + pb_resp = cloud_tts.SynthesizeSpeechResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_synthesize_speech(resp) + return resp + + @property + def list_voices( + self, + ) -> Callable[[cloud_tts.ListVoicesRequest], cloud_tts.ListVoicesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListVoices(self._session, self._host, self._interceptor) # type: ignore + + @property + def synthesize_speech( + self, + ) -> Callable[ + [cloud_tts.SynthesizeSpeechRequest], cloud_tts.SynthesizeSpeechResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SynthesizeSpeech(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TextToSpeechRestTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/__init__.py new file mode 100644 index 000000000000..d46ede2765ca --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import TextToSpeechLongAudioSynthesizeAsyncClient +from .client import TextToSpeechLongAudioSynthesizeClient + +__all__ = ( + "TextToSpeechLongAudioSynthesizeClient", + "TextToSpeechLongAudioSynthesizeAsyncClient", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py new file mode 100644 index 000000000000..b7f74e239232 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/async_client.py @@ -0,0 +1,354 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts_lrs + +from .client import TextToSpeechLongAudioSynthesizeClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + TextToSpeechLongAudioSynthesizeTransport, +) +from .transports.grpc_asyncio import TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport + + +class TextToSpeechLongAudioSynthesizeAsyncClient: + """Service that implements Google Cloud Text-to-Speech API.""" + + _client: TextToSpeechLongAudioSynthesizeClient + + DEFAULT_ENDPOINT = TextToSpeechLongAudioSynthesizeClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TextToSpeechLongAudioSynthesizeClient.DEFAULT_MTLS_ENDPOINT + + model_path = staticmethod(TextToSpeechLongAudioSynthesizeClient.model_path) + parse_model_path = staticmethod( + TextToSpeechLongAudioSynthesizeClient.parse_model_path + ) + common_billing_account_path = staticmethod( + TextToSpeechLongAudioSynthesizeClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TextToSpeechLongAudioSynthesizeClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + 
TextToSpeechLongAudioSynthesizeClient.common_folder_path
+    )
+    parse_common_folder_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.parse_common_folder_path
+    )
+    common_organization_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.common_organization_path
+    )
+    parse_common_organization_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.common_project_path
+    )
+    parse_common_project_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.common_location_path
+    )
+    parse_common_location_path = staticmethod(
+        TextToSpeechLongAudioSynthesizeClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeAsyncClient: The constructed client.
+        """
+        return TextToSpeechLongAudioSynthesizeClient.from_service_account_info.__func__(TextToSpeechLongAudioSynthesizeAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeAsyncClient: The constructed client.
+        """
+        return TextToSpeechLongAudioSynthesizeClient.from_service_account_file.__func__(TextToSpeechLongAudioSynthesizeAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return TextToSpeechLongAudioSynthesizeClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> TextToSpeechLongAudioSynthesizeTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(TextToSpeechLongAudioSynthesizeClient).get_transport_class,
+        type(TextToSpeechLongAudioSynthesizeClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[
+            str, TextToSpeechLongAudioSynthesizeTransport
+        ] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the text to speech long audio synthesize client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.TextToSpeechLongAudioSynthesizeTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = TextToSpeechLongAudioSynthesizeClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def synthesize_long_audio(
+        self,
+        request: Optional[Union[cloud_tts_lrs.SynthesizeLongAudioRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Synthesizes long form text asynchronously.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1beta1 + + async def sample_synthesize_long_audio(): + # Create a client + client = texttospeech_v1beta1.TextToSpeechLongAudioSynthesizeAsyncClient() + + # Initialize request argument(s) + input = texttospeech_v1beta1.SynthesisInput() + input.text = "text_value" + + audio_config = texttospeech_v1beta1.AudioConfig() + audio_config.audio_encoding = "ALAW" + + voice = texttospeech_v1beta1.VoiceSelectionParams() + voice.language_code = "language_code_value" + + request = texttospeech_v1beta1.SynthesizeLongAudioRequest( + input=input, + audio_config=audio_config, + output_gcs_uri="output_gcs_uri_value", + voice=voice, + ) + + # Make the request + operation = client.synthesize_long_audio(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.texttospeech_v1beta1.types.SynthesizeLongAudioRequest, dict]]): + The request object. The top-level message sent by the client for the + ``SynthesizeLongAudio`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.texttospeech_v1beta1.types.SynthesizeLongAudioResponse` + The message returned to the client by the + SynthesizeLongAudio method. + + """ + # Create or coerce a protobuf request object. + request = cloud_tts_lrs.SynthesizeLongAudioRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.synthesize_long_audio, + default_timeout=5000.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_tts_lrs.SynthesizeLongAudioResponse, + metadata_type=cloud_tts_lrs.SynthesizeLongAudioMetadata, + ) + + # Done; return the response. 
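+        # Note: the AsyncOperation above is a client-side view of the server's
+        # long-running operation; awaiting ``response.result()`` polls until
+        # the synthesized audio has been written to ``output_gcs_uri``.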
+ return response + + async def __aenter__(self) -> "TextToSpeechLongAudioSynthesizeAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextToSpeechLongAudioSynthesizeAsyncClient",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py new file mode 100644 index 000000000000..4a55924e1749 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/client.py @@ -0,0 +1,589 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts_lrs + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + TextToSpeechLongAudioSynthesizeTransport, +) +from .transports.grpc import TextToSpeechLongAudioSynthesizeGrpcTransport +from .transports.grpc_asyncio import TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport +from .transports.rest import TextToSpeechLongAudioSynthesizeRestTransport + + +class TextToSpeechLongAudioSynthesizeClientMeta(type): + """Metaclass for the TextToSpeechLongAudioSynthesize client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[TextToSpeechLongAudioSynthesizeTransport]]
+    _transport_registry["grpc"] = TextToSpeechLongAudioSynthesizeGrpcTransport
+    _transport_registry[
+        "grpc_asyncio"
+    ] = TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport
+    _transport_registry["rest"] = TextToSpeechLongAudioSynthesizeRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[TextToSpeechLongAudioSynthesizeTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class TextToSpeechLongAudioSynthesizeClient(
+    metaclass=TextToSpeechLongAudioSynthesizeClientMeta
+):
+    """Service that implements Google Cloud Text-to-Speech API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "texttospeech.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> TextToSpeechLongAudioSynthesizeTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            TextToSpeechLongAudioSynthesizeTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def model_path(
+        project: str,
+        location: str,
+        model: str,
+    ) -> str:
+        """Returns a fully-qualified model string."""
+        return "projects/{project}/locations/{location}/models/{model}".format(
+            project=project,
+            location=location,
+            model=model,
+        )
+
+    @staticmethod
+    def parse_model_path(path: str) -> Dict[str, str]:
+        """Parses a model path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/models/(?P<model>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[
+            Union[str, TextToSpeechLongAudioSynthesizeTransport]
+        ] = None,
+        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the text to speech long audio synthesize client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, TextToSpeechLongAudioSynthesizeTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+                NOTE: "rest" transport functionality is currently in a
+                beta state (preview). We welcome your feedback via an
+                issue in this library's source repository.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TextToSpeechLongAudioSynthesizeTransport): + # transport is a TextToSpeechLongAudioSynthesizeTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def synthesize_long_audio( + self, + request: Optional[Union[cloud_tts_lrs.SynthesizeLongAudioRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Synthesizes long form text asynchronously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import texttospeech_v1beta1 + + def sample_synthesize_long_audio(): + # Create a client + client = texttospeech_v1beta1.TextToSpeechLongAudioSynthesizeClient() + + # Initialize request argument(s) + input = texttospeech_v1beta1.SynthesisInput() + input.text = "text_value" + + audio_config = texttospeech_v1beta1.AudioConfig() + audio_config.audio_encoding = "ALAW" + + voice = texttospeech_v1beta1.VoiceSelectionParams() + voice.language_code = "language_code_value" + + request = texttospeech_v1beta1.SynthesizeLongAudioRequest( + input=input, + audio_config=audio_config, + output_gcs_uri="output_gcs_uri_value", + voice=voice, + ) + + # Make the request + operation = client.synthesize_long_audio(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.texttospeech_v1beta1.types.SynthesizeLongAudioRequest, dict]): + The request object. The top-level message sent by the client for the + ``SynthesizeLongAudio`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.texttospeech_v1beta1.types.SynthesizeLongAudioResponse` + The message returned to the client by the + SynthesizeLongAudio method. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a cloud_tts_lrs.SynthesizeLongAudioRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_tts_lrs.SynthesizeLongAudioRequest): + request = cloud_tts_lrs.SynthesizeLongAudioRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.synthesize_long_audio] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_tts_lrs.SynthesizeLongAudioResponse, + metadata_type=cloud_tts_lrs.SynthesizeLongAudioMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TextToSpeechLongAudioSynthesizeClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextToSpeechLongAudioSynthesizeClient",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/__init__.py new file mode 100644 index 000000000000..4132045dfc54 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TextToSpeechLongAudioSynthesizeTransport +from .grpc import TextToSpeechLongAudioSynthesizeGrpcTransport +from .grpc_asyncio import TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport +from .rest import ( + TextToSpeechLongAudioSynthesizeRestInterceptor, + TextToSpeechLongAudioSynthesizeRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[TextToSpeechLongAudioSynthesizeTransport]] +_transport_registry["grpc"] = TextToSpeechLongAudioSynthesizeGrpcTransport +_transport_registry[ + "grpc_asyncio" +] = TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport +_transport_registry["rest"] = TextToSpeechLongAudioSynthesizeRestTransport + +__all__ = ( + "TextToSpeechLongAudioSynthesizeTransport", + "TextToSpeechLongAudioSynthesizeGrpcTransport", + "TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport", + "TextToSpeechLongAudioSynthesizeRestTransport", + "TextToSpeechLongAudioSynthesizeRestInterceptor", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/base.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/base.py new file mode 100644 index 000000000000..70bfcfe8fd09 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/base.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.texttospeech_v1beta1 import gapic_version as package_version +from google.cloud.texttospeech_v1beta1.types import cloud_tts_lrs + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TextToSpeechLongAudioSynthesizeTransport(abc.ABC): + """Abstract transport class for TextToSpeechLongAudioSynthesize.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "texttospeech.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
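+        # Self-signed JWTs let service-account credentials mint a bearer token
+        # locally (signed with the account's own private key) instead of
+        # exchanging an assertion with the OAuth token endpoint, saving a
+        # network round trip on each token refresh.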
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.synthesize_long_audio: gapic_v1.method.wrap_method( + self.synthesize_long_audio, + default_timeout=5000.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def synthesize_long_audio( + self, + ) -> Callable[ + [cloud_tts_lrs.SynthesizeLongAudioRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TextToSpeechLongAudioSynthesizeTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc.py new file mode 100644 index 000000000000..876b080a9876 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts_lrs + +from .base import DEFAULT_CLIENT_INFO, TextToSpeechLongAudioSynthesizeTransport + + +class TextToSpeechLongAudioSynthesizeGrpcTransport( + TextToSpeechLongAudioSynthesizeTransport +): + """gRPC backend transport for TextToSpeechLongAudioSynthesize. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def synthesize_long_audio( + self, + ) -> Callable[[cloud_tts_lrs.SynthesizeLongAudioRequest], operations_pb2.Operation]: + r"""Return a callable for the synthesize long audio method over gRPC. + + Synthesizes long form text asynchronously. + + Returns: + Callable[[~.SynthesizeLongAudioRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "synthesize_long_audio" not in self._stubs: + self._stubs["synthesize_long_audio"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1beta1.TextToSpeechLongAudioSynthesize/SynthesizeLongAudio", + request_serializer=cloud_tts_lrs.SynthesizeLongAudioRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["synthesize_long_audio"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TextToSpeechLongAudioSynthesizeGrpcTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py new file mode 100644 index 000000000000..040c809e0f32 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/grpc_asyncio.py @@ -0,0 +1,286 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
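With the synchronous gRPC transport complete, ``synthesize_long_audio`` surfaces through the client as a long-running operation. An end-to-end sketch under stated assumptions (the client class plus the project and bucket names are illustrative and not defined in this diff):

    from google.cloud import texttospeech_v1beta1

    # transport="grpc" selects the gRPC transport defined above.
    client = texttospeech_v1beta1.TextToSpeechLongAudioSynthesizeClient(
        transport="grpc"
    )

    request = texttospeech_v1beta1.SynthesizeLongAudioRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical project
        input=texttospeech_v1beta1.SynthesisInput(text="Hello from long audio."),
        audio_config=texttospeech_v1beta1.AudioConfig(
            audio_encoding=texttospeech_v1beta1.AudioEncoding.LINEAR16
        ),
        output_gcs_uri="gs://my-bucket/hello.wav",  # hypothetical bucket
        voice=texttospeech_v1beta1.VoiceSelectionParams(language_code="en-US"),
    )

    operation = client.synthesize_long_audio(request=request)
    response = operation.result(timeout=300)  # blocks until the LRO finishes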
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts_lrs + +from .base import DEFAULT_CLIENT_INFO, TextToSpeechLongAudioSynthesizeTransport +from .grpc import TextToSpeechLongAudioSynthesizeGrpcTransport + + +class TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport( + TextToSpeechLongAudioSynthesizeTransport +): + """gRPC AsyncIO backend transport for TextToSpeechLongAudioSynthesize. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def synthesize_long_audio( + self, + ) -> Callable[ + [cloud_tts_lrs.SynthesizeLongAudioRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the synthesize long audio method over gRPC. + + Synthesizes long form text asynchronously. + + Returns: + Callable[[~.SynthesizeLongAudioRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
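        # (Usage sketch, hedged: user code reaches this callable through the
        # generated async client, e.g.
        #
        #     operation = await client.synthesize_long_audio(request=request)
        #     response = await operation.result()
        #
        # where ``client`` is a TextToSpeechLongAudioSynthesizeAsyncClient and
        # ``request`` a SynthesizeLongAudioRequest; both live outside this diff.)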
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "synthesize_long_audio" not in self._stubs: + self._stubs["synthesize_long_audio"] = self.grpc_channel.unary_unary( + "/google.cloud.texttospeech.v1beta1.TextToSpeechLongAudioSynthesize/SynthesizeLongAudio", + request_serializer=cloud_tts_lrs.SynthesizeLongAudioRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["synthesize_long_audio"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/rest.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/rest.py new file mode 100644 index 000000000000..86ecb24a5e07 --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/services/text_to_speech_long_audio_synthesize/transports/rest.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.texttospeech_v1beta1.types import cloud_tts_lrs + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import TextToSpeechLongAudioSynthesizeTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TextToSpeechLongAudioSynthesizeRestInterceptor: + """Interceptor for TextToSpeechLongAudioSynthesize. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TextToSpeechLongAudioSynthesizeRestTransport. + + .. code-block:: python + class MyCustomTextToSpeechLongAudioSynthesizeInterceptor(TextToSpeechLongAudioSynthesizeRestInterceptor): + def pre_synthesize_long_audio(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_synthesize_long_audio(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TextToSpeechLongAudioSynthesizeRestTransport(interceptor=MyCustomTextToSpeechLongAudioSynthesizeInterceptor()) + client = TextToSpeechLongAudioSynthesizeClient(transport=transport) + + + """ + + def pre_synthesize_long_audio( + self, + request: cloud_tts_lrs.SynthesizeLongAudioRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloud_tts_lrs.SynthesizeLongAudioRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for synthesize_long_audio + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextToSpeechLongAudioSynthesize server. + """ + return request, metadata + + def post_synthesize_long_audio( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for synthesize_long_audio + + Override in a subclass to manipulate the response + after it is returned by the TextToSpeechLongAudioSynthesize server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TextToSpeechLongAudioSynthesizeRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TextToSpeechLongAudioSynthesizeRestInterceptor + + +class TextToSpeechLongAudioSynthesizeRestTransport( + TextToSpeechLongAudioSynthesizeTransport +): + """REST backend transport for TextToSpeechLongAudioSynthesize. + + Service that implements Google Cloud Text-to-Speech API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "texttospeech.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TextToSpeechLongAudioSynthesizeRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = (
+            interceptor or TextToSpeechLongAudioSynthesizeRestInterceptor()
+        )
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {}
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1beta1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
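        # (Hedged aside: the cached client exposes the standard LRO surface,
        # e.g. polling any operation by resource name:
        #
        #     op = transport.operations_client.get_operation(
        #         "projects/my-project/locations/us-central1/operations/123"  # hypothetical
        #     )
        #
        # The name comes from the Operation returned by SynthesizeLongAudio.)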
+ return self._operations_client + + class _SynthesizeLongAudio(TextToSpeechLongAudioSynthesizeRestStub): + def __hash__(self): + return hash("SynthesizeLongAudio") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloud_tts_lrs.SynthesizeLongAudioRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the synthesize long audio method over HTTP. + + Args: + request (~.cloud_tts_lrs.SynthesizeLongAudioRequest): + The request object. The top-level message sent by the client for the + ``SynthesizeLongAudio`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{parent=projects/*/locations/*/voices/*}:SynthesizeLongAudio", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_synthesize_long_audio( + request, metadata + ) + pb_request = cloud_tts_lrs.SynthesizeLongAudioRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_synthesize_long_audio(resp) + return resp + + @property + def synthesize_long_audio( + self, + ) -> Callable[[cloud_tts_lrs.SynthesizeLongAudioRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SynthesizeLongAudio(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TextToSpeechLongAudioSynthesizeRestTransport",) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/__init__.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/__init__.py new file mode 100644 index 000000000000..ee2ea221842b --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/__init__.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .cloud_tts import ( + AudioConfig, + AudioEncoding, + CustomVoiceParams, + ListVoicesRequest, + ListVoicesResponse, + SsmlVoiceGender, + SynthesisInput, + SynthesizeSpeechRequest, + SynthesizeSpeechResponse, + Timepoint, + Voice, + VoiceSelectionParams, +) +from .cloud_tts_lrs import ( + SynthesizeLongAudioMetadata, + SynthesizeLongAudioRequest, + SynthesizeLongAudioResponse, +) + +__all__ = ( + "AudioConfig", + "CustomVoiceParams", + "ListVoicesRequest", + "ListVoicesResponse", + "SynthesisInput", + "SynthesizeSpeechRequest", + "SynthesizeSpeechResponse", + "Timepoint", + "Voice", + "VoiceSelectionParams", + "AudioEncoding", + "SsmlVoiceGender", + "SynthesizeLongAudioMetadata", + "SynthesizeLongAudioRequest", + "SynthesizeLongAudioResponse", +) diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py new file mode 100644 index 000000000000..7c001188c90e --- /dev/null +++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts.py @@ -0,0 +1,518 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
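The ``types`` package above re-exports every message and enum defined in the modules that follow, so one import suffices for callers; a small sketch using only names re-exported above:

    from google.cloud.texttospeech_v1beta1 import types

    config = types.AudioConfig(
        audio_encoding=types.AudioEncoding.MP3,
        speaking_rate=1.0,
    )
    request = types.SynthesizeLongAudioRequest(audio_config=config)
    print(types.AudioEncoding(config.audio_encoding).name)  # "MP3"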
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package="google.cloud.texttospeech.v1beta1",
+    manifest={
+        "SsmlVoiceGender",
+        "AudioEncoding",
+        "ListVoicesRequest",
+        "ListVoicesResponse",
+        "Voice",
+        "SynthesizeSpeechRequest",
+        "SynthesisInput",
+        "VoiceSelectionParams",
+        "AudioConfig",
+        "CustomVoiceParams",
+        "SynthesizeSpeechResponse",
+        "Timepoint",
+    },
+)
+
+
+class SsmlVoiceGender(proto.Enum):
+    r"""Gender of the voice as described in `SSML voice
+    element <https://www.w3.org/TR/speech-synthesis11/#edef_voice>`__.
+
+    Values:
+        SSML_VOICE_GENDER_UNSPECIFIED (0):
+            An unspecified gender.
+            In VoiceSelectionParams, this means that the
+            client doesn't care which gender the selected
+            voice will have. In the Voice field of
+            ListVoicesResponse, this may mean that the voice
+            doesn't fit any of the other categories in this
+            enum, or that the gender of the voice isn't
+            known.
+        MALE (1):
+            A male voice.
+        FEMALE (2):
+            A female voice.
+        NEUTRAL (3):
+            A gender-neutral voice. This voice is not yet
+            supported.
+    """
+    SSML_VOICE_GENDER_UNSPECIFIED = 0
+    MALE = 1
+    FEMALE = 2
+    NEUTRAL = 3
+
+
+class AudioEncoding(proto.Enum):
+    r"""Configuration to set up audio encoder. The encoding
+    determines the output audio format that we'd like.
+
+    Values:
+        AUDIO_ENCODING_UNSPECIFIED (0):
+            Not specified. Will return result
+            [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT].
+        LINEAR16 (1):
+            Uncompressed 16-bit signed little-endian
+            samples (Linear PCM). Audio content returned as
+            LINEAR16 also contains a WAV header.
+        MP3 (2):
+            MP3 audio at 32kbps.
+        MP3_64_KBPS (4):
+            MP3 at 64kbps.
+        OGG_OPUS (3):
+            Opus encoded audio wrapped in an ogg
+            container. The result will be a file which can
+            be played natively on Android, and in browsers
+            (at least Chrome and Firefox). The quality of
+            the encoding is considerably higher than MP3
+            while using approximately the same bitrate.
+        MULAW (5):
+            8-bit samples that compand 14-bit audio
+            samples using G.711 PCMU/mu-law. Audio content
+            returned as MULAW also contains a WAV header.
+        ALAW (6):
+            8-bit samples that compand 14-bit audio
+            samples using G.711 PCMU/A-law. Audio content
+            returned as ALAW also contains a WAV header.
+    """
+    AUDIO_ENCODING_UNSPECIFIED = 0
+    LINEAR16 = 1
+    MP3 = 2
+    MP3_64_KBPS = 4
+    OGG_OPUS = 3
+    MULAW = 5
+    ALAW = 6
+
+
+class ListVoicesRequest(proto.Message):
+    r"""The top-level message sent by the client for the ``ListVoices``
+    method.
+
+    Attributes:
+        language_code (str):
+            Optional. Recommended.
+            `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
+            language tag. If not specified, the API will return all
+            supported voices. If specified, the ListVoices call will
+            only return voices that can be used to synthesize this
+            language_code. For example, if you specify ``"en-NZ"``, all
+            ``"en-NZ"`` voices will be returned. If you specify
+            ``"no"``, both ``"no-\*"`` (Norwegian) and ``"nb-\*"``
+            (Norwegian Bokmal) voices will be returned.
+    """
+
+    language_code: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListVoicesResponse(proto.Message):
+    r"""The message returned to the client by the ``ListVoices`` method.
+
+    Attributes:
+        voices (MutableSequence[google.cloud.texttospeech_v1beta1.types.Voice]):
+            The list of voices.
+    """
+
+    voices: MutableSequence["Voice"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="Voice",
+    )
+
+
+class Voice(proto.Message):
+    r"""Description of a voice supported by the TTS service.
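``ListVoicesRequest.language_code`` acts as a prefix-style filter, as its docstring above describes; a hedged sketch against the existing v1beta1 client (the client class is outside this diff, and the ``Voice`` fields used here are listed just below):

    from google.cloud import texttospeech_v1beta1

    client = texttospeech_v1beta1.TextToSpeechClient()
    response = client.list_voices(
        request=texttospeech_v1beta1.ListVoicesRequest(language_code="no")
    )
    # Per the docstring, this returns both "no-*" (Norwegian) and
    # "nb-*" (Norwegian Bokmal) voices.
    for voice in response.voices:
        print(voice.name, list(voice.language_codes), voice.natural_sample_rate_hertz)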
+
+    Attributes:
+        language_codes (MutableSequence[str]):
+            The languages that this voice supports, expressed as
+            `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
+            language tags (e.g. "en-US", "es-419", "cmn-tw").
+        name (str):
+            The name of this voice. Each distinct voice
+            has a unique name.
+        ssml_gender (google.cloud.texttospeech_v1beta1.types.SsmlVoiceGender):
+            The gender of this voice.
+        natural_sample_rate_hertz (int):
+            The natural sample rate (in hertz) for this
+            voice.
+    """
+
+    language_codes: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=1,
+    )
+    name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    ssml_gender: "SsmlVoiceGender" = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum="SsmlVoiceGender",
+    )
+    natural_sample_rate_hertz: int = proto.Field(
+        proto.INT32,
+        number=4,
+    )
+
+
+class SynthesizeSpeechRequest(proto.Message):
+    r"""The top-level message sent by the client for the
+    ``SynthesizeSpeech`` method.
+
+    Attributes:
+        input (google.cloud.texttospeech_v1beta1.types.SynthesisInput):
+            Required. The Synthesizer requires either
+            plain text or SSML as input.
+        voice (google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams):
+            Required. The desired voice of the
+            synthesized audio.
+        audio_config (google.cloud.texttospeech_v1beta1.types.AudioConfig):
+            Required. The configuration of the
+            synthesized audio.
+        enable_time_pointing (MutableSequence[google.cloud.texttospeech_v1beta1.types.SynthesizeSpeechRequest.TimepointType]):
+            Whether and what timepoints are returned in
+            the response.
+    """
+
+    class TimepointType(proto.Enum):
+        r"""The type of timepoint information that is returned in the
+        response.
+
+        Values:
+            TIMEPOINT_TYPE_UNSPECIFIED (0):
+                Not specified. No timepoint information will
+                be returned.
+            SSML_MARK (1):
+                Timepoint information of ``<mark>`` tags in SSML input will
+                be returned.
+        """
+        TIMEPOINT_TYPE_UNSPECIFIED = 0
+        SSML_MARK = 1
+
+    input: "SynthesisInput" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="SynthesisInput",
+    )
+    voice: "VoiceSelectionParams" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="VoiceSelectionParams",
+    )
+    audio_config: "AudioConfig" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="AudioConfig",
+    )
+    enable_time_pointing: MutableSequence[TimepointType] = proto.RepeatedField(
+        proto.ENUM,
+        number=4,
+        enum=TimepointType,
+    )
+
+
+class SynthesisInput(proto.Message):
+    r"""Contains text input to be synthesized. Either ``text`` or ``ssml``
+    must be supplied. Supplying both or neither returns
+    [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT].
+    The input size is limited to 5000 bytes.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        text (str):
+            The raw text to be synthesized.
+
+            This field is a member of `oneof`_ ``input_source``.
+        ssml (str):
+            The SSML document to be synthesized. The SSML document must
+            be valid and well-formed. Otherwise the RPC will fail and
+            return
+            [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT].
+            For more information, see
+            `SSML <https://cloud.google.com/text-to-speech/docs/ssml>`__.
+
+            This field is a member of `oneof`_ ``input_source``.
+    """
+
+    text: str = proto.Field(
+        proto.STRING,
+        number=1,
+        oneof="input_source",
+    )
+    ssml: str = proto.Field(
+        proto.STRING,
+        number=2,
+        oneof="input_source",
+    )
+
+
+class VoiceSelectionParams(proto.Message):
+    r"""Description of which voice to use for a synthesis request.
+
+    Attributes:
+        language_code (str):
+            Required. The language (and potentially also the region) of
+            the voice expressed as a
+            `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
+            language tag, e.g. "en-US". This should not include a script
+            tag (e.g. use "cmn-cn" rather than "cmn-Hant-cn"), because
+            the script will be inferred from the input provided in the
+            SynthesisInput. The TTS service will use this parameter to
+            help choose an appropriate voice. Note that the TTS service
+            may choose a voice with a slightly different language code
+            than the one selected; it may substitute a different region
+            (e.g. using en-US rather than en-CA if there isn't a
+            Canadian voice available), or even a different language,
+            e.g. using "nb" (Norwegian Bokmal) instead of "no"
+            (Norwegian).
+        name (str):
+            The name of the voice. If not set, the service will choose a
+            voice based on the other parameters such as language_code
+            and gender.
+        ssml_gender (google.cloud.texttospeech_v1beta1.types.SsmlVoiceGender):
+            The preferred gender of the voice. If not set, the service
+            will choose a voice based on the other parameters such as
+            language_code and name. Note that this is only a preference,
+            not requirement; if a voice of the appropriate gender is not
+            available, the synthesizer should substitute a voice with a
+            different gender rather than failing the request.
+        custom_voice (google.cloud.texttospeech_v1beta1.types.CustomVoiceParams):
+            The configuration for a custom voice. If
+            [CustomVoiceParams.model] is set, the service will choose
+            the custom voice matching the specified configuration.
+    """
+
+    language_code: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    ssml_gender: "SsmlVoiceGender" = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum="SsmlVoiceGender",
+    )
+    custom_voice: "CustomVoiceParams" = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message="CustomVoiceParams",
+    )
+
+
+class AudioConfig(proto.Message):
+    r"""Description of audio data to be synthesized.
+
+    Attributes:
+        audio_encoding (google.cloud.texttospeech_v1beta1.types.AudioEncoding):
+            Required. The format of the audio byte
+            stream.
+        speaking_rate (float):
+            Optional. Input only. Speaking rate/speed, in the range
+            [0.25, 4.0]. 1.0 is the normal native speed supported by the
+            specific voice. 2.0 is twice as fast, and 0.5 is half as
+            fast. If unset (0.0), defaults to the native 1.0 speed. Any
+            other values < 0.25 or > 4.0 will return an error.
+        pitch (float):
+            Optional. Input only. Speaking pitch, in the range [-20.0,
+            20.0]. 20 means increase 20 semitones from the original
+            pitch. -20 means decrease 20 semitones from the original
+            pitch.
+        volume_gain_db (float):
+            Optional. Input only. Volume gain (in dB) of the normal
+            native volume supported by the specific voice, in the range
+            [-96.0, 16.0]. If unset, or set to a value of 0.0 (dB), will
+            play at normal native signal amplitude. A value of -6.0 (dB)
+            will play at approximately half the amplitude of the normal
+            native signal amplitude. A value of +6.0 (dB) will play at
+            approximately twice the amplitude of the normal native
+            signal amplitude.
We strongly recommend not exceeding +10 (dB),
+            as there's usually no effective increase in loudness for
+            any value greater than that.
+        sample_rate_hertz (int):
+            Optional. The synthesis sample rate (in hertz) for this
+            audio. When this is specified in SynthesizeSpeechRequest, if
+            this is different from the voice's natural sample rate, then
+            the synthesizer will honor this request by converting to the
+            desired sample rate (which might result in worse audio
+            quality), unless the specified sample rate is not supported
+            for the encoding chosen, in which case it will fail the
+            request and return
+            [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT].
+        effects_profile_id (MutableSequence[str]):
+            Optional. Input only. An identifier which selects 'audio
+            effects' profiles that are applied on (post synthesized)
+            text to speech. Effects are applied on top of each other in
+            the order they are given. See `audio
+            profiles <https://cloud.google.com/text-to-speech/docs/audio-profiles>`__
+            for current supported profile ids.
+    """
+
+    audio_encoding: "AudioEncoding" = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum="AudioEncoding",
+    )
+    speaking_rate: float = proto.Field(
+        proto.DOUBLE,
+        number=2,
+    )
+    pitch: float = proto.Field(
+        proto.DOUBLE,
+        number=3,
+    )
+    volume_gain_db: float = proto.Field(
+        proto.DOUBLE,
+        number=4,
+    )
+    sample_rate_hertz: int = proto.Field(
+        proto.INT32,
+        number=5,
+    )
+    effects_profile_id: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=6,
+    )
+
+
+class CustomVoiceParams(proto.Message):
+    r"""Description of the custom voice to be synthesized.
+
+    Attributes:
+        model (str):
+            Required. The name of the AutoML model that
+            synthesizes the custom voice.
+        reported_usage (google.cloud.texttospeech_v1beta1.types.CustomVoiceParams.ReportedUsage):
+            Optional. The usage of the synthesized audio
+            to be reported.
+    """
+
+    class ReportedUsage(proto.Enum):
+        r"""The usage of the synthesized audio. You must report your
+        honest and correct usage of the service, as it's regulated by
+        contract and will cause a significant difference in billing.
+
+        Values:
+            REPORTED_USAGE_UNSPECIFIED (0):
+                Request with reported usage unspecified will
+                be rejected.
+            REALTIME (1):
+                For scenarios where the synthesized audio is
+                not downloadable and can only be used once. For
+                example, a real-time request in an IVR system.
+            OFFLINE (2):
+                For scenarios where the synthesized audio is
+                downloadable and can be reused. For example, the
+                synthesized audio is downloaded, stored in a
+                customer service system and played repeatedly.
+        """
+        REPORTED_USAGE_UNSPECIFIED = 0
+        REALTIME = 1
+        OFFLINE = 2
+
+    model: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    reported_usage: ReportedUsage = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=ReportedUsage,
+    )
+
+
+class SynthesizeSpeechResponse(proto.Message):
+    r"""The message returned to the client by the ``SynthesizeSpeech``
+    method.
+
+    Attributes:
+        audio_content (bytes):
+            The audio data bytes encoded as specified in the request,
+            including the header for encodings that are wrapped in
+            containers (e.g. MP3, OGG_OPUS). For LINEAR16 audio, we
+            include the WAV header. Note: as with all bytes fields,
+            protobuffers use a pure binary representation, whereas JSON
+            representations use base64.
+        timepoints (MutableSequence[google.cloud.texttospeech_v1beta1.types.Timepoint]):
+            A link between a position in the original request input and
+            a corresponding time in the output audio. It's only
+            supported via ``<mark>`` of SSML input.
+        audio_config (google.cloud.texttospeech_v1beta1.types.AudioConfig):
+            The audio metadata of ``audio_content``.
+    """
+
+    audio_content: bytes = proto.Field(
+        proto.BYTES,
+        number=1,
+    )
+    timepoints: MutableSequence["Timepoint"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message="Timepoint",
+    )
+    audio_config: "AudioConfig" = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message="AudioConfig",
+    )
+
+
+class Timepoint(proto.Message):
+    r"""This contains a mapping between a certain point in the input
+    text and a corresponding time in the output audio.
+
+    Attributes:
+        mark_name (str):
+            Timepoint name as received from the client within ``<mark>``
+            tag.
+        time_seconds (float):
+            Time offset in seconds from the start of the
+            synthesized audio.
+    """
+
+    mark_name: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    time_seconds: float = proto.Field(
+        proto.DOUBLE,
+        number=3,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py
new file mode 100644
index 000000000000..f5a2289e26e1
--- /dev/null
+++ b/packages/google-cloud-texttospeech/google/cloud/texttospeech_v1beta1/types/cloud_tts_lrs.py
@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+from google.protobuf import timestamp_pb2  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.texttospeech_v1beta1.types import cloud_tts
+
+__protobuf__ = proto.module(
+    package="google.cloud.texttospeech.v1beta1",
+    manifest={
+        "SynthesizeLongAudioRequest",
+        "SynthesizeLongAudioResponse",
+        "SynthesizeLongAudioMetadata",
+    },
+)
+
+
+class SynthesizeLongAudioRequest(proto.Message):
+    r"""The top-level message sent by the client for the
+    ``SynthesizeLongAudio`` method.
+
+    Attributes:
+        parent (str):
+            The resource states of the request in the form of
+            ``projects/*/locations/*``.
+        input (google.cloud.texttospeech_v1beta1.types.SynthesisInput):
+            Required. The Synthesizer requires either
+            plain text or SSML as input. While Long Audio is
+            in preview, SSML is temporarily unsupported.
+        audio_config (google.cloud.texttospeech_v1beta1.types.AudioConfig):
+            Required. The configuration of the
+            synthesized audio.
+        output_gcs_uri (str):
+            Required. Specifies a Cloud Storage URI for the synthesis
+            results. Must be specified in the format:
+            ``gs://bucket_name/object_name``, and the bucket must
+            already exist.
+        voice (google.cloud.texttospeech_v1beta1.types.VoiceSelectionParams):
+            Required. The desired voice of the
+            synthesized audio.
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + input: cloud_tts.SynthesisInput = proto.Field( + proto.MESSAGE, + number=2, + message=cloud_tts.SynthesisInput, + ) + audio_config: cloud_tts.AudioConfig = proto.Field( + proto.MESSAGE, + number=3, + message=cloud_tts.AudioConfig, + ) + output_gcs_uri: str = proto.Field( + proto.STRING, + number=4, + ) + voice: cloud_tts.VoiceSelectionParams = proto.Field( + proto.MESSAGE, + number=5, + message=cloud_tts.VoiceSelectionParams, + ) + + +class SynthesizeLongAudioResponse(proto.Message): + r"""The message returned to the client by the ``SynthesizeLongAudio`` + method. + + """ + + +class SynthesizeLongAudioMetadata(proto.Message): + r"""Metadata for response returned by the ``SynthesizeLongAudio`` + method. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the request was received. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Time of the most recent processing update. + progress_percentage (float): + The progress of the most recent processing + update in percentage, ie. 70.0%. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + progress_percentage: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-texttospeech/mypy.ini b/packages/google-cloud-texttospeech/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/google-cloud-texttospeech/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/google-cloud-texttospeech/noxfile.py b/packages/google-cloud-texttospeech/noxfile.py new file mode 100644 index 000000000000..be54712bfa8f --- /dev/null +++ b/packages/google-cloud-texttospeech/noxfile.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
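The sessions this noxfile defines are selected by name on the command line; a small sketch of driving one programmatically (assuming ``nox`` is installed in the environment):

    import subprocess

    # Run only the lint session; any session name registered below
    # ("unit", "system", "docs", ...) can be substituted.
    subprocess.run(["python3", "-m", "nox", "-s", "lint"], check=True)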
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-texttospeech/renovate.json b/packages/google-cloud-texttospeech/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-texttospeech/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-texttospeech/samples/snippets/.gitignore b/packages/google-cloud-texttospeech/samples/snippets/.gitignore new file mode 100644 index 000000000000..8f6514bdc379 --- /dev/null +++ b/packages/google-cloud-texttospeech/samples/snippets/.gitignore @@ -0,0 +1 @@ +output.mp3 \ No newline at end of file diff --git a/packages/google-cloud-texttospeech/samples/snippets/README.md b/packages/google-cloud-texttospeech/samples/snippets/README.md new file mode 100644 index 000000000000..9b4b465055b5 --- /dev/null +++ b/packages/google-cloud-texttospeech/samples/snippets/README.md @@ -0,0 +1,4 @@ +Samples migrated +================ + +New location: https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/texttospeech diff --git a/packages/google-cloud-texttospeech/scripts/decrypt-secrets.sh b/packages/google-cloud-texttospeech/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..0018b421ddf8 --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2023 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." 
+ exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-texttospeech/scripts/fixup_keywords.py b/packages/google-cloud-texttospeech/scripts/fixup_keywords.py new file mode 100644 index 000000000000..c216d0bad6d9 --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/fixup_keywords.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class texttospeechCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'list_voices': ('language_code', ), + 'synthesize_speech': ('input', 'voice', 'audio_config', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=texttospeechCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the texttospeech client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1_keywords.py b/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1_keywords.py new file mode 100644 index 000000000000..6535f681d041 --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1_keywords.py @@ -0,0 +1,178 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class texttospeechCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'list_voices': ('language_code', ), + 'synthesize_long_audio': ('input', 'audio_config', 'output_gcs_uri', 'voice', 'parent', ), + 'synthesize_speech': ('input', 'voice', 'audio_config', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=texttospeechCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the texttospeech client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1beta1_keywords.py b/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1beta1_keywords.py new file mode 100644 index 000000000000..45023da0599b --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/fixup_texttospeech_v1beta1_keywords.py @@ -0,0 +1,178 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
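The two fixup scripts above (and the v1beta1 copy that follows, which differs only in its `METHOD_TO_PARAMS` table) share the same core: `partition` splits a call's arguments into positional and keyword groups, and `leave_Call` folds the flattened parameters into a single `request` dict while the control parameters `retry`, `timeout`, and `metadata` stay as keywords. An illustrative before/after for `synthesize_speech`, whose mapping is `('input', 'voice', 'audio_config')`; `client`, `synthesis_input`, `voice`, and `audio_config` are placeholders, and `partition` is the helper defined in the scripts above.

```python
# partition() is a stable, out-of-place split into (matching, non-matching):
evens, odds = partition(lambda n: n % 2 == 0, [1, 2, 3, 4, 5])
assert (evens, odds) == ([2, 4], [1, 3, 5])

# Before the fixup: the old flattened calling style.
client.synthesize_speech(synthesis_input, voice, audio_config, timeout=30)

# After the fixup: flattened arguments become one `request` dict, and the
# control parameter `timeout` is preserved as a keyword argument.
client.synthesize_speech(
    request={
        'input': synthesis_input,
        'voice': voice,
        'audio_config': audio_config,
    },
    timeout=30,
)
```

This is also why the tool refuses to touch calls that already pass `request=...`: the presence of that keyword marks a call as already fixed.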
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class texttospeechCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'list_voices': ('language_code', ), + 'synthesize_long_audio': ('input', 'audio_config', 'output_gcs_uri', 'voice', 'parent', ), + 'synthesize_speech': ('input', 'voice', 'audio_config', 'enable_time_pointing', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=texttospeechCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the texttospeech client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-texttospeech/scripts/readme-gen/readme_gen.py b/packages/google-cloud-texttospeech/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. 
+ os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-texttospeech/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-texttospeech/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-texttospeech/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-texttospeech/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-texttospeech/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-texttospeech/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. 
+ +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-texttospeech/setup.cfg b/packages/google-cloud-texttospeech/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-texttospeech/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-texttospeech/setup.py b/packages/google-cloud-texttospeech/setup.py new file mode 100644 index 000000000000..73d71dd16c7a --- /dev/null +++ b/packages/google-cloud-texttospeech/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-texttospeech" + + +description = "Google Cloud Texttospeech API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/texttospeech/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-texttospeech/testing/.gitignore b/packages/google-cloud-texttospeech/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-texttospeech/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-texttospeech/testing/constraints-3.10.txt b/packages/google-cloud-texttospeech/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-texttospeech/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-texttospeech/testing/constraints-3.11.txt b/packages/google-cloud-texttospeech/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-texttospeech/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-texttospeech/testing/constraints-3.12.txt b/packages/google-cloud-texttospeech/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-texttospeech/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-texttospeech/testing/constraints-3.7.txt b/packages/google-cloud-texttospeech/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-texttospeech/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-texttospeech/testing/constraints-3.8.txt b/packages/google-cloud-texttospeech/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-texttospeech/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-texttospeech/testing/constraints-3.9.txt b/packages/google-cloud-texttospeech/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-texttospeech/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-texttospeech/testing/secrets.tar.enc b/packages/google-cloud-texttospeech/testing/secrets.tar.enc new file mode 100644 index 000000000000..1a749dbfcc0d Binary files /dev/null and b/packages/google-cloud-texttospeech/testing/secrets.tar.enc differ diff --git a/packages/google-cloud-texttospeech/tests/__init__.py b/packages/google-cloud-texttospeech/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
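The comment block in `constraints-3.7.txt` above describes the pinning convention: the oldest supported Python runs against the lower bound of every range declared in `setup.py`, while the newer constraints files deliberately leave versions unpinned. A self-contained sketch of that rule using the `packaging` library; the helper name is ours, not the repo's.

```python
# Hedged sketch of the convention behind testing/constraints-3.7.txt: each pin
# must equal the lower bound declared in setup.py's install_requires.
from packaging.requirements import Requirement


def lower_bound_pin(requirement: str) -> str:
    """Map e.g. 'proto-plus >= 1.22.0, <2.0.0dev' to 'proto-plus==1.22.0'."""
    req = Requirement(requirement)
    lower = next(s.version for s in req.specifier if s.operator in (">=", "=="))
    return f"{req.name}=={lower}"


# Ranges taken from the setup.py added above; pins match constraints-3.7.txt.
assert lower_bound_pin("google-api-core[grpc] >= 1.34.0, <3.0.0dev") == "google-api-core==1.34.0"
assert lower_bound_pin("proto-plus >= 1.22.0, <2.0.0dev") == "proto-plus==1.22.0"
assert lower_bound_pin("protobuf>=3.19.5,<5.0.0dev,!=3.20.0") == "protobuf==3.19.5"
```

Running the Python 3.7 unit session against these pins is what actually validates the lower bounds advertised in `setup.py`.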
+# diff --git a/packages/google-cloud-texttospeech/tests/system/__init__.py b/packages/google-cloud-texttospeech/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-texttospeech/tests/system/smoke_test.py b/packages/google-cloud-texttospeech/tests/system/smoke_test.py new file mode 100644 index 000000000000..ef2b43644159 --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/system/smoke_test.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google.cloud import texttospeech_v1 + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_voices(transport: str): + client = texttospeech_v1.TextToSpeechClient(transport=transport) + + client.list_voices() + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. + assert True diff --git a/packages/google-cloud-texttospeech/tests/unit/__init__.py b/packages/google-cloud-texttospeech/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
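The smoke test above only verifies that `ListVoices` round-trips to the live API over each transport. For illustration, the same check outside pytest; this is a hedged sketch that assumes Application Default Credentials are available in the environment, as the system-test scripts earlier in this change arrange.

```python
# Hedged sketch: one live ListVoices call per supported transport, mirroring
# the parametrized smoke test. Requires ADC credentials to be configured.
from google.cloud import texttospeech_v1

for transport in ("grpc", "rest"):
    client = texttospeech_v1.TextToSpeechClient(transport=transport)
    response = client.list_voices()
    print(f"{transport}: {len(response.voices)} voices available")
```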
+# diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/__init__.py b/packages/google-cloud-texttospeech/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/__init__.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py new file mode 100644 index 000000000000..e0b5b74c6ec0 --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech.py @@ -0,0 +1,2273 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.texttospeech_v1.services.text_to_speech import ( + TextToSpeechAsyncClient, + TextToSpeechClient, + transports, +) +from google.cloud.texttospeech_v1.types import cloud_tts + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TextToSpeechClient._get_default_mtls_endpoint(None) is None + assert ( + TextToSpeechClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + TextToSpeechClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + TextToSpeechClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TextToSpeechClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert TextToSpeechClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextToSpeechClient, "grpc"), + (TextToSpeechAsyncClient, "grpc_asyncio"), + (TextToSpeechClient, "rest"), + ], +) +def test_text_to_speech_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "texttospeech.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://texttospeech.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TextToSpeechGrpcTransport, "grpc"), + (transports.TextToSpeechGrpcAsyncIOTransport, "grpc_asyncio"), + 
(transports.TextToSpeechRestTransport, "rest"), + ], +) +def test_text_to_speech_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextToSpeechClient, "grpc"), + (TextToSpeechAsyncClient, "grpc_asyncio"), + (TextToSpeechClient, "rest"), + ], +) +def test_text_to_speech_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "texttospeech.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://texttospeech.googleapis.com" + ) + + +def test_text_to_speech_client_get_transport_class(): + transport = TextToSpeechClient.get_transport_class() + available_transports = [ + transports.TextToSpeechGrpcTransport, + transports.TextToSpeechRestTransport, + ] + assert transport in available_transports + + transport = TextToSpeechClient.get_transport_class("grpc") + assert transport == transports.TextToSpeechGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextToSpeechClient, transports.TextToSpeechGrpcTransport, "grpc"), + ( + TextToSpeechAsyncClient, + transports.TextToSpeechGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest"), + ], +) +@mock.patch.object( + TextToSpeechClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextToSpeechClient) +) +@mock.patch.object( + TextToSpeechAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextToSpeechAsyncClient), +) +def test_text_to_speech_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TextToSpeechClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TextToSpeechClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
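+    # Unlike an unsupported GOOGLE_API_USE_MTLS_ENDPOINT value above, which
+    # raises MutualTLSChannelError, an unsupported certificate flag should
+    # surface as a plain ValueError.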
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (TextToSpeechClient, transports.TextToSpeechGrpcTransport, "grpc", "true"),
+        (
+            TextToSpeechAsyncClient,
+            transports.TextToSpeechGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (TextToSpeechClient, transports.TextToSpeechGrpcTransport, "grpc", "false"),
+        (
+            TextToSpeechAsyncClient,
+            transports.TextToSpeechGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest", "true"),
+        (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    TextToSpeechClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextToSpeechClient)
+)
+@mock.patch.object(
+    TextToSpeechAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_text_to_speech_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
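+    # Expected matrix: "true" -> the provided cert callback is forwarded and
+    # the host autoswitches to DEFAULT_MTLS_ENDPOINT; "false" -> no client
+    # cert is used and DEFAULT_ENDPOINT is kept.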
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [TextToSpeechClient, TextToSpeechAsyncClient]) +@mock.patch.object( + TextToSpeechClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextToSpeechClient) +) +@mock.patch.object( + TextToSpeechAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextToSpeechAsyncClient), +) +def test_text_to_speech_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
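+    # With "true" and an explicit client_cert_source in the options, the
+    # helper should return the provided endpoint and cert source unchanged.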
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextToSpeechClient, transports.TextToSpeechGrpcTransport, "grpc"), + ( + TextToSpeechAsyncClient, + transports.TextToSpeechGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest"), + ], +) +def test_text_to_speech_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
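+    # User-supplied scopes should be forwarded to the transport unchanged.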
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TextToSpeechClient, + transports.TextToSpeechGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TextToSpeechAsyncClient, + transports.TextToSpeechGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest", None), + ], +) +def test_text_to_speech_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_text_to_speech_client_client_options_from_dict(): + with mock.patch( + "google.cloud.texttospeech_v1.services.text_to_speech.transports.TextToSpeechGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TextToSpeechClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TextToSpeechClient, + transports.TextToSpeechGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TextToSpeechAsyncClient, + transports.TextToSpeechGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_text_to_speech_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
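+    # create_channel should receive the credentials loaded from the file
+    # (file_creds below), not the ADC credentials, showing that
+    # credentials_file takes precedence.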
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "texttospeech.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="texttospeech.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.ListVoicesRequest, + dict, + ], +) +def test_list_voices(request_type, transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.ListVoicesResponse() + response = client.list_voices(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.ListVoicesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.ListVoicesResponse) + + +def test_list_voices_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + client.list_voices() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.ListVoicesRequest() + + +@pytest.mark.asyncio +async def test_list_voices_async( + transport: str = "grpc_asyncio", request_type=cloud_tts.ListVoicesRequest +): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_tts.ListVoicesResponse() + ) + response = await client.list_voices(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.ListVoicesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.ListVoicesResponse) + + +@pytest.mark.asyncio +async def test_list_voices_async_from_dict(): + await test_list_voices_async(request_type=dict) + + +def test_list_voices_flattened(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.ListVoicesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_voices( + language_code="language_code_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].language_code + mock_val = "language_code_value" + assert arg == mock_val + + +def test_list_voices_flattened_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_voices( + cloud_tts.ListVoicesRequest(), + language_code="language_code_value", + ) + + +@pytest.mark.asyncio +async def test_list_voices_flattened_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.ListVoicesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_tts.ListVoicesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_voices( + language_code="language_code_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].language_code + mock_val = "language_code_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_voices_flattened_error_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_voices( + cloud_tts.ListVoicesRequest(), + language_code="language_code_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.SynthesizeSpeechRequest, + dict, + ], +) +def test_synthesize_speech(request_type, transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.SynthesizeSpeechResponse( + audio_content=b"audio_content_blob", + ) + response = client.synthesize_speech(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.SynthesizeSpeechRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.SynthesizeSpeechResponse) + assert response.audio_content == b"audio_content_blob" + + +def test_synthesize_speech_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + client.synthesize_speech() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.SynthesizeSpeechRequest() + + +@pytest.mark.asyncio +async def test_synthesize_speech_async( + transport: str = "grpc_asyncio", request_type=cloud_tts.SynthesizeSpeechRequest +): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_tts.SynthesizeSpeechResponse( + audio_content=b"audio_content_blob", + ) + ) + response = await client.synthesize_speech(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.SynthesizeSpeechRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.SynthesizeSpeechResponse) + assert response.audio_content == b"audio_content_blob" + + +@pytest.mark.asyncio +async def test_synthesize_speech_async_from_dict(): + await test_synthesize_speech_async(request_type=dict) + + +def test_synthesize_speech_flattened(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.SynthesizeSpeechResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.synthesize_speech( + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].input + mock_val = cloud_tts.SynthesisInput(text="text_value") + assert arg == mock_val + arg = args[0].voice + mock_val = cloud_tts.VoiceSelectionParams(language_code="language_code_value") + assert arg == mock_val + arg = args[0].audio_config + mock_val = cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ) + assert arg == mock_val + + +def test_synthesize_speech_flattened_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.synthesize_speech( + cloud_tts.SynthesizeSpeechRequest(), + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + +@pytest.mark.asyncio +async def test_synthesize_speech_flattened_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.SynthesizeSpeechResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_tts.SynthesizeSpeechResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.synthesize_speech( + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].input + mock_val = cloud_tts.SynthesisInput(text="text_value") + assert arg == mock_val + arg = args[0].voice + mock_val = cloud_tts.VoiceSelectionParams(language_code="language_code_value") + assert arg == mock_val + arg = args[0].audio_config + mock_val = cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_synthesize_speech_flattened_error_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.synthesize_speech( + cloud_tts.SynthesizeSpeechRequest(), + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.ListVoicesRequest, + dict, + ], +) +def test_list_voices_rest(request_type): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
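+    # The mocked session returns a JSON-encoded protobuf body; the REST
+    # transport is expected to deserialize it back into a ListVoicesResponse.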
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_tts.ListVoicesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_tts.ListVoicesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_voices(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.ListVoicesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_voices_rest_interceptors(null_interceptor): + transport = transports.TextToSpeechRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextToSpeechRestInterceptor(), + ) + client = TextToSpeechClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextToSpeechRestInterceptor, "post_list_voices" + ) as post, mock.patch.object( + transports.TextToSpeechRestInterceptor, "pre_list_voices" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_tts.ListVoicesRequest.pb(cloud_tts.ListVoicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_tts.ListVoicesResponse.to_json( + cloud_tts.ListVoicesResponse() + ) + + request = cloud_tts.ListVoicesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_tts.ListVoicesResponse() + + client.list_voices( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_voices_rest_bad_request( + transport: str = "rest", request_type=cloud_tts.ListVoicesRequest +): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_voices(request) + + +def test_list_voices_rest_flattened(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_tts.ListVoicesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + language_code="language_code_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_tts.ListVoicesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_voices(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/voices" % client.transport._host, args[1]) + + +def test_list_voices_rest_flattened_error(transport: str = "rest"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_voices( + cloud_tts.ListVoicesRequest(), + language_code="language_code_value", + ) + + +def test_list_voices_rest_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.SynthesizeSpeechRequest, + dict, + ], +) +def test_synthesize_speech_rest(request_type): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_tts.SynthesizeSpeechResponse( + audio_content=b"audio_content_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_tts.SynthesizeSpeechResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.synthesize_speech(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_tts.SynthesizeSpeechResponse) + assert response.audio_content == b"audio_content_blob" + + +def test_synthesize_speech_rest_required_fields( + request_type=cloud_tts.SynthesizeSpeechRequest, +): + transport_class = transports.TextToSpeechRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).synthesize_speech._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).synthesize_speech._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_tts.SynthesizeSpeechResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
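+            # None of the required fields is sent as a query parameter here,
+            # so the expected_params list asserted below stays empty.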
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = cloud_tts.SynthesizeSpeechResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.synthesize_speech(request)
+
+            expected_params = []
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_synthesize_speech_rest_unset_required_fields():
+    transport = transports.TextToSpeechRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.synthesize_speech._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "input",
+                "voice",
+                "audioConfig",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_synthesize_speech_rest_interceptors(null_interceptor):
+    transport = transports.TextToSpeechRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.TextToSpeechRestInterceptor(),
+    )
+    client = TextToSpeechClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        transports.TextToSpeechRestInterceptor, "post_synthesize_speech"
+    ) as post, mock.patch.object(
+        transports.TextToSpeechRestInterceptor, "pre_synthesize_speech"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloud_tts.SynthesizeSpeechRequest.pb(
+            cloud_tts.SynthesizeSpeechRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = cloud_tts.SynthesizeSpeechResponse.to_json(
+            cloud_tts.SynthesizeSpeechResponse()
+        )
+
+        request = cloud_tts.SynthesizeSpeechRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = cloud_tts.SynthesizeSpeechResponse()
+
+        client.synthesize_speech(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_synthesize_speech_rest_bad_request(
+    transport: str = "rest", request_type=cloud_tts.SynthesizeSpeechRequest
+):
+    client = TextToSpeechClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
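+    # A 400 status from the session should be surfaced by the REST transport
+    # as core_exceptions.BadRequest.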
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.synthesize_speech(request) + + +def test_synthesize_speech_rest_flattened(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_tts.SynthesizeSpeechResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_tts.SynthesizeSpeechResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.synthesize_speech(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/text:synthesize" % client.transport._host, args[1] + ) + + +def test_synthesize_speech_rest_flattened_error(transport: str = "rest"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.synthesize_speech( + cloud_tts.SynthesizeSpeechRequest(), + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + +def test_synthesize_speech_rest_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextToSpeechClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextToSpeechClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TextToSpeechClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TextToSpeechGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechGrpcTransport, + transports.TextToSpeechGrpcAsyncIOTransport, + transports.TextToSpeechRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TextToSpeechClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TextToSpeechGrpcTransport, + ) + + +def test_text_to_speech_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TextToSpeechTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_text_to_speech_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.texttospeech_v1.services.text_to_speech.transports.TextToSpeechTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TextToSpeechTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
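+    # The base transport only defines the interface; the concrete gRPC and
+    # REST transports are the ones that implement these methods.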
+ methods = ( + "list_voices", + "synthesize_speech", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_text_to_speech_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.texttospeech_v1.services.text_to_speech.transports.TextToSpeechTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextToSpeechTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_text_to_speech_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.texttospeech_v1.services.text_to_speech.transports.TextToSpeechTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextToSpeechTransport() + adc.assert_called_once() + + +def test_text_to_speech_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TextToSpeechClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechGrpcTransport, + transports.TextToSpeechGrpcAsyncIOTransport, + ], +) +def test_text_to_speech_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
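+    # Explicit scopes and quota_project_id should be forwarded to
+    # google.auth.default along with the library's default scopes.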
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechGrpcTransport, + transports.TextToSpeechGrpcAsyncIOTransport, + transports.TextToSpeechRestTransport, + ], +) +def test_text_to_speech_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TextToSpeechGrpcTransport, grpc_helpers), + (transports.TextToSpeechGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_text_to_speech_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "texttospeech.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="texttospeech.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.TextToSpeechGrpcTransport, transports.TextToSpeechGrpcAsyncIOTransport], +) +def test_text_to_speech_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
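+    # The callback should be invoked once and its cert/key bytes passed to
+    # grpc.ssl_channel_credentials to build the channel credentials.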
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_text_to_speech_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.TextToSpeechRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_text_to_speech_host_no_port(transport_name):
+    client = TextToSpeechClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="texttospeech.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "texttospeech.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://texttospeech.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_text_to_speech_host_with_port(transport_name):
+    client = TextToSpeechClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="texttospeech.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "texttospeech.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://texttospeech.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_text_to_speech_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = TextToSpeechClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = TextToSpeechClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_voices._session
+    session2 = client2.transport.list_voices._session
+    assert session1 != session2
+    session1 = client1.transport.synthesize_speech._session
+    session2 = client2.transport.synthesize_speech._session
+    assert session1 != session2
+
+
+def test_text_to_speech_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TextToSpeechGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_text_to_speech_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
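+    # As in the sync case above, a user-supplied channel must be reused
+    # rather than replaced, and no SSL channel credentials are recorded.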
+    transport = transports.TextToSpeechGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.TextToSpeechGrpcTransport, transports.TextToSpeechGrpcAsyncIOTransport],
+)
+def test_text_to_speech_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [transports.TextToSpeechGrpcTransport, transports.TextToSpeechGrpcAsyncIOTransport], +) +def test_text_to_speech_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + project = "squid" + location = "clam" + model = "whelk" + expected = "projects/{project}/locations/{location}/models/{model}".format( + project=project, + location=location, + model=model, + ) + actual = TextToSpeechClient.model_path(project, location, model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "project": "octopus", + "location": "oyster", + "model": "nudibranch", + } + path = TextToSpeechClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TextToSpeechClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = TextToSpeechClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TextToSpeechClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = TextToSpeechClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TextToSpeechClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = TextToSpeechClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
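+    # For example, common_organization_path("abalone") renders
+    # "organizations/abalone", and parse_common_organization_path recovers
+    # {"organization": "abalone"} from that string.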
+ actual = TextToSpeechClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = TextToSpeechClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = TextToSpeechClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TextToSpeechClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = TextToSpeechClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TextToSpeechTransport, "_prep_wrapped_messages" + ) as prep: + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TextToSpeechTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TextToSpeechClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
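+        # close() should remain uncalled until the client context exits; the
+        # assertions below pin down that ordering.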
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TextToSpeechClient, transports.TextToSpeechGrpcTransport), + (TextToSpeechAsyncClient, transports.TextToSpeechGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py new file mode 100644 index 000000000000..c531cca2c9c2 --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1/test_text_to_speech_long_audio_synthesize.py @@ -0,0 +1,1995 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+from collections.abc import Iterable
+import json
+import math
+
+from google.api_core import (
+    future,
+    gapic_v1,
+    grpc_helpers,
+    grpc_helpers_async,
+    operation,
+    operations_v1,
+    path_template,
+)
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import operation_async  # type: ignore
+import google.auth
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import json_format
+import grpc
+from grpc.experimental import aio
+from proto.marshal.rules import wrappers
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+import pytest
+from requests import PreparedRequest, Request, Response
+from requests.sessions import Session
+
+from google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize import (
+    TextToSpeechLongAudioSynthesizeAsyncClient,
+    TextToSpeechLongAudioSynthesizeClient,
+    transports,
+)
+from google.cloud.texttospeech_v1.types import cloud_tts, cloud_tts_lrs
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(None) is None
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(api_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(
+            api_mtls_endpoint
+        )
+        == api_mtls_endpoint
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(
+            sandbox_endpoint
+        )
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(
+            sandbox_mtls_endpoint
+        )
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(non_googleapi)
+        == non_googleapi
+    )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (TextToSpeechLongAudioSynthesizeClient, "grpc"),
+        (TextToSpeechLongAudioSynthesizeAsyncClient, "grpc_asyncio"),
+        (TextToSpeechLongAudioSynthesizeClient, "rest"),
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_client_from_service_account_info(
+    client_class, transport_name
+):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "texttospeech.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://texttospeech.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.TextToSpeechLongAudioSynthesizeGrpcTransport, "grpc"),
+        (
+            transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (transports.TextToSpeechLongAudioSynthesizeRestTransport, "rest"),
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (TextToSpeechLongAudioSynthesizeClient, "grpc"),
+        (TextToSpeechLongAudioSynthesizeAsyncClient, "grpc_asyncio"),
+        (TextToSpeechLongAudioSynthesizeClient, "rest"),
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_client_from_service_account_file(
+    client_class, transport_name
+):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "texttospeech.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://texttospeech.googleapis.com"
+        )
+
+
+def test_text_to_speech_long_audio_synthesize_client_get_transport_class():
+    transport = TextToSpeechLongAudioSynthesizeClient.get_transport_class()
+    available_transports = [
+        transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+        transports.TextToSpeechLongAudioSynthesizeRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = TextToSpeechLongAudioSynthesizeClient.get_transport_class("grpc")
+    assert transport == transports.TextToSpeechLongAudioSynthesizeGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+            "grpc",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeAsyncClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeRestTransport,
+            "rest",
+        ),
+    ],
+)
+@mock.patch.object(
+    TextToSpeechLongAudioSynthesizeClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechLongAudioSynthesizeClient),
+)
+@mock.patch.object(
+    TextToSpeechLongAudioSynthesizeAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechLongAudioSynthesizeAsyncClient),
+)
+def test_text_to_speech_long_audio_synthesize_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(
+        TextToSpeechLongAudioSynthesizeClient, "get_transport_class"
+    ) as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(
+        TextToSpeechLongAudioSynthesizeClient, "get_transport_class"
+    ) as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
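+    # Only "true" and "false" are valid settings for this variable; anything
+    # else should be rejected with a ValueError at client construction time.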
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeAsyncClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeAsyncClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    TextToSpeechLongAudioSynthesizeClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechLongAudioSynthesizeClient),
+)
+@mock.patch.object(
+    TextToSpeechLongAudioSynthesizeAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechLongAudioSynthesizeAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_text_to_speech_long_audio_synthesize_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
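+    # In sketch form: cert source present + "true" selects DEFAULT_MTLS_ENDPOINT
+    # and forwards the callback; "false" keeps DEFAULT_ENDPOINT and drops it.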
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [TextToSpeechLongAudioSynthesizeClient, TextToSpeechLongAudioSynthesizeAsyncClient], +) +@mock.patch.object( + TextToSpeechLongAudioSynthesizeClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextToSpeechLongAudioSynthesizeClient), +) +@mock.patch.object( + TextToSpeechLongAudioSynthesizeAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextToSpeechLongAudioSynthesizeAsyncClient), +) +def test_text_to_speech_long_audio_synthesize_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
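+    # An explicit api_endpoint plus a client_cert_source supplied through
+    # ClientOptions should be returned unchanged when the variable is "true".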
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + "grpc", + ), + ( + TextToSpeechLongAudioSynthesizeAsyncClient, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeRestTransport, + "rest", + ), + ], +) +def test_text_to_speech_long_audio_synthesize_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TextToSpeechLongAudioSynthesizeAsyncClient, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeRestTransport, + "rest", + None, + ), + ], +) +def test_text_to_speech_long_audio_synthesize_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_text_to_speech_long_audio_synthesize_client_client_options_from_dict(): + with mock.patch( + "google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize.transports.TextToSpeechLongAudioSynthesizeGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TextToSpeechLongAudioSynthesizeClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TextToSpeechLongAudioSynthesizeAsyncClient, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_text_to_speech_long_audio_synthesize_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
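+    # The path is never opened here: load_credentials_from_file is mocked
+    # below, and the credentials it yields are what must reach create_channel.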
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "texttospeech.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="texttospeech.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts_lrs.SynthesizeLongAudioRequest, + dict, + ], +) +def test_synthesize_long_audio(request_type, transport: str = "grpc"): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.synthesize_long_audio(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts_lrs.SynthesizeLongAudioRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_synthesize_long_audio_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
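+    # Calling the method with no arguments should still send a well-formed,
+    # default-constructed SynthesizeLongAudioRequest to the stub.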
+ with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + client.synthesize_long_audio() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts_lrs.SynthesizeLongAudioRequest() + + +@pytest.mark.asyncio +async def test_synthesize_long_audio_async( + transport: str = "grpc_asyncio", + request_type=cloud_tts_lrs.SynthesizeLongAudioRequest, +): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.synthesize_long_audio(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts_lrs.SynthesizeLongAudioRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_synthesize_long_audio_async_from_dict(): + await test_synthesize_long_audio_async(request_type=dict) + + +def test_synthesize_long_audio_field_headers(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_tts_lrs.SynthesizeLongAudioRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.synthesize_long_audio(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_synthesize_long_audio_field_headers_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_tts_lrs.SynthesizeLongAudioRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.synthesize_long_audio(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
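+    # Routing is carried in gRPC metadata, e.g.
+    # ("x-goog-request-params", "parent=parent_value").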
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts_lrs.SynthesizeLongAudioRequest, + dict, + ], +) +def test_synthesize_long_audio_rest(request_type): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/voices/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.synthesize_long_audio(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_synthesize_long_audio_rest_required_fields( + request_type=cloud_tts_lrs.SynthesizeLongAudioRequest, +): + transport_class = transports.TextToSpeechLongAudioSynthesizeRestTransport + + request_init = {} + request_init["output_gcs_uri"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).synthesize_long_audio._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["outputGcsUri"] = "output_gcs_uri_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).synthesize_long_audio._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "outputGcsUri" in jsonified_request + assert jsonified_request["outputGcsUri"] == "output_gcs_uri_value" + + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
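+            # transcode() normally matches the request against the service's
+            # http annotations to derive the URI, body, and query params;
+            # stubbing it keeps this test independent of those routing rules.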
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.synthesize_long_audio(request)
+
+            expected_params = []
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_synthesize_long_audio_rest_unset_required_fields():
+    transport = transports.TextToSpeechLongAudioSynthesizeRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.synthesize_long_audio._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "input",
+                "audioConfig",
+                "outputGcsUri",
+                "voice",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_synthesize_long_audio_rest_interceptors(null_interceptor):
+    transport = transports.TextToSpeechLongAudioSynthesizeRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.TextToSpeechLongAudioSynthesizeRestInterceptor(),
+    )
+    client = TextToSpeechLongAudioSynthesizeClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.TextToSpeechLongAudioSynthesizeRestInterceptor,
+        "post_synthesize_long_audio",
+    ) as post, mock.patch.object(
+        transports.TextToSpeechLongAudioSynthesizeRestInterceptor,
+        "pre_synthesize_long_audio",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloud_tts_lrs.SynthesizeLongAudioRequest.pb(
+            cloud_tts_lrs.SynthesizeLongAudioRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = cloud_tts_lrs.SynthesizeLongAudioRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.synthesize_long_audio(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_synthesize_long_audio_rest_bad_request(
+    transport: str = "rest", request_type=cloud_tts_lrs.SynthesizeLongAudioRequest
+):
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2/voices/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
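+    # google.api_core maps a 400 status to core_exceptions.BadRequest, which
+    # is the error pytest.raises expects below.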
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.synthesize_long_audio(request) + + +def test_synthesize_long_audio_rest_error(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TextToSpeechLongAudioSynthesizeClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + transports.TextToSpeechLongAudioSynthesizeRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
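+    # "Default" here means Application Default Credentials, resolved through
+    # google.auth.default() when no explicit credentials are supplied.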
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TextToSpeechLongAudioSynthesizeClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + ) + + +def test_text_to_speech_long_audio_synthesize_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TextToSpeechLongAudioSynthesizeTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_text_to_speech_long_audio_synthesize_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize.transports.TextToSpeechLongAudioSynthesizeTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TextToSpeechLongAudioSynthesizeTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("synthesize_long_audio",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_text_to_speech_long_audio_synthesize_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize.transports.TextToSpeechLongAudioSynthesizeTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextToSpeechLongAudioSynthesizeTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_text_to_speech_long_audio_synthesize_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.texttospeech_v1.services.text_to_speech_long_audio_synthesize.transports.TextToSpeechLongAudioSynthesizeTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextToSpeechLongAudioSynthesizeTransport() + adc.assert_called_once() + + +def test_text_to_speech_long_audio_synthesize_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TextToSpeechLongAudioSynthesizeClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + ], +) +def test_text_to_speech_long_audio_synthesize_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + transports.TextToSpeechLongAudioSynthesizeRestTransport, + ], +) +def test_text_to_speech_long_audio_synthesize_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TextToSpeechLongAudioSynthesizeGrpcTransport, grpc_helpers), + ( + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_text_to_speech_long_audio_synthesize_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
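+    # Besides credentials, the assertion below pins the channel defaults:
+    # the cloud-platform scope, the texttospeech.googleapis.com host, and
+    # unlimited gRPC send/receive message sizes (-1).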
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "texttospeech.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="texttospeech.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + ], +) +def test_text_to_speech_long_audio_synthesize_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_text_to_speech_long_audio_synthesize_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TextToSpeechLongAudioSynthesizeRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_text_to_speech_long_audio_synthesize_rest_lro_client(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
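+    # In other words, the operations client is built lazily on first access
+    # and cached on the transport thereafter.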
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_host_no_port(transport_name):
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="texttospeech.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "texttospeech.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://texttospeech.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_host_with_port(transport_name):
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="texttospeech.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "texttospeech.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://texttospeech.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_client_transport_session_collision(
+    transport_name,
+):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = TextToSpeechLongAudioSynthesizeClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = TextToSpeechLongAudioSynthesizeClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.synthesize_long_audio._session
+    session2 = client2.transport.synthesize_long_audio._session
+    assert session1 != session2
+
+
+def test_text_to_speech_long_audio_synthesize_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_text_to_speech_long_audio_synthesize_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
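+# (The suggested replacement is to configure mTLS via client_options, e.g.
+# client_options.ClientOptions(client_cert_source=...), which the mtls tests
+# in this file exercise.)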
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+        transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+        transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_transport_channel_mtls_with_adc(
+    transport_class,
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_text_to_speech_long_audio_synthesize_grpc_lro_client():
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_text_to_speech_long_audio_synthesize_grpc_lro_async_client():
+    client = TextToSpeechLongAudioSynthesizeAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_model_path():
+    project = "squid"
+    location = "clam"
+    model = "whelk"
+    expected = "projects/{project}/locations/{location}/models/{model}".format(
+        project=project,
+        location=location,
+        model=model,
+    )
+    actual = TextToSpeechLongAudioSynthesizeClient.model_path(project, location, model)
+    assert expected == actual
+
+
+def test_parse_model_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "model": "nudibranch",
+    }
+    path = TextToSpeechLongAudioSynthesizeClient.model_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TextToSpeechLongAudioSynthesizeClient.parse_model_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "cuttlefish"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = TextToSpeechLongAudioSynthesizeClient.common_billing_account_path(
+        billing_account
+    )
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "mussel",
+    }
+    path = TextToSpeechLongAudioSynthesizeClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TextToSpeechLongAudioSynthesizeClient.parse_common_billing_account_path(
+        path
+    )
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "winkle"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = TextToSpeechLongAudioSynthesizeClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "nautilus",
+    }
+    path = TextToSpeechLongAudioSynthesizeClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TextToSpeechLongAudioSynthesizeClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "scallop"
+    expected = "organizations/{organization}".format(
+        organization=organization,
+    )
+    actual = TextToSpeechLongAudioSynthesizeClient.common_organization_path(
+        organization
+    )
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "abalone",
+    }
+    path = TextToSpeechLongAudioSynthesizeClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
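+    # The parse_* helpers invert their *_path builders by matching the
+    # resource-name template (e.g. "organizations/abalone") back into parts.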
+ actual = TextToSpeechLongAudioSynthesizeClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = TextToSpeechLongAudioSynthesizeClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = TextToSpeechLongAudioSynthesizeClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechLongAudioSynthesizeClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TextToSpeechLongAudioSynthesizeClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = TextToSpeechLongAudioSynthesizeClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechLongAudioSynthesizeClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TextToSpeechLongAudioSynthesizeTransport, "_prep_wrapped_messages" + ) as prep: + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TextToSpeechLongAudioSynthesizeTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TextToSpeechLongAudioSynthesizeClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
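+        # Entering and exiting the client as a context manager should close
+        # the underlying transport on exit, even though the body does nothing.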
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + ), + ( + TextToSpeechLongAudioSynthesizeAsyncClient, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/__init__.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py new file mode 100644 index 000000000000..8c9f0156393c --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech.py @@ -0,0 +1,2275 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.texttospeech_v1beta1.services.text_to_speech import ( + TextToSpeechAsyncClient, + TextToSpeechClient, + transports, +) +from google.cloud.texttospeech_v1beta1.types import cloud_tts + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TextToSpeechClient._get_default_mtls_endpoint(None) is None + assert ( + TextToSpeechClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + TextToSpeechClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + TextToSpeechClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TextToSpeechClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert TextToSpeechClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextToSpeechClient, "grpc"), + (TextToSpeechAsyncClient, "grpc_asyncio"), + (TextToSpeechClient, "rest"), + ], +) +def test_text_to_speech_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "texttospeech.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://texttospeech.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TextToSpeechGrpcTransport, "grpc"), + (transports.TextToSpeechGrpcAsyncIOTransport, 
"grpc_asyncio"), + (transports.TextToSpeechRestTransport, "rest"), + ], +) +def test_text_to_speech_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextToSpeechClient, "grpc"), + (TextToSpeechAsyncClient, "grpc_asyncio"), + (TextToSpeechClient, "rest"), + ], +) +def test_text_to_speech_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "texttospeech.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://texttospeech.googleapis.com" + ) + + +def test_text_to_speech_client_get_transport_class(): + transport = TextToSpeechClient.get_transport_class() + available_transports = [ + transports.TextToSpeechGrpcTransport, + transports.TextToSpeechRestTransport, + ] + assert transport in available_transports + + transport = TextToSpeechClient.get_transport_class("grpc") + assert transport == transports.TextToSpeechGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextToSpeechClient, transports.TextToSpeechGrpcTransport, "grpc"), + ( + TextToSpeechAsyncClient, + transports.TextToSpeechGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest"), + ], +) +@mock.patch.object( + TextToSpeechClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextToSpeechClient) +) +@mock.patch.object( + TextToSpeechAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextToSpeechAsyncClient), +) +def test_text_to_speech_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TextToSpeechClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TextToSpeechClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (TextToSpeechClient, transports.TextToSpeechGrpcTransport, "grpc", "true"),
+        (
+            TextToSpeechAsyncClient,
+            transports.TextToSpeechGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (TextToSpeechClient, transports.TextToSpeechGrpcTransport, "grpc", "false"),
+        (
+            TextToSpeechAsyncClient,
+            transports.TextToSpeechGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest", "true"),
+        (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    TextToSpeechClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextToSpeechClient)
+)
+@mock.patch.object(
+    TextToSpeechAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_text_to_speech_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
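+    # Rough matrix exercised below (with GOOGLE_API_USE_MTLS_ENDPOINT=auto):
+    #   cert source set, GOOGLE_API_USE_CLIENT_CERTIFICATE=true  -> mTLS endpoint + cert
+    #   cert source set, GOOGLE_API_USE_CLIENT_CERTIFICATE=false -> default endpoint, no cert
+    #   no cert source available                                 -> default endpoint, no cert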
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [TextToSpeechClient, TextToSpeechAsyncClient]) +@mock.patch.object( + TextToSpeechClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextToSpeechClient) +) +@mock.patch.object( + TextToSpeechAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextToSpeechAsyncClient), +) +def test_text_to_speech_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
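+    # get_mtls_endpoint_and_cert_source returns an (endpoint, cert_source)
+    # pair; cert_source should be non-None only when client certs are enabled.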
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextToSpeechClient, transports.TextToSpeechGrpcTransport, "grpc"), + ( + TextToSpeechAsyncClient, + transports.TextToSpeechGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest"), + ], +) +def test_text_to_speech_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
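+    # User-supplied scopes are forwarded to the transport, where they take
+    # precedence over the service's default OAuth scopes during auth.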
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TextToSpeechClient, + transports.TextToSpeechGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TextToSpeechAsyncClient, + transports.TextToSpeechGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (TextToSpeechClient, transports.TextToSpeechRestTransport, "rest", None), + ], +) +def test_text_to_speech_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_text_to_speech_client_client_options_from_dict(): + with mock.patch( + "google.cloud.texttospeech_v1beta1.services.text_to_speech.transports.TextToSpeechGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TextToSpeechClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TextToSpeechClient, + transports.TextToSpeechGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TextToSpeechAsyncClient, + transports.TextToSpeechGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_text_to_speech_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
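+    # (google.auth.load_credentials_from_file is patched so no real key file
+    # is read; the loaded credentials, not ADC's, should then be the ones
+    # handed to grpc_helpers.create_channel, as asserted below.)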
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "texttospeech.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="texttospeech.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.ListVoicesRequest, + dict, + ], +) +def test_list_voices(request_type, transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.ListVoicesResponse() + response = client.list_voices(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.ListVoicesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.ListVoicesResponse) + + +def test_list_voices_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + client.list_voices() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.ListVoicesRequest() + + +@pytest.mark.asyncio +async def test_list_voices_async( + transport: str = "grpc_asyncio", request_type=cloud_tts.ListVoicesRequest +): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_tts.ListVoicesResponse() + ) + response = await client.list_voices(request) + + # Establish that the underlying gRPC stub method was called. 
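+        # Unlike the sync variant's exact `== 1` check, this only asserts that
+        # at least one call was recorded (a non-zero, truthy length).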
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.ListVoicesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.ListVoicesResponse) + + +@pytest.mark.asyncio +async def test_list_voices_async_from_dict(): + await test_list_voices_async(request_type=dict) + + +def test_list_voices_flattened(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.ListVoicesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_voices( + language_code="language_code_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].language_code + mock_val = "language_code_value" + assert arg == mock_val + + +def test_list_voices_flattened_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_voices( + cloud_tts.ListVoicesRequest(), + language_code="language_code_value", + ) + + +@pytest.mark.asyncio +async def test_list_voices_flattened_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_voices), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.ListVoicesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_tts.ListVoicesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_voices( + language_code="language_code_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].language_code + mock_val = "language_code_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_voices_flattened_error_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_voices( + cloud_tts.ListVoicesRequest(), + language_code="language_code_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.SynthesizeSpeechRequest, + dict, + ], +) +def test_synthesize_speech(request_type, transport: str = "grpc"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
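+    # Patching __call__ on the stub's type intercepts the unary gRPC method
+    # itself, so no channel or network I/O is ever involved.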
+ with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.SynthesizeSpeechResponse( + audio_content=b"audio_content_blob", + ) + response = client.synthesize_speech(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.SynthesizeSpeechRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.SynthesizeSpeechResponse) + assert response.audio_content == b"audio_content_blob" + + +def test_synthesize_speech_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + client.synthesize_speech() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.SynthesizeSpeechRequest() + + +@pytest.mark.asyncio +async def test_synthesize_speech_async( + transport: str = "grpc_asyncio", request_type=cloud_tts.SynthesizeSpeechRequest +): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_tts.SynthesizeSpeechResponse( + audio_content=b"audio_content_blob", + ) + ) + response = await client.synthesize_speech(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts.SynthesizeSpeechRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.SynthesizeSpeechResponse) + assert response.audio_content == b"audio_content_blob" + + +@pytest.mark.asyncio +async def test_synthesize_speech_async_from_dict(): + await test_synthesize_speech_async(request_type=dict) + + +def test_synthesize_speech_flattened(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.SynthesizeSpeechResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.synthesize_speech( + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].input + mock_val = cloud_tts.SynthesisInput(text="text_value") + assert arg == mock_val + arg = args[0].voice + mock_val = cloud_tts.VoiceSelectionParams(language_code="language_code_value") + assert arg == mock_val + arg = args[0].audio_config + mock_val = cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ) + assert arg == mock_val + + +def test_synthesize_speech_flattened_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.synthesize_speech( + cloud_tts.SynthesizeSpeechRequest(), + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + +@pytest.mark.asyncio +async def test_synthesize_speech_flattened_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_speech), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_tts.SynthesizeSpeechResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_tts.SynthesizeSpeechResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.synthesize_speech( + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].input + mock_val = cloud_tts.SynthesisInput(text="text_value") + assert arg == mock_val + arg = args[0].voice + mock_val = cloud_tts.VoiceSelectionParams(language_code="language_code_value") + assert arg == mock_val + arg = args[0].audio_config + mock_val = cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_synthesize_speech_flattened_error_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.synthesize_speech( + cloud_tts.SynthesizeSpeechRequest(), + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.ListVoicesRequest, + dict, + ], +) +def test_list_voices_rest(request_type): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
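+    # The REST transport sends requests through a requests-based
+    # AuthorizedSession, so faking a response means patching the session's
+    # request method and returning a Response whose body is the protobuf
+    # message serialized to JSON.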
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_tts.ListVoicesResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_tts.ListVoicesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_voices(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_tts.ListVoicesResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_voices_rest_interceptors(null_interceptor): + transport = transports.TextToSpeechRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextToSpeechRestInterceptor(), + ) + client = TextToSpeechClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextToSpeechRestInterceptor, "post_list_voices" + ) as post, mock.patch.object( + transports.TextToSpeechRestInterceptor, "pre_list_voices" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_tts.ListVoicesRequest.pb(cloud_tts.ListVoicesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_tts.ListVoicesResponse.to_json( + cloud_tts.ListVoicesResponse() + ) + + request = cloud_tts.ListVoicesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_tts.ListVoicesResponse() + + client.list_voices( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_voices_rest_bad_request( + transport: str = "rest", request_type=cloud_tts.ListVoicesRequest +): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_voices(request) + + +def test_list_voices_rest_flattened(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_tts.ListVoicesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + language_code="language_code_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_tts.ListVoicesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_voices(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/voices" % client.transport._host, args[1] + ) + + +def test_list_voices_rest_flattened_error(transport: str = "rest"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_voices( + cloud_tts.ListVoicesRequest(), + language_code="language_code_value", + ) + + +def test_list_voices_rest_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts.SynthesizeSpeechRequest, + dict, + ], +) +def test_synthesize_speech_rest(request_type): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_tts.SynthesizeSpeechResponse( + audio_content=b"audio_content_blob", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_tts.SynthesizeSpeechResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.synthesize_speech(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_tts.SynthesizeSpeechResponse) + assert response.audio_content == b"audio_content_blob" + + +def test_synthesize_speech_rest_required_fields( + request_type=cloud_tts.SynthesizeSpeechRequest, +): + transport_class = transports.TextToSpeechRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).synthesize_speech._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).synthesize_speech._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_tts.SynthesizeSpeechResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
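+            # A real transcode() matches the request against the method's
+            # google.api.http binding and returns a dict shaped like the
+            # stub below; for this RPC that would be roughly:
+            #
+            #   {"method": "post", "uri": "/v1beta1/text:synthesize",
+            #    "body": <request proto>, "query_params": <unbound fields>}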
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_tts.SynthesizeSpeechResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.synthesize_speech(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_synthesize_speech_rest_unset_required_fields(): + transport = transports.TextToSpeechRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.synthesize_speech._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "input", + "voice", + "audioConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_synthesize_speech_rest_interceptors(null_interceptor): + transport = transports.TextToSpeechRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextToSpeechRestInterceptor(), + ) + client = TextToSpeechClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextToSpeechRestInterceptor, "post_synthesize_speech" + ) as post, mock.patch.object( + transports.TextToSpeechRestInterceptor, "pre_synthesize_speech" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_tts.SynthesizeSpeechRequest.pb( + cloud_tts.SynthesizeSpeechRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_tts.SynthesizeSpeechResponse.to_json( + cloud_tts.SynthesizeSpeechResponse() + ) + + request = cloud_tts.SynthesizeSpeechRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_tts.SynthesizeSpeechResponse() + + client.synthesize_speech( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_synthesize_speech_rest_bad_request( + transport: str = "rest", request_type=cloud_tts.SynthesizeSpeechRequest +): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
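+    # google.api_core maps HTTP status codes onto exception classes
+    # (400 -> BadRequest, 404 -> NotFound, ...), so a mocked 400 is enough
+    # to make the client raise. Roughly, for any 400 response object resp:
+    #
+    #   exc = core_exceptions.from_http_response(resp)
+    #   assert isinstance(exc, core_exceptions.BadRequest)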
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.synthesize_speech(request) + + +def test_synthesize_speech_rest_flattened(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloud_tts.SynthesizeSpeechResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_tts.SynthesizeSpeechResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.synthesize_speech(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/text:synthesize" % client.transport._host, args[1] + ) + + +def test_synthesize_speech_rest_flattened_error(transport: str = "rest"): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.synthesize_speech( + cloud_tts.SynthesizeSpeechRequest(), + input=cloud_tts.SynthesisInput(text="text_value"), + voice=cloud_tts.VoiceSelectionParams(language_code="language_code_value"), + audio_config=cloud_tts.AudioConfig( + audio_encoding=cloud_tts.AudioEncoding.LINEAR16 + ), + ) + + +def test_synthesize_speech_rest_error(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
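+    # A transport instance arrives with credentials already bound, so any
+    # additional credential source, api_key included, would be ambiguous;
+    # the client raises instead of guessing which one wins.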
+ transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextToSpeechClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextToSpeechClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TextToSpeechClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextToSpeechGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TextToSpeechGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechGrpcTransport, + transports.TextToSpeechGrpcAsyncIOTransport, + transports.TextToSpeechRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TextToSpeechClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TextToSpeechGrpcTransport, + ) + + +def test_text_to_speech_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TextToSpeechTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_text_to_speech_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.texttospeech_v1beta1.services.text_to_speech.transports.TextToSpeechTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TextToSpeechTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
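+    # Roughly, the abstract base exposes each RPC as a stub that concrete
+    # gRPC/REST transports must override, along the lines of:
+    #
+    #   @property
+    #   def list_voices(self):
+    #       raise NotImplementedError()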
+ methods = ( + "list_voices", + "synthesize_speech", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_text_to_speech_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.texttospeech_v1beta1.services.text_to_speech.transports.TextToSpeechTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextToSpeechTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_text_to_speech_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.texttospeech_v1beta1.services.text_to_speech.transports.TextToSpeechTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextToSpeechTransport() + adc.assert_called_once() + + +def test_text_to_speech_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TextToSpeechClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechGrpcTransport, + transports.TextToSpeechGrpcAsyncIOTransport, + ], +) +def test_text_to_speech_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
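+    # google.auth.default() is the Application Default Credentials lookup
+    # (GOOGLE_APPLICATION_CREDENTIALS, gcloud config, GCE metadata server,
+    # ...) and returns a (credentials, project_id) tuple, which is why the
+    # mock below returns a 2-tuple.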
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechGrpcTransport, + transports.TextToSpeechGrpcAsyncIOTransport, + transports.TextToSpeechRestTransport, + ], +) +def test_text_to_speech_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TextToSpeechGrpcTransport, grpc_helpers), + (transports.TextToSpeechGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_text_to_speech_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "texttospeech.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="texttospeech.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.TextToSpeechGrpcTransport, transports.TextToSpeechGrpcAsyncIOTransport], +) +def test_text_to_speech_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
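+    # The callback yields a (cert_bytes, key_bytes) pair that the transport
+    # is expected to feed into grpc.ssl_channel_credentials, i.e.:
+    #
+    #   cert, key = client_cert_source_callback()
+    #   grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)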
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_text_to_speech_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TextToSpeechRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_text_to_speech_host_no_port(transport_name): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="texttospeech.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "texttospeech.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://texttospeech.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_text_to_speech_host_with_port(transport_name): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="texttospeech.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "texttospeech.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://texttospeech.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_text_to_speech_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TextToSpeechClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TextToSpeechClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_voices._session + session2 = client2.transport.list_voices._session + assert session1 != session2 + session1 = client1.transport.synthesize_speech._session + session2 = client2.transport.synthesize_speech._session + assert session1 != session2 + + +def test_text_to_speech_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TextToSpeechGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_text_to_speech_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
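+    # As with the sync transport above, a caller-supplied channel is adopted
+    # as-is (no credentials or TLS are wired up), while the reported _host
+    # still gains the default :443 suffix.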
+ transport = transports.TextToSpeechGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.TextToSpeechGrpcTransport, transports.TextToSpeechGrpcAsyncIOTransport], +) +def test_text_to_speech_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.TextToSpeechGrpcTransport, transports.TextToSpeechGrpcAsyncIOTransport], +) +def test_text_to_speech_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + project = "squid" + location = "clam" + model = "whelk" + expected = "projects/{project}/locations/{location}/models/{model}".format( + project=project, + location=location, + model=model, + ) + actual = TextToSpeechClient.model_path(project, location, model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "project": "octopus", + "location": "oyster", + "model": "nudibranch", + } + path = TextToSpeechClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TextToSpeechClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = TextToSpeechClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TextToSpeechClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = TextToSpeechClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TextToSpeechClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = TextToSpeechClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
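+    # The parse_* helpers invert their *_path builders by matching the
+    # resource-name template, e.g.:
+    #
+    #   TextToSpeechClient.common_organization_path("scallop")
+    #   -> "organizations/scallop"
+    #   TextToSpeechClient.parse_common_organization_path("organizations/scallop")
+    #   -> {"organization": "scallop"}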
+ actual = TextToSpeechClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = TextToSpeechClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = TextToSpeechClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TextToSpeechClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = TextToSpeechClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TextToSpeechTransport, "_prep_wrapped_messages" + ) as prep: + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TextToSpeechTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TextToSpeechClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TextToSpeechAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TextToSpeechClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
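+        # Using the client as a context manager delegates __exit__ to
+        # transport.close(): the requests session for REST, the channel for
+        # gRPC, so resources are released deterministically.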
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TextToSpeechClient, transports.TextToSpeechGrpcTransport), + (TextToSpeechAsyncClient, transports.TextToSpeechGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py new file mode 100644 index 000000000000..be0f0e433518 --- /dev/null +++ b/packages/google-cloud-texttospeech/tests/unit/gapic/texttospeech_v1beta1/test_text_to_speech_long_audio_synthesize.py @@ -0,0 +1,1995 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+from collections.abc import Iterable
+import json
+import math
+
+from google.api_core import (
+    future,
+    gapic_v1,
+    grpc_helpers,
+    grpc_helpers_async,
+    operation,
+    operations_v1,
+    path_template,
+)
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import operation_async  # type: ignore
+import google.auth
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import json_format
+import grpc
+from grpc.experimental import aio
+from proto.marshal.rules import wrappers
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+import pytest
+from requests import PreparedRequest, Request, Response
+from requests.sessions import Session
+
+from google.cloud.texttospeech_v1beta1.services.text_to_speech_long_audio_synthesize import (
+    TextToSpeechLongAudioSynthesizeAsyncClient,
+    TextToSpeechLongAudioSynthesizeClient,
+    transports,
+)
+from google.cloud.texttospeech_v1beta1.types import cloud_tts, cloud_tts_lrs
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(None) is None
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(api_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(
+            api_mtls_endpoint
+        )
+        == api_mtls_endpoint
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(
+            sandbox_endpoint
+        )
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(
+            sandbox_mtls_endpoint
+        )
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        TextToSpeechLongAudioSynthesizeClient._get_default_mtls_endpoint(non_googleapi)
+        == non_googleapi
+    )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (TextToSpeechLongAudioSynthesizeClient, "grpc"),
+        (TextToSpeechLongAudioSynthesizeAsyncClient, "grpc_asyncio"),
+        (TextToSpeechLongAudioSynthesizeClient, "rest"),
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_client_from_service_account_info(
+    client_class, transport_name
+):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "texttospeech.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://texttospeech.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.TextToSpeechLongAudioSynthesizeGrpcTransport, "grpc"),
+        (
+            transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (transports.TextToSpeechLongAudioSynthesizeRestTransport, "rest"),
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (TextToSpeechLongAudioSynthesizeClient, "grpc"),
+        (TextToSpeechLongAudioSynthesizeAsyncClient, "grpc_asyncio"),
+        (TextToSpeechLongAudioSynthesizeClient, "rest"),
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_client_from_service_account_file(
+    client_class, transport_name
+):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "texttospeech.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://texttospeech.googleapis.com"
+        )
+
+
+def test_text_to_speech_long_audio_synthesize_client_get_transport_class():
+    transport = TextToSpeechLongAudioSynthesizeClient.get_transport_class()
+    available_transports = [
+        transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+        transports.TextToSpeechLongAudioSynthesizeRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = TextToSpeechLongAudioSynthesizeClient.get_transport_class("grpc")
+    assert transport == transports.TextToSpeechLongAudioSynthesizeGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+            "grpc",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeAsyncClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeRestTransport,
+            "rest",
+        ),
+    ],
+)
+@mock.patch.object(
+    TextToSpeechLongAudioSynthesizeClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechLongAudioSynthesizeClient),
+)
+@mock.patch.object(
+    TextToSpeechLongAudioSynthesizeAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechLongAudioSynthesizeAsyncClient),
+)
+def test_text_to_speech_long_audio_synthesize_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(
+        TextToSpeechLongAudioSynthesizeClient, "get_transport_class"
+    ) as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(
+        TextToSpeechLongAudioSynthesizeClient, "get_transport_class"
+    ) as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
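+    # GOOGLE_API_USE_CLIENT_CERTIFICATE only admits "true" or "false"; any
+    # other value is rejected outright rather than coerced.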
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeAsyncClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeAsyncClient,
+            transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            TextToSpeechLongAudioSynthesizeClient,
+            transports.TextToSpeechLongAudioSynthesizeRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    TextToSpeechLongAudioSynthesizeClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechLongAudioSynthesizeClient),
+)
+@mock.patch.object(
+    TextToSpeechLongAudioSynthesizeAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TextToSpeechLongAudioSynthesizeAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_text_to_speech_long_audio_synthesize_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
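+    # The autoswitch rules exercised below, roughly:
+    #
+    #   client cert available | USE_CLIENT_CERTIFICATE | endpoint chosen
+    #   ----------------------+------------------------+----------------------
+    #   yes                   | "true"                 | DEFAULT_MTLS_ENDPOINT
+    #   yes                   | "false" or unset       | DEFAULT_ENDPOINT
+    #   no                    | either                 | DEFAULT_ENDPOINT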
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [TextToSpeechLongAudioSynthesizeClient, TextToSpeechLongAudioSynthesizeAsyncClient], +) +@mock.patch.object( + TextToSpeechLongAudioSynthesizeClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextToSpeechLongAudioSynthesizeClient), +) +@mock.patch.object( + TextToSpeechLongAudioSynthesizeAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextToSpeechLongAudioSynthesizeAsyncClient), +) +def test_text_to_speech_long_audio_synthesize_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
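+    # get_mtls_endpoint_and_cert_source() returns an (endpoint, cert_source)
+    # pair, with cert_source None whenever client certificates are disabled,
+    # so callers can branch on it directly.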
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + "grpc", + ), + ( + TextToSpeechLongAudioSynthesizeAsyncClient, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeRestTransport, + "rest", + ), + ], +) +def test_text_to_speech_long_audio_synthesize_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
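+    # User-supplied scopes flow to the transport as `scopes`, while the
+    # library's own default_scopes stay separate so auth can fall back to
+    # them when the user passes none.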
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TextToSpeechLongAudioSynthesizeAsyncClient, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeRestTransport, + "rest", + None, + ), + ], +) +def test_text_to_speech_long_audio_synthesize_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_text_to_speech_long_audio_synthesize_client_client_options_from_dict(): + with mock.patch( + "google.cloud.texttospeech_v1beta1.services.text_to_speech_long_audio_synthesize.transports.TextToSpeechLongAudioSynthesizeGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TextToSpeechLongAudioSynthesizeClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TextToSpeechLongAudioSynthesizeAsyncClient, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_text_to_speech_long_audio_synthesize_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
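+    # credentials_file defers loading: only the path is handed to the
+    # transport, which calls google.auth.load_credentials_from_file() when
+    # the channel is built (asserted further below).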
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "texttospeech.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="texttospeech.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts_lrs.SynthesizeLongAudioRequest, + dict, + ], +) +def test_synthesize_long_audio(request_type, transport: str = "grpc"): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.synthesize_long_audio(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts_lrs.SynthesizeLongAudioRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_synthesize_long_audio_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
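+    # synthesize_long_audio is a long-running operation: the stub returns a
+    # google.longrunning Operation, which the client wraps in an api_core
+    # operation future. A typical caller, sketched (timeout illustrative):
+    #
+    #   op = client.synthesize_long_audio(request=request)
+    #   result = op.result(timeout=300)  # blocks until the LRO completes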
+ with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + client.synthesize_long_audio() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts_lrs.SynthesizeLongAudioRequest() + + +@pytest.mark.asyncio +async def test_synthesize_long_audio_async( + transport: str = "grpc_asyncio", + request_type=cloud_tts_lrs.SynthesizeLongAudioRequest, +): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.synthesize_long_audio(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_tts_lrs.SynthesizeLongAudioRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_synthesize_long_audio_async_from_dict(): + await test_synthesize_long_audio_async(request_type=dict) + + +def test_synthesize_long_audio_field_headers(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_tts_lrs.SynthesizeLongAudioRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.synthesize_long_audio(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_synthesize_long_audio_field_headers_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_tts_lrs.SynthesizeLongAudioRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.synthesize_long_audio), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.synthesize_long_audio(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
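+    # The routing header is simply the URI field rendered as "key=value";
+    # a minimal sketch of the metadata entry asserted below:
+    expected_routing = ("x-goog-request-params", "parent=" + request.parent)
+    assert expected_routing == ("x-goog-request-params", "parent=parent_value")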
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + cloud_tts_lrs.SynthesizeLongAudioRequest, + dict, + ], +) +def test_synthesize_long_audio_rest(request_type): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/voices/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.synthesize_long_audio(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_synthesize_long_audio_rest_required_fields( + request_type=cloud_tts_lrs.SynthesizeLongAudioRequest, +): + transport_class = transports.TextToSpeechLongAudioSynthesizeRestTransport + + request_init = {} + request_init["output_gcs_uri"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).synthesize_long_audio._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["outputGcsUri"] = "output_gcs_uri_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).synthesize_long_audio._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "outputGcsUri" in jsonified_request + assert jsonified_request["outputGcsUri"] == "output_gcs_uri_value" + + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
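+            # transcode() normally derives this mapping from the
+            # google.api.http annotation; the dict built below mirrors its
+            # documented return shape (uri/method/query_params plus body):
+            expected_transcode_keys = {"uri", "method", "query_params", "body"}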
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.synthesize_long_audio(request)
+
+            expected_params = []
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_synthesize_long_audio_rest_unset_required_fields():
+    transport = transports.TextToSpeechLongAudioSynthesizeRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.synthesize_long_audio._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "input",
+                "audioConfig",
+                "outputGcsUri",
+                "voice",
+            )
+        )
+    )
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_synthesize_long_audio_rest_interceptors(null_interceptor):
+    transport = transports.TextToSpeechLongAudioSynthesizeRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.TextToSpeechLongAudioSynthesizeRestInterceptor(),
+    )
+    client = TextToSpeechLongAudioSynthesizeClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.TextToSpeechLongAudioSynthesizeRestInterceptor,
+        "post_synthesize_long_audio",
+    ) as post, mock.patch.object(
+        transports.TextToSpeechLongAudioSynthesizeRestInterceptor,
+        "pre_synthesize_long_audio",
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = cloud_tts_lrs.SynthesizeLongAudioRequest.pb(
+            cloud_tts_lrs.SynthesizeLongAudioRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = cloud_tts_lrs.SynthesizeLongAudioRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.synthesize_long_audio(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_synthesize_long_audio_rest_bad_request(
+    transport: str = "rest", request_type=cloud_tts_lrs.SynthesizeLongAudioRequest
+):
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2/voices/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
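+    # api-core maps HTTP status codes onto exception classes; a 400 surfaces
+    # as google.api_core.exceptions.BadRequest, which pytest.raises expects:
+    assert core_exceptions.BadRequest.code == 400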
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.synthesize_long_audio(request) + + +def test_synthesize_long_audio_rest_error(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextToSpeechLongAudioSynthesizeClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TextToSpeechLongAudioSynthesizeClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + transports.TextToSpeechLongAudioSynthesizeRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
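+    # Every ADC source (GOOGLE_APPLICATION_CREDENTIALS, gcloud user creds,
+    # the GCE/Cloud Run metadata server) funnels through
+    # google.auth.default(), so patching that single function covers them all.
+    adc_entry_point = google.auth.default  # the one hook patched below
+    assert callable(adc_entry_point)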
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = TextToSpeechLongAudioSynthesizeClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + ) + + +def test_text_to_speech_long_audio_synthesize_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TextToSpeechLongAudioSynthesizeTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_text_to_speech_long_audio_synthesize_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.texttospeech_v1beta1.services.text_to_speech_long_audio_synthesize.transports.TextToSpeechLongAudioSynthesizeTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TextToSpeechLongAudioSynthesizeTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("synthesize_long_audio",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_text_to_speech_long_audio_synthesize_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.texttospeech_v1beta1.services.text_to_speech_long_audio_synthesize.transports.TextToSpeechLongAudioSynthesizeTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextToSpeechLongAudioSynthesizeTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_text_to_speech_long_audio_synthesize_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
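+    # _prep_wrapped_messages is stubbed out below so the otherwise-abstract
+    # base transport can be constructed directly; only the credential
+    # fallback to ADC is exercised here, not any method wrapping.
+    base_transport_cls = transports.TextToSpeechLongAudioSynthesizeTransport
+    assert hasattr(base_transport_cls, "_prep_wrapped_messages")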
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.texttospeech_v1beta1.services.text_to_speech_long_audio_synthesize.transports.TextToSpeechLongAudioSynthesizeTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextToSpeechLongAudioSynthesizeTransport() + adc.assert_called_once() + + +def test_text_to_speech_long_audio_synthesize_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TextToSpeechLongAudioSynthesizeClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + ], +) +def test_text_to_speech_long_audio_synthesize_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + transports.TextToSpeechLongAudioSynthesizeRestTransport, + ], +) +def test_text_to_speech_long_audio_synthesize_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TextToSpeechLongAudioSynthesizeGrpcTransport, grpc_helpers), + ( + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_text_to_speech_long_audio_synthesize_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
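+    # A sketch of the channel options the helper is expected to forward:
+    # message-size limits are disabled (-1) so large requests and responses
+    # are not truncated by the gRPC defaults (normally 4 MiB on receive).
+    expected_channel_options = [
+        ("grpc.max_send_message_length", -1),
+        ("grpc.max_receive_message_length", -1),
+    ]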
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "texttospeech.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="texttospeech.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+        transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_grpc_transport_client_cert_source_for_mtls(
+    transport_class,
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check that client_cert_source_for_mtls is used if ssl_channel_credentials
+    # is not provided.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_text_to_speech_long_audio_synthesize_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.TextToSpeechLongAudioSynthesizeRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_text_to_speech_long_audio_synthesize_rest_lro_client():
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
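+    # The property is created on first access and cached on the transport,
+    # so identity (not mere equality) must hold across reads:
+    first_ref = transport.operations_client
+    assert first_ref is transport.operations_client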
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_host_no_port(transport_name):
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="texttospeech.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "texttospeech.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://texttospeech.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_host_with_port(transport_name):
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="texttospeech.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "texttospeech.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://texttospeech.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_client_transport_session_collision(
+    transport_name,
+):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = TextToSpeechLongAudioSynthesizeClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = TextToSpeechLongAudioSynthesizeClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.synthesize_long_audio._session
+    session2 = client2.transport.synthesize_long_audio._session
+    assert session1 != session2
+
+
+def test_text_to_speech_long_audio_synthesize_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TextToSpeechLongAudioSynthesizeGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_text_to_speech_long_audio_synthesize_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+        transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TextToSpeechLongAudioSynthesizeGrpcTransport,
+        transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport,
+    ],
+)
+def test_text_to_speech_long_audio_synthesize_transport_channel_mtls_with_adc(
+    transport_class,
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_text_to_speech_long_audio_synthesize_grpc_lro_client():
+    client = TextToSpeechLongAudioSynthesizeClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
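+    # Same memoization contract as the REST flavor, here backed by the gRPC
+    # operations_v1.OperationsClient:
+    grpc_ops_ref = transport.operations_client
+    assert grpc_ops_ref is transport.operations_client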
+    assert transport.operations_client is transport.operations_client
+
+
+def test_text_to_speech_long_audio_synthesize_grpc_lro_async_client():
+    client = TextToSpeechLongAudioSynthesizeAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_model_path():
+    project = "squid"
+    location = "clam"
+    model = "whelk"
+    expected = "projects/{project}/locations/{location}/models/{model}".format(
+        project=project,
+        location=location,
+        model=model,
+    )
+    actual = TextToSpeechLongAudioSynthesizeClient.model_path(project, location, model)
+    assert expected == actual
+
+
+def test_parse_model_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "model": "nudibranch",
+    }
+    path = TextToSpeechLongAudioSynthesizeClient.model_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TextToSpeechLongAudioSynthesizeClient.parse_model_path(path)
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    billing_account = "cuttlefish"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = TextToSpeechLongAudioSynthesizeClient.common_billing_account_path(
+        billing_account
+    )
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "mussel",
+    }
+    path = TextToSpeechLongAudioSynthesizeClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TextToSpeechLongAudioSynthesizeClient.parse_common_billing_account_path(
+        path
+    )
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "winkle"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = TextToSpeechLongAudioSynthesizeClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "nautilus",
+    }
+    path = TextToSpeechLongAudioSynthesizeClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TextToSpeechLongAudioSynthesizeClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "scallop"
+    expected = "organizations/{organization}".format(
+        organization=organization,
+    )
+    actual = TextToSpeechLongAudioSynthesizeClient.common_organization_path(
+        organization
+    )
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "abalone",
+    }
+    path = TextToSpeechLongAudioSynthesizeClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
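+    # A sketch of what reversibility means here: the builder rendered the
+    # template above, so the raw path should read "organizations/abalone",
+    # and parse_* inverts it back into the field dict.
+    assert path == "organizations/abalone"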
+ actual = TextToSpeechLongAudioSynthesizeClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = TextToSpeechLongAudioSynthesizeClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = TextToSpeechLongAudioSynthesizeClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechLongAudioSynthesizeClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TextToSpeechLongAudioSynthesizeClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = TextToSpeechLongAudioSynthesizeClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TextToSpeechLongAudioSynthesizeClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TextToSpeechLongAudioSynthesizeTransport, "_prep_wrapped_messages" + ) as prep: + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TextToSpeechLongAudioSynthesizeTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TextToSpeechLongAudioSynthesizeClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TextToSpeechLongAudioSynthesizeAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TextToSpeechLongAudioSynthesizeClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
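+        # Clients implement the context-manager protocol by delegating
+        # __exit__ to transport.close(); both hooks should exist:
+        assert hasattr(client, "__enter__") and hasattr(client, "__exit__")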
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + TextToSpeechLongAudioSynthesizeClient, + transports.TextToSpeechLongAudioSynthesizeGrpcTransport, + ), + ( + TextToSpeechLongAudioSynthesizeAsyncClient, + transports.TextToSpeechLongAudioSynthesizeGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-texttospeech/texttospeech-v1beta1-py.tar.gz b/packages/google-cloud-texttospeech/texttospeech-v1beta1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-videointelligence/.OwlBot.yaml b/packages/google-cloud-videointelligence/.OwlBot.yaml new file mode 100644 index 000000000000..7613f749a5a7 --- /dev/null +++ b/packages/google-cloud-videointelligence/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/videointelligence/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-videointelligence/$1 + +begin-after-commit-hash: 9f1fe1408a1799948f1b17add7c3a15145057718 + diff --git a/packages/google-cloud-videointelligence/.coveragerc b/packages/google-cloud-videointelligence/.coveragerc new file mode 100644 index 000000000000..dc460726bc09 --- /dev/null +++ b/packages/google-cloud-videointelligence/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/videointelligence/__init__.py + google/cloud/videointelligence/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-videointelligence/.flake8 b/packages/google-cloud-videointelligence/.flake8 new file mode 100644 index 000000000000..87f6e408c47d --- /dev/null +++ b/packages/google-cloud-videointelligence/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[flake8]
+ignore = E203, E231, E266, E501, W503
+exclude =
+  # Exclude generated code.
+  **/proto/**
+  **/gapic/**
+  **/services/**
+  **/types/**
+  *_pb2.py
+
+  # Standard linting exemptions.
+  **/.nox/**
+  __pycache__,
+  .git,
+  *.pyc,
+  conf.py
diff --git a/packages/google-cloud-videointelligence/.gitignore b/packages/google-cloud-videointelligence/.gitignore
new file mode 100644
index 000000000000..b4243ced74e4
--- /dev/null
+++ b/packages/google-cloud-videointelligence/.gitignore
@@ -0,0 +1,63 @@
+*.py[cod]
+*.sw[op]
+
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+.eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+__pycache__
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.nox
+.cache
+.pytest_cache
+
+
+# Mac
+.DS_Store
+
+# JetBrains
+.idea
+
+# VS Code
+.vscode
+
+# emacs
+*~
+
+# Built documentation
+docs/_build
+bigquery/docs/generated
+docs.metadata
+
+# Virtual environment
+env/
+
+# Test logs
+coverage.xml
+*sponge_log.xml
+
+# System test environment variables.
+system_tests/local_test_setup
+
+# Make sure a generated file isn't accidentally committed.
+pylintrc
+pylintrc.test
diff --git a/packages/google-cloud-videointelligence/.repo-metadata.json b/packages/google-cloud-videointelligence/.repo-metadata.json
new file mode 100644
index 000000000000..19e463e2b459
--- /dev/null
+++ b/packages/google-cloud-videointelligence/.repo-metadata.json
@@ -0,0 +1,18 @@
+{
+    "name": "videointelligence",
+    "name_pretty": "Video Intelligence",
+    "product_documentation": "https://cloud.google.com/video-intelligence/docs/",
+    "client_documentation": "https://cloud.google.com/python/docs/reference/videointelligence/latest",
+    "issue_tracker": "https://issuetracker.google.com/savedsearches/5084810",
+    "release_level": "stable",
+    "language": "python",
+    "library_type": "GAPIC_AUTO",
+    "repo": "googleapis/google-cloud-python",
+    "distribution_name": "google-cloud-videointelligence",
+    "api_id": "videointelligence.googleapis.com",
+    "requires_billing": true,
+    "default_version": "v1",
+    "codeowner_team": "@googleapis/cdpe-cloudai",
+    "api_shortname": "videointelligence",
+    "api_description": "makes videos searchable and discoverable by extracting metadata with an easy-to-use API. You can search every moment of every video file in your catalog and find every occurrence as well as its significance. It quickly annotates videos stored in Google Cloud Storage, helps you identify the key entities (nouns) in your video, and shows when they occur within the video. Separate signal from noise by retrieving relevant information at the video, shot, or frame level."
+} diff --git a/packages/google-cloud-videointelligence/CHANGELOG.md b/packages/google-cloud-videointelligence/CHANGELOG.md new file mode 100644 index 000000000000..5410ead733d6 --- /dev/null +++ b/packages/google-cloud-videointelligence/CHANGELOG.md @@ -0,0 +1,569 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-videointelligence/#history + +## [2.11.4](https://github.com/googleapis/python-videointelligence/compare/v2.11.3...v2.11.4) (2023-09-19) + + +### Documentation + +* Minor formatting ([10422c3](https://github.com/googleapis/python-videointelligence/commit/10422c3d5b9efb281ff11f45d84b5e9bfde8ab59)) + +## [2.11.3](https://github.com/googleapis/python-videointelligence/compare/v2.11.2...v2.11.3) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#433](https://github.com/googleapis/python-videointelligence/issues/433)) ([9a5baa7](https://github.com/googleapis/python-videointelligence/commit/9a5baa7df22b5389bfb83d8488ba12206ebec73f)) + +## [2.11.2](https://github.com/googleapis/python-videointelligence/compare/v2.11.1...v2.11.2) (2023-06-01) + + +### Documentation + +* **samples:** Expand region tags to include imports ([4ff158d](https://github.com/googleapis/python-videointelligence/commit/4ff158da2450cfd6be7bf8881f7bf7aa55911c78)) + +## [2.11.1](https://github.com/googleapis/python-videointelligence/compare/v2.11.0...v2.11.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#412](https://github.com/googleapis/python-videointelligence/issues/412)) ([a3d5521](https://github.com/googleapis/python-videointelligence/commit/a3d5521f16abe05fda32e19937c599e1dad67c57)) + +## [2.11.0](https://github.com/googleapis/python-videointelligence/compare/v2.10.1...v2.11.0) (2023-02-28) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([c1e2400](https://github.com/googleapis/python-videointelligence/commit/c1e2400e2f22d80fc256aba41310e2ec524cdf07)) + +## [2.10.1](https://github.com/googleapis/python-videointelligence/compare/v2.10.0...v2.10.1) (2023-01-23) + + +### Bug Fixes + +* Add context manager return types ([0815c39](https://github.com/googleapis/python-videointelligence/commit/0815c399f098cdcfbeeb3a780aed76a9c470cded)) + + +### Documentation + +* Add documentation for enums ([0815c39](https://github.com/googleapis/python-videointelligence/commit/0815c399f098cdcfbeeb3a780aed76a9c470cded)) + +## [2.10.0](https://github.com/googleapis/python-videointelligence/compare/v2.9.0...v2.10.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#394](https://github.com/googleapis/python-videointelligence/issues/394)) ([f30a8f7](https://github.com/googleapis/python-videointelligence/commit/f30a8f7fb6b61c3b1ce08fc42e8cacb895e95161)) + +## [2.9.0](https://github.com/googleapis/python-videointelligence/compare/v2.8.3...v2.9.0) (2022-12-15) + + +### Features + +* Add support for `google.cloud.videointelligence.__version__` ([29c4e5f](https://github.com/googleapis/python-videointelligence/commit/29c4e5fb6163d30383cb42cd633fc32125508ddf)) +* Add typing to proto.Message based class attributes ([29c4e5f](https://github.com/googleapis/python-videointelligence/commit/29c4e5fb6163d30383cb42cd633fc32125508ddf)) + + +### Bug Fixes + +* Add dict typing for client_options 
([29c4e5f](https://github.com/googleapis/python-videointelligence/commit/29c4e5fb6163d30383cb42cd633fc32125508ddf)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([ad16669](https://github.com/googleapis/python-videointelligence/commit/ad1666990059300d31c0c6e1fb186146f84c65ee)) +* Drop usage of pkg_resources ([ad16669](https://github.com/googleapis/python-videointelligence/commit/ad1666990059300d31c0c6e1fb186146f84c65ee)) +* Fix timeout default values ([ad16669](https://github.com/googleapis/python-videointelligence/commit/ad1666990059300d31c0c6e1fb186146f84c65ee)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([29c4e5f](https://github.com/googleapis/python-videointelligence/commit/29c4e5fb6163d30383cb42cd633fc32125508ddf)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([ad16669](https://github.com/googleapis/python-videointelligence/commit/ad1666990059300d31c0c6e1fb186146f84c65ee)) + +## [2.8.3](https://github.com/googleapis/python-videointelligence/compare/v2.8.2...v2.8.3) (2022-10-10) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#375](https://github.com/googleapis/python-videointelligence/issues/375)) ([2042093](https://github.com/googleapis/python-videointelligence/commit/2042093b5520c7990cfa6cce07931cc020d47a63)) + +## [2.8.2](https://github.com/googleapis/python-videointelligence/compare/v2.8.1...v2.8.2) (2022-09-29) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#371](https://github.com/googleapis/python-videointelligence/issues/371)) ([57912db](https://github.com/googleapis/python-videointelligence/commit/57912db4beb081198c8935b31419cf129a5a067d)) + +## [2.8.1](https://github.com/googleapis/python-videointelligence/compare/v2.8.0...v2.8.1) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([292b7db](https://github.com/googleapis/python-videointelligence/commit/292b7dbd04fdee40d89879ce0d7b2eaddced6b06)) +* **deps:** require proto-plus >= 1.22.0 ([292b7db](https://github.com/googleapis/python-videointelligence/commit/292b7dbd04fdee40d89879ce0d7b2eaddced6b06)) + +## [2.8.0](https://github.com/googleapis/python-videointelligence/compare/v2.7.1...v2.8.0) (2022-07-16) + + +### Features + +* add audience parameter ([954b308](https://github.com/googleapis/python-videointelligence/commit/954b308a895ba9782025dd9ee07fd991e7e70e63)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#341](https://github.com/googleapis/python-videointelligence/issues/341)) ([00b91f5](https://github.com/googleapis/python-videointelligence/commit/00b91f5eb652f4cdf183f9f69011a529a790f294)) +* require python 3.7+ ([#339](https://github.com/googleapis/python-videointelligence/issues/339)) ([78918dc](https://github.com/googleapis/python-videointelligence/commit/78918dcfa59ee1439ca30fffc0d186d923f39271)) + +## [2.7.1](https://github.com/googleapis/python-videointelligence/compare/v2.7.0...v2.7.1) (2022-06-06) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#326](https://github.com/googleapis/python-videointelligence/issues/326)) ([4773005](https://github.com/googleapis/python-videointelligence/commit/47730057a5d4f13e09e25764def958961d08cad1)) + + +### Documentation + +* fix changelog header to consistent size 
([#327](https://github.com/googleapis/python-videointelligence/issues/327)) ([4a5f177](https://github.com/googleapis/python-videointelligence/commit/4a5f177b785994262fbfd6a75dc84bef4d502d01)) + +## [2.7.0](https://github.com/googleapis/python-videointelligence/compare/v2.6.1...v2.7.0) (2022-04-28) + + +### Features + +* field ObjectTrackingAnnotation.segment moved into oneof, added track_id ([#318](https://github.com/googleapis/python-videointelligence/issues/318)) ([f1f88d4](https://github.com/googleapis/python-videointelligence/commit/f1f88d4eef49927d8822f90af154cdc2f582c471)) + +## [2.6.1](https://github.com/googleapis/python-videointelligence/compare/v2.6.0...v2.6.1) (2022-03-05) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#282](https://github.com/googleapis/python-videointelligence/issues/282)) ([5a8dc98](https://github.com/googleapis/python-videointelligence/commit/5a8dc9859bf8aa62ea74e55f9af2a272f4e05486)) +* **deps:** require proto-plus>=1.15.0 ([5a8dc98](https://github.com/googleapis/python-videointelligence/commit/5a8dc9859bf8aa62ea74e55f9af2a272f4e05486)) + +## [2.6.0](https://github.com/googleapis/python-videointelligence/compare/v2.5.1...v2.6.0) (2022-02-11) + + +### Features + +* add api key support ([#268](https://github.com/googleapis/python-videointelligence/issues/268)) ([a35f538](https://github.com/googleapis/python-videointelligence/commit/a35f538ae5595fb15112025e69661f0484317294)) + + +### Bug Fixes + +* resolve DuplicateCredentialArgs error when using credentials_file ([aef5b0c](https://github.com/googleapis/python-videointelligence/commit/aef5b0cf7bf0878be037ebf5b1dd65921d966ff4)) + + +### Documentation + +* add generated snippets ([#273](https://github.com/googleapis/python-videointelligence/issues/273)) ([f0cc364](https://github.com/googleapis/python-videointelligence/commit/f0cc36444044b971b43362915e7d8d0e9bef62bf)) + +## [2.5.1](https://www.github.com/googleapis/python-videointelligence/compare/v2.5.0...v2.5.1) (2021-11-01) + + +### Bug Fixes + +* **deps:** drop packaging dependency ([959836b](https://www.github.com/googleapis/python-videointelligence/commit/959836bf7b4f4b93eaa632d655f5433a150c5ca7)) +* **deps:** require google-api-core >= 1.28.0 ([959836b](https://www.github.com/googleapis/python-videointelligence/commit/959836bf7b4f4b93eaa632d655f5433a150c5ca7)) + + +### Documentation + +* list oneofs in docstring ([959836b](https://www.github.com/googleapis/python-videointelligence/commit/959836bf7b4f4b93eaa632d655f5433a150c5ca7)) + +## [2.5.0](https://www.github.com/googleapis/python-videointelligence/compare/v2.4.0...v2.5.0) (2021-10-18) + + +### Features + +* add support for python 3.10 ([#235](https://www.github.com/googleapis/python-videointelligence/issues/235)) ([225ea0f](https://www.github.com/googleapis/python-videointelligence/commit/225ea0f90cd226d74ccb76229ca4008a9e1d8a23)) + +## [2.4.0](https://www.github.com/googleapis/python-videointelligence/compare/v2.3.3...v2.4.0) (2021-10-07) + + +### Features + +* add context manager support in client ([#229](https://www.github.com/googleapis/python-videointelligence/issues/229)) ([ac75850](https://www.github.com/googleapis/python-videointelligence/commit/ac75850925ac29bd3ad238bebd48cbedfe638942)) + +## [2.3.3](https://www.github.com/googleapis/python-videointelligence/compare/v2.3.2...v2.3.3) (2021-09-24) + + +### Bug Fixes + +* add 'dict' annotation type 
to 'request' ([ec86dc6](https://www.github.com/googleapis/python-videointelligence/commit/ec86dc6211a2b5be69c2b74dd65ea4968f89f244)) + +## [2.3.2](https://www.github.com/googleapis/python-videointelligence/compare/v2.3.1...v2.3.2) (2021-07-26) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#193](https://www.github.com/googleapis/python-videointelligence/issues/193)) ([29475ff](https://www.github.com/googleapis/python-videointelligence/commit/29475ff90809473bf23cbc3d284e1a2afdc69e94)) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#190](https://www.github.com/googleapis/python-videointelligence/issues/190)) ([bed1899](https://www.github.com/googleapis/python-videointelligence/commit/bed1899b6f31e4fd29cbdebbe21b0dea587ce483)) + +## [2.3.1](https://www.github.com/googleapis/python-videointelligence/compare/v2.3.0...v2.3.1) (2021-07-20) + + +### Bug Fixes + +* **deps:** pin 'google-{api,cloud}-core', 'google-auth' to allow 2.x versions ([#189](https://www.github.com/googleapis/python-videointelligence/issues/189)) ([2fb8dc9](https://www.github.com/googleapis/python-videointelligence/commit/2fb8dc92478a125eb4871e30fe6840238ac2cfa2)) + +## [2.3.0](https://www.github.com/googleapis/python-videointelligence/compare/v2.2.0...v2.3.0) (2021-07-12) + + +### Features + +* add always_use_jwt_access ([#173](https://www.github.com/googleapis/python-videointelligence/issues/173)) ([3c7fbb0](https://www.github.com/googleapis/python-videointelligence/commit/3c7fbb00a6bddd239d3fdb3a75a485db3dfe041e)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-videointelligence/issues/1127)) ([#163](https://www.github.com/googleapis/python-videointelligence/issues/163)) ([09b0a33](https://www.github.com/googleapis/python-videointelligence/commit/09b0a338c8de99dc547c5dc5e15b42d14210ba18)), closes [#1126](https://www.github.com/googleapis/python-videointelligence/issues/1126) + +## [2.2.0](https://www.github.com/googleapis/python-videointelligence/compare/v2.1.0...v2.2.0) (2021-05-28) + + +### Features + +* support self-signed JWT flow for service accounts ([50da518](https://www.github.com/googleapis/python-videointelligence/commit/50da51820e11cb446a2803eff5895c2eba878adb)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([50da518](https://www.github.com/googleapis/python-videointelligence/commit/50da51820e11cb446a2803eff5895c2eba878adb)) +* **deps:** add packaging requirement ([#154](https://www.github.com/googleapis/python-videointelligence/issues/154)) ([26214ed](https://www.github.com/googleapis/python-videointelligence/commit/26214ed1603599bd3034c615f73224bc4fc5f42d)) + +## [2.1.0](https://www.github.com/googleapis/python-videointelligence/compare/v2.0.0...v2.1.0) (2021-03-31) + + +### Features + +* Introduce tracks and thumbnails fields for FaceDetectionAnnotations ([#90](https://www.github.com/googleapis/python-videointelligence/issues/90)) ([e4bbbad](https://www.github.com/googleapis/python-videointelligence/commit/e4bbbad245df46c226f51fac4d89f5b8bff64d15)) +* updates person detection samples for GA ([#86](https://www.github.com/googleapis/python-videointelligence/issues/86)) ([1a68219](https://www.github.com/googleapis/python-videointelligence/commit/1a68219142ed23c434417808da9fcdca3812280d)) + + +### Bug Fixes + +* remove gRPC send/recv limits; add enums to `__init__.py` ([#94](https://www.github.com/googleapis/python-videointelligence/issues/94)) 
([d2dcc14](https://www.github.com/googleapis/python-videointelligence/commit/d2dcc14b1d5b0b1df815aa6fe50007266365462b)) + +## [2.0.0](https://www.github.com/googleapis/python-videointelligence/compare/v1.16.1...v2.0.0) (2020-11-19) + + +### ⚠ BREAKING CHANGES + +* use microgenerator. See [Migration Guide](https://pypi.org/project/google-cloud-logging/2.0.0/). (#76) + +### Features + +* use microgenerator ([#76](https://www.github.com/googleapis/python-videointelligence/issues/76)) + +## [1.16.1](https://www.github.com/googleapis/python-videointelligence/compare/v1.16.0...v1.16.1) (2020-11-18) + + +### Documentation + +* add samples from video/cloud-client/labels ([#49](https://www.github.com/googleapis/python-videointelligence/issues/49)) ([07806d3](https://www.github.com/googleapis/python-videointelligence/commit/07806d3b7e62aa985c994c2f56f787d458beb60d)), closes [#930](https://www.github.com/googleapis/python-videointelligence/issues/930) [#945](https://www.github.com/googleapis/python-videointelligence/issues/945) [#952](https://www.github.com/googleapis/python-videointelligence/issues/952) [#962](https://www.github.com/googleapis/python-videointelligence/issues/962) [#1093](https://www.github.com/googleapis/python-videointelligence/issues/1093) [#1121](https://www.github.com/googleapis/python-videointelligence/issues/1121) [#1123](https://www.github.com/googleapis/python-videointelligence/issues/1123) [#1088](https://www.github.com/googleapis/python-videointelligence/issues/1088) [#1158](https://www.github.com/googleapis/python-videointelligence/issues/1158) [#1160](https://www.github.com/googleapis/python-videointelligence/issues/1160) [#1186](https://www.github.com/googleapis/python-videointelligence/issues/1186) [#1221](https://www.github.com/googleapis/python-videointelligence/issues/1221) [#1254](https://www.github.com/googleapis/python-videointelligence/issues/1254) [#1377](https://www.github.com/googleapis/python-videointelligence/issues/1377) [#1441](https://www.github.com/googleapis/python-videointelligence/issues/1441) [#1464](https://www.github.com/googleapis/python-videointelligence/issues/1464) [#1639](https://www.github.com/googleapis/python-videointelligence/issues/1639) [#1658](https://www.github.com/googleapis/python-videointelligence/issues/1658) [#1743](https://www.github.com/googleapis/python-videointelligence/issues/1743) [#1846](https://www.github.com/googleapis/python-videointelligence/issues/1846) [#1980](https://www.github.com/googleapis/python-videointelligence/issues/1980) [#2162](https://www.github.com/googleapis/python-videointelligence/issues/2162) [#2439](https://www.github.com/googleapis/python-videointelligence/issues/2439) [#2005](https://www.github.com/googleapis/python-videointelligence/issues/2005) [#3169](https://www.github.com/googleapis/python-videointelligence/issues/3169) [#2806](https://www.github.com/googleapis/python-videointelligence/issues/2806) [#4017](https://www.github.com/googleapis/python-videointelligence/issues/4017) [#4041](https://www.github.com/googleapis/python-videointelligence/issues/4041) +* add samples from video/cloud-client/shotchange ([#72](https://www.github.com/googleapis/python-videointelligence/issues/72)) ([d0a03e3](https://www.github.com/googleapis/python-videointelligence/commit/d0a03e3e77ca8f079941969f1245c2064b24ec51)), closes [#930](https://www.github.com/googleapis/python-videointelligence/issues/930) [#933](https://www.github.com/googleapis/python-videointelligence/issues/933) 
[#945](https://www.github.com/googleapis/python-videointelligence/issues/945) [#952](https://www.github.com/googleapis/python-videointelligence/issues/952) [#962](https://www.github.com/googleapis/python-videointelligence/issues/962) [#958](https://www.github.com/googleapis/python-videointelligence/issues/958) [#968](https://www.github.com/googleapis/python-videointelligence/issues/968) [#1093](https://www.github.com/googleapis/python-videointelligence/issues/1093) [#1121](https://www.github.com/googleapis/python-videointelligence/issues/1121) [#1123](https://www.github.com/googleapis/python-videointelligence/issues/1123) [#1088](https://www.github.com/googleapis/python-videointelligence/issues/1088) [#1158](https://www.github.com/googleapis/python-videointelligence/issues/1158) [#1160](https://www.github.com/googleapis/python-videointelligence/issues/1160) [#1186](https://www.github.com/googleapis/python-videointelligence/issues/1186) [#1221](https://www.github.com/googleapis/python-videointelligence/issues/1221) [#1254](https://www.github.com/googleapis/python-videointelligence/issues/1254) [#1377](https://www.github.com/googleapis/python-videointelligence/issues/1377) [#1441](https://www.github.com/googleapis/python-videointelligence/issues/1441) [#1464](https://www.github.com/googleapis/python-videointelligence/issues/1464) [#1639](https://www.github.com/googleapis/python-videointelligence/issues/1639) [#1658](https://www.github.com/googleapis/python-videointelligence/issues/1658) [#1743](https://www.github.com/googleapis/python-videointelligence/issues/1743) [#1846](https://www.github.com/googleapis/python-videointelligence/issues/1846) [#1871](https://www.github.com/googleapis/python-videointelligence/issues/1871) [#1980](https://www.github.com/googleapis/python-videointelligence/issues/1980) [#2162](https://www.github.com/googleapis/python-videointelligence/issues/2162) [#2439](https://www.github.com/googleapis/python-videointelligence/issues/2439) [#2005](https://www.github.com/googleapis/python-videointelligence/issues/2005) [#3169](https://www.github.com/googleapis/python-videointelligence/issues/3169) [#2806](https://www.github.com/googleapis/python-videointelligence/issues/2806) [#4017](https://www.github.com/googleapis/python-videointelligence/issues/4017) [#4041](https://www.github.com/googleapis/python-videointelligence/issues/4041) + +## [1.16.0](https://www.github.com/googleapis/python-videointelligence/compare/v1.15.0...v1.16.0) (2020-10-02) + + +### Features + +* **v1:** add PersonDetection and FaceDetection ([#53](https://www.github.com/googleapis/python-videointelligence/issues/53)) ([55415a8](https://www.github.com/googleapis/python-videointelligence/commit/55415a81e738badc997e93d60c37b5dbc8b373ea)) +* video speech transcription [([#1849](https://www.github.com/googleapis/python-videointelligence/issues/1849))](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1849) ([0bb8156](https://www.github.com/googleapis/python-videointelligence/commit/0bb8156ddda4fde4bbdda5f48d21fbbc34a2b0e8)) + + +### Documentation + +* corrects release badge and link ([#36](https://www.github.com/googleapis/python-videointelligence/issues/36)) ([20ad69c](https://www.github.com/googleapis/python-videointelligence/commit/20ad69cefb473565d5065e4b118398c675cd5f79)) + +## [1.15.0](https://www.github.com/googleapis/python-videointelligence/compare/v1.14.0...v1.15.0) (2020-06-09) + + +### Features + +* add support for streaming automl action recognition in v1p3beta1; make 
'features' a positional param for annotate_video in betas ([#31](https://www.github.com/googleapis/python-videointelligence/issues/31)) ([586f920](https://www.github.com/googleapis/python-videointelligence/commit/586f920a1932e1a813adfed500502fba0ff5edb7)), closes [#517](https://www.github.com/googleapis/python-videointelligence/issues/517) [#538](https://www.github.com/googleapis/python-videointelligence/issues/538) [#565](https://www.github.com/googleapis/python-videointelligence/issues/565) [#576](https://www.github.com/googleapis/python-videointelligence/issues/576) [#506](https://www.github.com/googleapis/python-videointelligence/issues/506) [#586](https://www.github.com/googleapis/python-videointelligence/issues/586) [#585](https://www.github.com/googleapis/python-videointelligence/issues/585) + +## [1.14.0](https://www.github.com/googleapis/python-videointelligence/compare/v1.13.0...v1.14.0) (2020-03-12) + + +### Features + +* add logo recognition to v1 (via synth) ([#15](https://www.github.com/googleapis/python-videointelligence/issues/15)) ([84b1688](https://www.github.com/googleapis/python-videointelligence/commit/84b16887225acbb1d1821310baf10ef52967ce0b)) + +## [1.13.0](https://www.github.com/googleapis/python-videointelligence/compare/v1.12.1...v1.13.0) (2020-02-13) + + +### Features + +* **videointelligence:** add person detection and face detection ([#5](https://www.github.com/googleapis/python-videointelligence/issues/5)) ([6464f30](https://www.github.com/googleapis/python-videointelligence/commit/6464f30d8ca8a090bf26b099a9734391010ce162)) + +## 1.12.1 + +11-14-2019 16:12 PST + +### Implementation Changes +- Revert [#9440](https://github.com/googleapis/google-cloud-python/pull/9440). Make `features` a keyword parameter to `annotate_video`. ([#9810](https://github.com/googleapis/google-cloud-python/pull/9810)) + +## 1.12.0 + +11-08-2019 09:32 PST + + +### Implementation Changes +- Make `features` a positional parameter in `annotate_video`, update retry config, make AnnotateVideo nonidempotent (via synth). ([#9440](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9440)) + +### New Features +- Add celebrity recognition support (via synth). ([#9612](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9612)) +- Drop support for `v1beta1` version of the API. ([#9426](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9426)) + +### Documentation +- Tweak docstrings, client configuration (via synth). ([#9434](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9434)) +- Change requests intersphinx url (via synth). ([#9412](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9412)) +- Fix intersphinx reference to requests. ([#9294](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9085)) +- Remove compatibility badges from READMEs. ([#9035](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Enrich VPCSC tests. ([#9193](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9193)) + +## 1.11.0 + +08-12-2019 14:00 PDT + +### New Features +- Add segment / shot presence label annotations fields (via synth). 
([#8987](https://github.com/googleapis/google-cloud-python/pull/8987)) +- Add V1 video segment / feature fields; remove send/recv msg size limit (via synth). ([#8975](https://github.com/googleapis/google-cloud-python/pull/8975)) + +### Documentation +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + +## 1.10.0 + +07-24-2019 17:52 PDT + + +### Implementation Changes +- Allow kwargs to be passed to create_channel (via synth). ([#8410](https://github.com/googleapis/google-cloud-python/pull/8410)) + +### New Features +- Add 'client_options' support (via synth). ([#8528](https://github.com/googleapis/google-cloud-python/pull/8528)) +- Add support for streaming classification / object tracking (via synth). ([#8427](https://github.com/googleapis/google-cloud-python/pull/8427)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) + +### Internal / Testing Changes +- Add VPC SC integration tests. ([#8607](https://github.com/googleapis/google-cloud-python/pull/8607)) +- Pin black version (via synth). ([#8601](https://github.com/googleapis/google-cloud-python/pull/8601)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Change test video URI, add disclaimer to auto-generated template files (via synth). ([#8334](https://github.com/googleapis/google-cloud-python/pull/8334)) +- Declare encoding as utf-8 in pb2 files (via synth). ([#8370](https://github.com/googleapis/google-cloud-python/pull/8370)) +- Suppress checking 'cov-fail-under' in nox default session (via synth). ([#8256](https://github.com/googleapis/google-cloud-python/pull/8256)) + +## 1.9.0 + +06-05-2019 10:42 PDT + + +### Implementation Changes +- Remove classifier for Python 3.4 for end-of-life. ([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) + +### New Features +- Add v1 object tracking support, v1p3b1 speech transcription / logo recognition support (via synth). ([#8221](https://github.com/googleapis/google-cloud-python/pull/8221)) + +### Documentation +- Change input_uri for sample video (via synth). ([#7944](https://github.com/googleapis/google-cloud-python/pull/7944)) +- Fix uri to video (via synth). ([#7862](https://github.com/googleapis/google-cloud-python/pull/7862)) + +### Internal / Testing Changes +- Fix coverage in 'types.py' (via synth). ([#8169](https://github.com/googleapis/google-cloud-python/pull/8169)) +- Blacken noxfile.py, setup.py (via synth). ([#8136](https://github.com/googleapis/google-cloud-python/pull/8136)) +- Harden synth replacement against template changes. ([#8104](https://github.com/googleapis/google-cloud-python/pull/8104)) +- Update noxfile (via synth). ([#7838](https://github.com/googleapis/google-cloud-python/pull/7838)) +- Add nox session `docs` (via synth). ([#7786](https://github.com/googleapis/google-cloud-python/pull/7786)) +- Update docs build configuration. 
([#7603](https://github.com/googleapis/google-cloud-python/pull/7603)) + +## 1.8.0 + +03-06-2019 12:20 PST + +### New Features +- Add videointelligence v1p3beta1 (Streaming API Support). ([#7490](https://github.com/googleapis/google-cloud-python/pull/7490)) + +### Internal / Testing Changes +- Copy lintified proto files (via synth). ([#7472](https://github.com/googleapis/google-cloud-python/pull/7472)) + +## 1.7.0 + +02-25-2019 12:25 PST + + +### Implementation Changes +- Remove unused message exports. ([#7279](https://github.com/googleapis/google-cloud-python/pull/7279)) +- Protoc-generated serialization update. ([#7099](https://github.com/googleapis/google-cloud-python/pull/7099)) + +### New Features +- Add text detection / object tracking feature support (via sync). ([#7415](https://github.com/googleapis/google-cloud-python/pull/7415)) + +### Documentation +- Updated client library documentation URLs. ([#7307](https://github.com/googleapis/google-cloud-python/pull/7307)) +- Update copyright headers. +- Pick up stub docstring fix in GAPIC generator. ([#6986](https://github.com/googleapis/google-cloud-python/pull/6986)) + +### Internal / Testing Changes +- Add clarifying comment to blacken nox target. ([#7407](https://github.com/googleapis/google-cloud-python/pull/7407)) +- Copy proto files alongside protoc versions. +- Add protos as an artifact to library. ([#7205](https://github.com/googleapis/google-cloud-python/pull/7205)) + +## 1.6.1 + +12-17-2018 17:09 PST + +### Implementation Changes +- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) +- Fixes to GAPIC generator. ([#6578](https://github.com/googleapis/google-cloud-python/pull/6578)) + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) + +### Internal / Testing Changes +- Add baseline for synth.metadata +- Update noxfile. +- Blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- Omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) + +## 1.6.0 + +11-09-2018 13:36 PST + + +### Implementation Changes +- Add support for speech transcription. ([#6313](https://github.com/googleapis/google-cloud-python/pull/6313)) +- Fix client_info bug, update docstrings and timeouts. ([#6425](https://github.com/googleapis/google-cloud-python/pull/6425)) + +### Dependencies +- Bump minimum 'api_core' version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/googleapis/google-cloud-python/pull/6391)) +- Avoid broken 'google-common-apis 1.5.4' release. ([#6355](https://github.com/googleapis/google-cloud-python/pull/6355)) + +### Documentation +- normalize use of support level badges.([#6159](https://github.com/googleapis/google-cloud-python/pull/6159)) + +### Internal / Testing Changes +- Add 'mock' to unit test dependencies for autogen libs. 
([#6402](https://github.com/googleapis/google-cloud-python/pull/6402))
+
+## 1.5.0
+
+### New Features
+- Regenerate v1p2beta1 to add Object Tracking and Text Detection Beta ([#6225](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6225))
+
+### Documentation
+- Harmonize / DRY 'README.rst' / 'docs/index.rst'. ([#6002](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6002))
+- Correct text for the pip install command ([#6198](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6198))
+
+### Internal / Testing Changes
+- Use new Nox ([#6175](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6175))
+
+## 1.4.0
+
+### New Features
+- Add support for 'v1p2beta1' API version ([#6004](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6004))
+
+### Implementation Changes
+- Re-generate library using videointelligence/synth.py ([#5982](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5982))
+- Re-generate library using videointelligence/synth.py ([#5954](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5954))
+
+## 1.3.0
+
+### Implementation Changes
+- Avoid overwriting '__module__' of messages from shared modules. (#5364)
+
+### New Features
+- Regenerate Video Intelligence v1p1beta1 endpoint to add new features (#5617)
+
+### Internal / Testing Changes
+- Add Test runs for Python 3.7 and remove 3.4 (#5295)
+
+## 1.2.0
+
+### New Features
+
+- Add v1p1beta1 version of videointelligence (#5165)
+
+### Internal / Testing Changes
+
+- Fix v1p1beta1 unit tests (#5064)
+
+## 1.1.0
+
+### Interface additions
+
+- Added video v1p1beta1 (#5048)
+
+## 1.0.1
+
+### Dependencies
+
+- Update dependency range for api-core to include v1.0.0 releases (#4944)
+
+### Testing and internal changes
+
+- Normalize all setup.py files (#4909)
+
+## 1.0.0
+
+[![release level](https://img.shields.io/badge/release%20level-general%20availability%20%28GA%29-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages)
+
+### Features
+
+#### General Availability
+
+The `google-cloud-videointelligence` package is now supported at the
+**general availability** quality level. This means it is stable; the code
+and API surface will not change in backwards-incompatible ways unless
+absolutely necessary (e.g. because of critical security issues) or with an
+extensive deprecation period.
+
+One exception to this: We will remove beta endpoints (as a semver-minor update)
+at whatever point the underlying endpoints go away.
+
+#### v1 endpoint
+
+The underlying video intelligence API has also gone general availability, and
+this library by default now uses the `v1` endpoint (rather than `v1beta2`)
+unless you explicitly used something else. This is a backwards compatible
+change as the `v1` and `v1beta2` endpoints are identical. If you pinned to
+`v1beta2`, you are encouraged to move to `v1`.
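+
+For illustration, pinning an API version explicitly looks like the following
+minimal sketch (the module-level aliases shown are the package's documented
+import surface; treat this as an example, not part of the release notes):
+
+```python
+from google.cloud import videointelligence  # resolves to the v1 surface by default
+from google.cloud import videointelligence_v1beta2  # explicit beta pin
+
+# Default client: talks to the GA v1 endpoint.
+client = videointelligence.VideoIntelligenceServiceClient()
+
+# Pinned client: keeps the v1beta2 surface until you migrate to v1.
+beta_client = videointelligence_v1beta2.VideoIntelligenceServiceClient()
+```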
+
+## 0.28.0
+
+### Documentation
+
+- Added link to "Python Development Environment Setup Guide" in
+  project README (#4187, h/t to @michaelawyu)
+
+### Dependencies
+
+- Upgrading to `google-cloud-core >= 0.28.0` and adding dependency
+  on `google-api-core` (#4221, #4280)
+- Deferring to `google-api-core` for `grpcio` and
+  `googleapis-common-protos` dependencies (#4096, #4098)
+
+### Packaging
+
+- Change "Development Status" in package metadata from `3 - Alpha`
+  to `4 - Beta` (eb43849569556c6e47f11b8310864c5a280507f2)
+
+PyPI: https://pypi.org/project/google-cloud-videointelligence/0.28.0/
diff --git a/packages/google-cloud-videointelligence/CODE_OF_CONDUCT.md b/packages/google-cloud-videointelligence/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000000..039f43681204
--- /dev/null
+++ b/packages/google-cloud-videointelligence/CODE_OF_CONDUCT.md
@@ -0,0 +1,95 @@
+
+# Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+  advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+  address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+ +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-videointelligence/CONTRIBUTING.rst b/packages/google-cloud-videointelligence/CONTRIBUTING.rst new file mode 100644 index 000000000000..d5145c017d08 --- /dev/null +++ b/packages/google-cloud-videointelligence/CONTRIBUTING.rst @@ -0,0 +1,269 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). 
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``google-cloud-python`` `repo`_ on GitHub.
+
+- Fork and clone the ``google-cloud-python`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``google-cloud-python`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-google-cloud-python``. E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python
+   $ cd hack-on-google-cloud-python
+   # Configure remotes such that you can pull changes from the googleapis/google-cloud-python
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/google-cloud-python.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/google-cloud-python
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+    $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+    $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+    $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+    export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+    export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``google-cloud-python``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit. If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test");
+  a sketch of the pattern follows.
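+
+For illustration, a minimal, self-contained sketch of the ``_call_fut`` pattern
+(a standard-library function stands in for the function-under-test here)::
+
+    # test_example.py -- hypothetical test module, not part of this package
+    import unittest
+
+    class TestSplitext(unittest.TestCase):
+        def _call_fut(self, path):
+            # Import and invoke the function-under-test in one place so
+            # each test body reads as a plain call.
+            from os.path import splitext
+
+            return splitext(path)
+
+        def test_simple(self):
+            self.assertEqual(self._call_fut("clip.mp4"), ("clip", ".mp4"))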
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+    # Run all system tests
+    $ nox -s system
+
+    # Run a single system test
+    $ nox -s system-3.11 -- -k <name of test>
+
+
+  .. note::
+
+    System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11.
+    For expediency, we do not run them in older versions of Python 3.
+
+    This alone will not run the tests. You'll need to change some local
+    auth settings and change some configuration in your project to
+    run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the `samples/` catalogue. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own `noxfile.py` script
+which automates testing. If you decide to create a new folder, you can
+base it on the `samples/snippets` folder (providing `noxfile.py` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+    # Run all tests in a folder
+    $ cd samples/snippets
+    $ nox -s py-3.8
+
+    # Run a single sample test
+    $ cd samples/snippets
+    $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-videointelligence
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.7`_
+- `Python 3.8`_
+- `Python 3.9`_
+- `Python 3.10`_
+- `Python 3.11`_
+
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-videointelligence/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-videointelligence/LICENSE b/packages/google-cloud-videointelligence/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-videointelligence/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-videointelligence/MANIFEST.in b/packages/google-cloud-videointelligence/MANIFEST.in new file mode 100644 index 000000000000..e0a66705318e --- /dev/null +++ b/packages/google-cloud-videointelligence/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/packages/google-cloud-videointelligence/README.rst b/packages/google-cloud-videointelligence/README.rst
new file mode 100644
index 000000000000..8d02bb3bf5c2
--- /dev/null
+++ b/packages/google-cloud-videointelligence/README.rst
@@ -0,0 +1,108 @@
+Python Client for Video Intelligence
+====================================
+
+|stable| |pypi| |versions|
+
+`Video Intelligence`_: makes videos searchable and discoverable by extracting metadata with an easy-to-use API. You can search every moment of every video file in your catalog and find every occurrence as well as its significance. It quickly annotates videos stored in Google Cloud Storage, helps you identify the key entities in your video and when they occur within it, and separates signal from noise by retrieving relevant information at the video, shot, or frame level.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. |stable| image:: https://img.shields.io/badge/support-stable-gold.svg
+   :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels
+.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-videointelligence.svg
+   :target: https://pypi.org/project/google-cloud-videointelligence/
+.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-videointelligence.svg
+   :target: https://pypi.org/project/google-cloud-videointelligence/
+.. _Video Intelligence: https://cloud.google.com/video-intelligence/docs/
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/videointelligence/latest
+.. _Product Documentation: https://cloud.google.com/video-intelligence/docs/
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Video Intelligence API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Video Intelligence API.: https://cloud.google.com/video-intelligence/docs/
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a virtual environment using `venv`_. `venv`_ is a tool that
+creates isolated Python environments. These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-videointelligence
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-videointelligence
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Video Intelligence
+  to see other available methods on the client.
+- Read the `Video Intelligence Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Video Intelligence Product documentation: https://cloud.google.com/video-intelligence/docs/
+.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst
diff --git a/packages/google-cloud-videointelligence/SECURITY.md b/packages/google-cloud-videointelligence/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-videointelligence/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
diff --git a/packages/google-cloud-videointelligence/docs/CHANGELOG.md b/packages/google-cloud-videointelligence/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-videointelligence/docs/README.rst b/packages/google-cloud-videointelligence/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-videointelligence/docs/_static/custom.css b/packages/google-cloud-videointelligence/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-videointelligence/docs/_templates/layout.html b/packages/google-cloud-videointelligence/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-videointelligence/docs/conf.py b/packages/google-cloud-videointelligence/docs/conf.py new file mode 100644 index 000000000000..7ea5eeb73fe2 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-videointelligence documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-videointelligence" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-videointelligence", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-videointelligence-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+    # Temporarily suppress this to avoid "more than one target found for
+    # cross-reference" warnings, which are intractable for us to avoid while in
+    # a mono-repo.
+    # See https://github.com/sphinx-doc/sphinx/blob
+    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+    "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    #'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
+    # Latex figure (float) alignment
+    #'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (
+        root_doc,
+        "google-cloud-videointelligence.tex",
+        "google-cloud-videointelligence Documentation",
+        author,
+        "manual",
+    )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-videointelligence", + "google-cloud-videointelligence Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-videointelligence", + "google-cloud-videointelligence Documentation", + author, + "google-cloud-videointelligence", + "google-cloud-videointelligence Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-videointelligence/docs/index.rst b/packages/google-cloud-videointelligence/docs/index.rst new file mode 100644 index 000000000000..69b1ba6ff4c6 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/index.rst @@ -0,0 +1,58 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +This package includes clients for multiple versions of Video Intelligence. +By default, you will get version ``videointelligence_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + videointelligence_v1/services + videointelligence_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + videointelligence_v1beta2/services + videointelligence_v1beta2/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + videointelligence_v1p1beta1/services + videointelligence_v1p1beta1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + videointelligence_v1p2beta1/services + videointelligence_v1p2beta1/types + +API Reference +------------- +.. 
toctree::
+    :maxdepth: 2
+
+    videointelligence_v1p3beta1/services
+    videointelligence_v1p3beta1/types
+
+
+Changelog
+---------
+
+For a list of all ``google-cloud-videointelligence`` releases:
+
+.. toctree::
+    :maxdepth: 2
+
+    CHANGELOG
diff --git a/packages/google-cloud-videointelligence/docs/multiprocessing.rst b/packages/google-cloud-videointelligence/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-cloud-videointelligence/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1/services.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1/services.rst
new file mode 100644
index 000000000000..ed2a27c6650b
--- /dev/null
+++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Videointelligence v1 API
+==================================================
+.. toctree::
+    :maxdepth: 2
+
+    video_intelligence_service
diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1/types.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1/types.rst
new file mode 100644
index 000000000000..b5d39277f4fd
--- /dev/null
+++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1/types.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Videointelligence v1 API
+===============================================
+
+.. automodule:: google.cloud.videointelligence_v1.types
+    :members:
+    :show-inheritance:
diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1/video_intelligence_service.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1/video_intelligence_service.rst
new file mode 100644
index 000000000000..d8d38ca3239c
--- /dev/null
+++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1/video_intelligence_service.rst
@@ -0,0 +1,6 @@
+VideoIntelligenceService
+------------------------------------------
+
+.. automodule:: google.cloud.videointelligence_v1.services.video_intelligence_service
+    :members:
+    :inherited-members:
diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/services.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/services.rst
new file mode 100644
index 000000000000..c8f45a1e0e37
--- /dev/null
+++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Videointelligence v1beta2 API
+=======================================================
+.. toctree::
+    :maxdepth: 2
+
+    video_intelligence_service
diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/types.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/types.rst
new file mode 100644
index 000000000000..47f60c25e451
--- /dev/null
+++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/types.rst
@@ -0,0 +1,6 @@
+Types for Google Cloud Videointelligence v1beta2 API
+====================================================
+
+.. 
automodule:: google.cloud.videointelligence_v1beta2.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/video_intelligence_service.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/video_intelligence_service.rst new file mode 100644 index 000000000000..ab6c31d31e6d --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1beta2/video_intelligence_service.rst @@ -0,0 +1,6 @@ +VideoIntelligenceService +------------------------------------------ + +.. automodule:: google.cloud.videointelligence_v1beta2.services.video_intelligence_service + :members: + :inherited-members: diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/services.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/services.rst new file mode 100644 index 000000000000..a26a19fe3861 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Videointelligence v1p1beta1 API +========================================================= +.. toctree:: + :maxdepth: 2 + + video_intelligence_service diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/types.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/types.rst new file mode 100644 index 000000000000..e9c681c58510 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Videointelligence v1p1beta1 API +====================================================== + +.. automodule:: google.cloud.videointelligence_v1p1beta1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/video_intelligence_service.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/video_intelligence_service.rst new file mode 100644 index 000000000000..423a835c5b25 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p1beta1/video_intelligence_service.rst @@ -0,0 +1,6 @@ +VideoIntelligenceService +------------------------------------------ + +.. automodule:: google.cloud.videointelligence_v1p1beta1.services.video_intelligence_service + :members: + :inherited-members: diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/services.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/services.rst new file mode 100644 index 000000000000..e3029604fd1b --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Videointelligence v1p2beta1 API +========================================================= +.. toctree:: + :maxdepth: 2 + + video_intelligence_service diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/types.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/types.rst new file mode 100644 index 000000000000..30c5462f5f14 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Videointelligence v1p2beta1 API +====================================================== + +.. 
automodule:: google.cloud.videointelligence_v1p2beta1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/video_intelligence_service.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/video_intelligence_service.rst new file mode 100644 index 000000000000..804a5442a150 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p2beta1/video_intelligence_service.rst @@ -0,0 +1,6 @@ +VideoIntelligenceService +------------------------------------------ + +.. automodule:: google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service + :members: + :inherited-members: diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/services.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/services.rst new file mode 100644 index 000000000000..853cb921e507 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/services.rst @@ -0,0 +1,7 @@ +Services for Google Cloud Videointelligence v1p3beta1 API +========================================================= +.. toctree:: + :maxdepth: 2 + + streaming_video_intelligence_service + video_intelligence_service diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/streaming_video_intelligence_service.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/streaming_video_intelligence_service.rst new file mode 100644 index 000000000000..4de59243fb0c --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/streaming_video_intelligence_service.rst @@ -0,0 +1,6 @@ +StreamingVideoIntelligenceService +--------------------------------------------------- + +.. automodule:: google.cloud.videointelligence_v1p3beta1.services.streaming_video_intelligence_service + :members: + :inherited-members: diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/types.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/types.rst new file mode 100644 index 000000000000..4305d5882042 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Videointelligence v1p3beta1 API +====================================================== + +.. automodule:: google.cloud.videointelligence_v1p3beta1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/video_intelligence_service.rst b/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/video_intelligence_service.rst new file mode 100644 index 000000000000..fd06a1996f49 --- /dev/null +++ b/packages/google-cloud-videointelligence/docs/videointelligence_v1p3beta1/video_intelligence_service.rst @@ -0,0 +1,6 @@ +VideoIntelligenceService +------------------------------------------ + +.. 
automodule:: google.cloud.videointelligence_v1p3beta1.services.video_intelligence_service + :members: + :inherited-members: diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence/__init__.py new file mode 100644 index 000000000000..2a3eaecff7ef --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence/__init__.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.videointelligence import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.videointelligence_v1.services.video_intelligence_service.async_client import ( + VideoIntelligenceServiceAsyncClient, +) +from google.cloud.videointelligence_v1.services.video_intelligence_service.client import ( + VideoIntelligenceServiceClient, +) +from google.cloud.videointelligence_v1.types.video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + DetectedAttribute, + DetectedLandmark, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + FaceAnnotation, + FaceDetectionAnnotation, + FaceDetectionConfig, + FaceFrame, + FaceSegment, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + LogoRecognitionAnnotation, + NormalizedBoundingBox, + NormalizedBoundingPoly, + NormalizedVertex, + ObjectTrackingAnnotation, + ObjectTrackingConfig, + ObjectTrackingFrame, + PersonDetectionAnnotation, + PersonDetectionConfig, + ShotChangeDetectionConfig, + SpeechContext, + SpeechRecognitionAlternative, + SpeechTranscription, + SpeechTranscriptionConfig, + TextAnnotation, + TextDetectionConfig, + TextFrame, + TextSegment, + TimestampedObject, + Track, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, + WordInfo, +) + +__all__ = ( + "VideoIntelligenceServiceClient", + "VideoIntelligenceServiceAsyncClient", + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "DetectedAttribute", + "DetectedLandmark", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "FaceAnnotation", + "FaceDetectionAnnotation", + "FaceDetectionConfig", + "FaceFrame", + "FaceSegment", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelFrame", + "LabelSegment", + "LogoRecognitionAnnotation", + "NormalizedBoundingBox", + "NormalizedBoundingPoly", + "NormalizedVertex", + "ObjectTrackingAnnotation", + "ObjectTrackingConfig", + "ObjectTrackingFrame", + "PersonDetectionAnnotation", + "PersonDetectionConfig", + "ShotChangeDetectionConfig", + "SpeechContext", + "SpeechRecognitionAlternative", + "SpeechTranscription", + "SpeechTranscriptionConfig", + "TextAnnotation", + "TextDetectionConfig", + "TextFrame", + "TextSegment", + "TimestampedObject", + "Track", + 
"VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoSegment", + "WordInfo", + "Feature", + "LabelDetectionMode", + "Likelihood", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence/gapic_version.py new file mode 100644 index 000000000000..84f00fd3f92c --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence/py.typed b/packages/google-cloud-videointelligence/google/cloud/videointelligence/py.typed new file mode 100644 index 000000000000..e7fb166bf3e5 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-videointelligence package uses inline types. diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/__init__.py new file mode 100644 index 000000000000..52429b4cdbc6 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/__init__.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.videointelligence_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, +) +from .types.video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + DetectedAttribute, + DetectedLandmark, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + FaceAnnotation, + FaceDetectionAnnotation, + FaceDetectionConfig, + FaceFrame, + FaceSegment, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + LogoRecognitionAnnotation, + NormalizedBoundingBox, + NormalizedBoundingPoly, + NormalizedVertex, + ObjectTrackingAnnotation, + ObjectTrackingConfig, + ObjectTrackingFrame, + PersonDetectionAnnotation, + PersonDetectionConfig, + ShotChangeDetectionConfig, + SpeechContext, + SpeechRecognitionAlternative, + SpeechTranscription, + SpeechTranscriptionConfig, + TextAnnotation, + TextDetectionConfig, + TextFrame, + TextSegment, + TimestampedObject, + Track, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, + WordInfo, +) + +__all__ = ( + "VideoIntelligenceServiceAsyncClient", + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "DetectedAttribute", + "DetectedLandmark", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "FaceAnnotation", + "FaceDetectionAnnotation", + "FaceDetectionConfig", + "FaceFrame", + "FaceSegment", + "Feature", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelDetectionMode", + "LabelFrame", + "LabelSegment", + "Likelihood", + "LogoRecognitionAnnotation", + "NormalizedBoundingBox", + "NormalizedBoundingPoly", + "NormalizedVertex", + "ObjectTrackingAnnotation", + "ObjectTrackingConfig", + "ObjectTrackingFrame", + "PersonDetectionAnnotation", + "PersonDetectionConfig", + "ShotChangeDetectionConfig", + "SpeechContext", + "SpeechRecognitionAlternative", + "SpeechTranscription", + "SpeechTranscriptionConfig", + "TextAnnotation", + "TextDetectionConfig", + "TextFrame", + "TextSegment", + "TimestampedObject", + "Track", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoIntelligenceServiceClient", + "VideoSegment", + "WordInfo", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_metadata.json b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_metadata.json new file mode 100644 index 000000000000..5bcd1cc2227d --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.videointelligence_v1", + "protoPackage": "google.cloud.videointelligence.v1", + "schema": "1.0", + "services": { + "VideoIntelligenceService": { + "clients": { + "grpc": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "grpc-async": { + "libraryClient": "VideoIntelligenceServiceAsyncClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "rest": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + 
"AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_version.py new file mode 100644 index 000000000000..84f00fd3f92c --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/py.typed b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/py.typed new file mode 100644 index 000000000000..e7fb166bf3e5 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-videointelligence package uses inline types. diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/__init__.py new file mode 100644 index 000000000000..9bfaa3bf2e62 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import VideoIntelligenceServiceAsyncClient +from .client import VideoIntelligenceServiceClient + +__all__ = ( + "VideoIntelligenceServiceClient", + "VideoIntelligenceServiceAsyncClient", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/async_client.py new file mode 100644 index 000000000000..95ee9677f2bf --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/async_client.py @@ -0,0 +1,380 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore + +from google.cloud.videointelligence_v1.types import video_intelligence + +from .client import VideoIntelligenceServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport + + +class VideoIntelligenceServiceAsyncClient: + """Service that implements the Video Intelligence API.""" + + _client: VideoIntelligenceServiceClient + + DEFAULT_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(VideoIntelligenceServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + VideoIntelligenceServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + 
VideoIntelligenceServiceClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(
+        VideoIntelligenceServiceClient.common_project_path
+    )
+    parse_common_project_path = staticmethod(
+        VideoIntelligenceServiceClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(
+        VideoIntelligenceServiceClient.common_location_path
+    )
+    parse_common_location_path = staticmethod(
+        VideoIntelligenceServiceClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            VideoIntelligenceServiceAsyncClient: The constructed client.
+        """
+        return VideoIntelligenceServiceClient.from_service_account_info.__func__(VideoIntelligenceServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            VideoIntelligenceServiceAsyncClient: The constructed client.
+        """
+        return VideoIntelligenceServiceClient.from_service_account_file.__func__(VideoIntelligenceServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return VideoIntelligenceServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> VideoIntelligenceServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            VideoIntelligenceServiceTransport: The transport used by the client instance.
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(VideoIntelligenceServiceClient).get_transport_class, + type(VideoIntelligenceServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, VideoIntelligenceServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.VideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = VideoIntelligenceServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import videointelligence_v1
+
+            async def sample_annotate_video():
+                # Create a client
+                client = videointelligence_v1.VideoIntelligenceServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = videointelligence_v1.AnnotateVideoRequest(
+                    features=['PERSON_DETECTION'],
+                )
+
+                # Make the request
+                operation = client.annotate_video(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.videointelligence_v1.types.AnnotateVideoRequest, dict]]):
+                The request object. Video annotation request.
+            input_uri (:class:`str`):
+                Input video location. Currently, only `Cloud
+                Storage <https://cloud.google.com/storage/docs/>`__ URIs are
+                supported. URIs must be specified in the following
+                format: ``gs://bucket-id/object-id`` (other URI formats
+                return
+                [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]).
+                For more information, see `Request
+                URIs <https://cloud.google.com/storage/docs/request-endpoints>`__.
+                To identify multiple videos, a video URI may include
+                wildcards in the ``object-id``. Supported wildcards: '*'
+                to match 0 or more characters; '?' to match 1 character.
+                If unset, the input video should be embedded in the
+                request as ``input_content``. If set, ``input_content``
+                must be unset.
+
+                This corresponds to the ``input_uri`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            features (:class:`MutableSequence[google.cloud.videointelligence_v1.types.Feature]`):
+                Required. Requested video annotation
+                features.
+
+                This corresponds to the ``features`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.videointelligence_v1.types.AnnotateVideoResponse` Video annotation response. Included in the response
+                field of the Operation returned by the GetOperation
+                call of the google::longrunning::Operations service.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([input_uri, features])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = video_intelligence.AnnotateVideoRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if input_uri is not None:
+            request.input_uri = input_uri
+        if features:
+            request.features.extend(features)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
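+        # The generated defaults below retry transient DeadlineExceeded and
+        # ServiceUnavailable errors with exponential backoff (1.0s initial
+        # delay, multiplier 2.5, at most 120s between attempts) until the
+        # overall 600s deadline is exhausted; passing an explicit `retry` or
+        # `timeout` argument to this method overrides these defaults.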
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "VideoIntelligenceServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceAsyncClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/client.py new file mode 100644 index 000000000000..a32657aa447a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/client.py @@ -0,0 +1,581 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.videointelligence_v1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+
+from google.cloud.videointelligence_v1.types import video_intelligence
+
+from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport
+from .transports.grpc import VideoIntelligenceServiceGrpcTransport
+from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport
+from .transports.rest import VideoIntelligenceServiceRestTransport
+
+
+class VideoIntelligenceServiceClientMeta(type):
+    """Metaclass for the VideoIntelligenceService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[VideoIntelligenceServiceTransport]]
+    _transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = VideoIntelligenceServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[VideoIntelligenceServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class VideoIntelligenceServiceClient(metaclass=VideoIntelligenceServiceClientMeta):
+    """Service that implements the Video Intelligence API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
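+            # Illustrative matches for this pattern:
+            #   "videointelligence.googleapis.com" -> name="videointelligence",
+            #       googledomain=".googleapis.com"
+            #   "foo.sandbox.googleapis.com"       -> sandbox=".sandbox" is set
+            #   "foo.mtls.googleapis.com"          -> mtls=".mtls" is set, so the
+            #       endpoint is returned unchanged below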
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "videointelligence.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> VideoIntelligenceServiceTransport: + """Returns the transport used by the client instance. + + Returns: + VideoIntelligenceServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, VideoIntelligenceServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, VideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, VideoIntelligenceServiceTransport): + # transport is a VideoIntelligenceServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+                # - It may require specifying regional endpoints when creating the service
+                #   client as shown in:
+                #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+                from google.cloud import videointelligence_v1
+
+                def sample_annotate_video():
+                    # Create a client
+                    client = videointelligence_v1.VideoIntelligenceServiceClient()
+
+                    # Initialize request argument(s)
+                    request = videointelligence_v1.AnnotateVideoRequest(
+                        features=['PERSON_DETECTION'],
+                    )
+
+                    # Make the request
+                    operation = client.annotate_video(request=request)
+
+                    print("Waiting for operation to complete...")
+
+                    response = operation.result()
+
+                    # Handle the response
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.videointelligence_v1.types.AnnotateVideoRequest, dict]):
+                The request object. Video annotation request.
+            input_uri (str):
+                Input video location. Currently, only `Cloud
+                Storage <https://cloud.google.com/storage/>`__ URIs are
+                supported. URIs must be specified in the following
+                format: ``gs://bucket-id/object-id`` (other URI formats
+                return
+                [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]).
+                For more information, see `Request
+                URIs <https://cloud.google.com/storage/docs/request-endpoints>`__.
+                To identify multiple videos, a video URI may include
+                wildcards in the ``object-id``. Supported wildcards: '*'
+                to match 0 or more characters; '?' to match 1 character.
+                If unset, the input video should be embedded in the
+                request as ``input_content``. If set, ``input_content``
+                must be unset.
+
+                This corresponds to the ``input_uri`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            features (MutableSequence[google.cloud.videointelligence_v1.types.Feature]):
+                Required. Requested video annotation
+                features.
+
+                This corresponds to the ``features`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.videointelligence_v1.types.AnnotateVideoResponse` Video annotation response. Included in the response
+                field of the Operation returned by the GetOperation
+                call of the google::longrunning::Operations service.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([input_uri, features])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a video_intelligence.AnnotateVideoRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, video_intelligence.AnnotateVideoRequest):
+            request = video_intelligence.AnnotateVideoRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if input_uri is not None:
+                request.input_uri = input_uri
+            if features is not None:
+                request.features = features
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
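+        # ``_wrapped_methods`` is populated by the transport's
+        # ``_prep_wrapped_messages``, which attaches the default retry policy
+        # (retry on DeadlineExceeded/ServiceUnavailable with exponential
+        # backoff) and the 600-second default timeout to the raw
+        # ``annotate_video`` stub.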
+ rpc = self._transport._wrapped_methods[self._transport.annotate_video] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "VideoIntelligenceServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/__init__.py new file mode 100644 index 000000000000..775ef9e3503a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import VideoIntelligenceServiceTransport +from .grpc import VideoIntelligenceServiceGrpcTransport +from .grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport +from .rest import ( + VideoIntelligenceServiceRestInterceptor, + VideoIntelligenceServiceRestTransport, +) + +# Compile a registry of transports. 
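+# The registry maps a transport name ("grpc", "grpc_asyncio", or "rest") to
+# its implementing class, so code that accepts a transport as a plain string
+# (e.g. ``VideoIntelligenceServiceClient(transport="rest")``) can resolve the
+# concrete class through a registry like this one.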
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[VideoIntelligenceServiceTransport]] +_transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport +_transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport +_transport_registry["rest"] = VideoIntelligenceServiceRestTransport + +__all__ = ( + "VideoIntelligenceServiceTransport", + "VideoIntelligenceServiceGrpcTransport", + "VideoIntelligenceServiceGrpcAsyncIOTransport", + "VideoIntelligenceServiceRestTransport", + "VideoIntelligenceServiceRestInterceptor", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/base.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/base.py new file mode 100644 index 000000000000..736070726f4a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/base.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1 import gapic_version as package_version +from google.cloud.videointelligence_v1.types import video_intelligence + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class VideoIntelligenceServiceTransport(abc.ABC): + """Abstract transport class for VideoIntelligenceService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "videointelligence.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.annotate_video: gapic_v1.method.wrap_method( + self.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("VideoIntelligenceServiceTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/grpc.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/grpc.py new file mode 100644 index 000000000000..9d04915efd6a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/grpc.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.videointelligence_v1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport + + +class VideoIntelligenceServiceGrpcTransport(VideoIntelligenceServiceTransport): + """gRPC backend transport for VideoIntelligenceService. + + Service that implements the Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
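+                # (``client_cert_source`` is the deprecated callback path; when
+                # it is absent, ``SslCredentials`` below falls back to the
+                # application-default client certificate, if one is configured.)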
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "videointelligence.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
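+
+        Example (a minimal sketch; the operation name is hypothetical)::
+
+            op = transport.operations_client.get_operation(
+                "projects/my-project/locations/us-east1/operations/123"
+            )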
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + Returns: + Callable[[~.AnnotateVideoRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("VideoIntelligenceServiceGrpcTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/grpc_asyncio.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d2359471d404 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/grpc_asyncio.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.videointelligence_v1.types import video_intelligence
+
+from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport
+from .grpc import VideoIntelligenceServiceGrpcTransport
+
+
+class VideoIntelligenceServiceGrpcAsyncIOTransport(VideoIntelligenceServiceTransport):
+    """gRPC AsyncIO backend transport for VideoIntelligenceService.
+
+    Service that implements the Video Intelligence API.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "videointelligence.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). 
+ + Returns: + Callable[[~.AnnotateVideoRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("VideoIntelligenceServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/rest.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/rest.py new file mode 100644 index 000000000000..414855eb8286 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/services/video_intelligence_service/transports/rest.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.videointelligence_v1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import VideoIntelligenceServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class VideoIntelligenceServiceRestInterceptor: + """Interceptor for VideoIntelligenceService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the VideoIntelligenceServiceRestTransport. + + .. code-block:: python + class MyCustomVideoIntelligenceServiceInterceptor(VideoIntelligenceServiceRestInterceptor): + def pre_annotate_video(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_annotate_video(self, response): + logging.log(f"Received response: {response}") + return response + + transport = VideoIntelligenceServiceRestTransport(interceptor=MyCustomVideoIntelligenceServiceInterceptor()) + client = VideoIntelligenceServiceClient(transport=transport) + + + """ + + def pre_annotate_video( + self, + request: video_intelligence.AnnotateVideoRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[video_intelligence.AnnotateVideoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for annotate_video + + Override in a subclass to manipulate the request or metadata + before they are sent to the VideoIntelligenceService server. + """ + return request, metadata + + def post_annotate_video( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for annotate_video + + Override in a subclass to manipulate the response + after it is returned by the VideoIntelligenceService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class VideoIntelligenceServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: VideoIntelligenceServiceRestInterceptor + + +class VideoIntelligenceServiceRestTransport(VideoIntelligenceServiceTransport): + """REST backend transport for VideoIntelligenceService. + + Service that implements the Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[VideoIntelligenceServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or VideoIntelligenceServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            http_options: Dict[str, List[Dict[str, str]]] = {
+                "google.longrunning.Operations.CancelOperation": [
+                    {
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+                        "body": "*",
+                    },
+                    {
+                        "method": "post",
+                        "uri": "/v1/operations/{name=projects/*/locations/*/operations/*}:cancel",
+                    },
+                ],
+                "google.longrunning.Operations.DeleteOperation": [
+                    {
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                    {
+                        "method": "delete",
+                        "uri": "/v1/operations/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.GetOperation": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                    },
+                    {
+                        "method": "get",
+                        "uri": "/v1/operations/{name=projects/*/locations/*/operations/*}",
+                    },
+                ],
+                "google.longrunning.Operations.ListOperations": [
+                    {
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*}/operations",
+                    },
+                ],
+            }
+
+            rest_transport = operations_v1.OperationsRestTransport(
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
+
+            self._operations_client = operations_v1.AbstractOperationsClient(
+                transport=rest_transport
+            )
+
+        # Return the client from cache.
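+        # (Each Operations RPC above lists two URI bindings: the standard
+        # ``/v1/{name=...}`` form and a ``/v1/operations/{name=...}`` variant;
+        # the path template transcoder matches the operation name against
+        # both patterns.)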
+ return self._operations_client + + class _AnnotateVideo(VideoIntelligenceServiceRestStub): + def __hash__(self): + return hash("AnnotateVideo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: video_intelligence.AnnotateVideoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the annotate video method over HTTP. + + Args: + request (~.video_intelligence.AnnotateVideoRequest): + The request object. Video annotation request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/videos:annotate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_annotate_video(request, metadata) + pb_request = video_intelligence.AnnotateVideoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_video(resp) + return resp + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnnotateVideo(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("VideoIntelligenceServiceRestTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/types/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/types/__init__.py new file mode 100644 index 000000000000..5d758fcd5ca5 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/types/__init__.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + DetectedAttribute, + DetectedLandmark, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + FaceAnnotation, + FaceDetectionAnnotation, + FaceDetectionConfig, + FaceFrame, + FaceSegment, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + LogoRecognitionAnnotation, + NormalizedBoundingBox, + NormalizedBoundingPoly, + NormalizedVertex, + ObjectTrackingAnnotation, + ObjectTrackingConfig, + ObjectTrackingFrame, + PersonDetectionAnnotation, + PersonDetectionConfig, + ShotChangeDetectionConfig, + SpeechContext, + SpeechRecognitionAlternative, + SpeechTranscription, + SpeechTranscriptionConfig, + TextAnnotation, + TextDetectionConfig, + TextFrame, + TextSegment, + TimestampedObject, + Track, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, + WordInfo, +) + +__all__ = ( + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "DetectedAttribute", + "DetectedLandmark", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "FaceAnnotation", + "FaceDetectionAnnotation", + "FaceDetectionConfig", + "FaceFrame", + "FaceSegment", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelFrame", + "LabelSegment", + "LogoRecognitionAnnotation", + "NormalizedBoundingBox", + "NormalizedBoundingPoly", + "NormalizedVertex", + "ObjectTrackingAnnotation", + "ObjectTrackingConfig", + "ObjectTrackingFrame", + "PersonDetectionAnnotation", + "PersonDetectionConfig", + "ShotChangeDetectionConfig", + "SpeechContext", + "SpeechRecognitionAlternative", + "SpeechTranscription", + "SpeechTranscriptionConfig", + "TextAnnotation", + "TextDetectionConfig", + "TextFrame", + "TextSegment", + "TimestampedObject", + "Track", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoSegment", + "WordInfo", + "Feature", + "LabelDetectionMode", + "Likelihood", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/types/video_intelligence.py 
b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/types/video_intelligence.py new file mode 100644 index 000000000000..02a16fdd8f09 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1/types/video_intelligence.py @@ -0,0 +1,1769 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.videointelligence.v1", + manifest={ + "Feature", + "LabelDetectionMode", + "Likelihood", + "AnnotateVideoRequest", + "VideoContext", + "LabelDetectionConfig", + "ShotChangeDetectionConfig", + "ObjectTrackingConfig", + "FaceDetectionConfig", + "PersonDetectionConfig", + "ExplicitContentDetectionConfig", + "TextDetectionConfig", + "VideoSegment", + "LabelSegment", + "LabelFrame", + "Entity", + "LabelAnnotation", + "ExplicitContentFrame", + "ExplicitContentAnnotation", + "NormalizedBoundingBox", + "FaceDetectionAnnotation", + "PersonDetectionAnnotation", + "FaceSegment", + "FaceFrame", + "FaceAnnotation", + "TimestampedObject", + "Track", + "DetectedAttribute", + "DetectedLandmark", + "VideoAnnotationResults", + "AnnotateVideoResponse", + "VideoAnnotationProgress", + "AnnotateVideoProgress", + "SpeechTranscriptionConfig", + "SpeechContext", + "SpeechTranscription", + "SpeechRecognitionAlternative", + "WordInfo", + "NormalizedVertex", + "NormalizedBoundingPoly", + "TextSegment", + "TextFrame", + "TextAnnotation", + "ObjectTrackingFrame", + "ObjectTrackingAnnotation", + "LogoRecognitionAnnotation", + }, +) + + +class Feature(proto.Enum): + r"""Video annotation feature. + + Values: + FEATURE_UNSPECIFIED (0): + Unspecified. + LABEL_DETECTION (1): + Label detection. Detect objects, such as dog + or flower. + SHOT_CHANGE_DETECTION (2): + Shot change detection. + EXPLICIT_CONTENT_DETECTION (3): + Explicit content detection. + FACE_DETECTION (4): + Human face detection. + SPEECH_TRANSCRIPTION (6): + Speech transcription. + TEXT_DETECTION (7): + OCR text detection and tracking. + OBJECT_TRACKING (9): + Object detection and tracking. + LOGO_RECOGNITION (12): + Logo detection, tracking, and recognition. + PERSON_DETECTION (14): + Person detection. + """ + FEATURE_UNSPECIFIED = 0 + LABEL_DETECTION = 1 + SHOT_CHANGE_DETECTION = 2 + EXPLICIT_CONTENT_DETECTION = 3 + FACE_DETECTION = 4 + SPEECH_TRANSCRIPTION = 6 + TEXT_DETECTION = 7 + OBJECT_TRACKING = 9 + LOGO_RECOGNITION = 12 + PERSON_DETECTION = 14 + + +class LabelDetectionMode(proto.Enum): + r"""Label detection mode. + + Values: + LABEL_DETECTION_MODE_UNSPECIFIED (0): + Unspecified. + SHOT_MODE (1): + Detect shot-level labels. + FRAME_MODE (2): + Detect frame-level labels. 
+ SHOT_AND_FRAME_MODE (3): + Detect both shot-level and frame-level + labels. + """ + LABEL_DETECTION_MODE_UNSPECIFIED = 0 + SHOT_MODE = 1 + FRAME_MODE = 2 + SHOT_AND_FRAME_MODE = 3 + + +class Likelihood(proto.Enum): + r"""Bucketized representation of likelihood. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Unspecified likelihood. + VERY_UNLIKELY (1): + Very unlikely. + UNLIKELY (2): + Unlikely. + POSSIBLE (3): + Possible. + LIKELY (4): + Likely. + VERY_LIKELY (5): + Very likely. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class AnnotateVideoRequest(proto.Message): + r"""Video annotation request. + + Attributes: + input_uri (str): + Input video location. Currently, only `Cloud + Storage `__ URIs are + supported. URIs must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + To identify multiple videos, a video URI may include + wildcards in the ``object-id``. Supported wildcards: '*' to + match 0 or more characters; '?' to match 1 character. If + unset, the input video should be embedded in the request as + ``input_content``. If set, ``input_content`` must be unset. + input_content (bytes): + The video data bytes. If unset, the input video(s) should be + specified via the ``input_uri``. If set, ``input_uri`` must + be unset. + features (MutableSequence[google.cloud.videointelligence_v1.types.Feature]): + Required. Requested video annotation + features. + video_context (google.cloud.videointelligence_v1.types.VideoContext): + Additional video context and/or + feature-specific parameters. + output_uri (str): + Optional. Location where the output (in JSON format) should + be stored. Currently, only `Cloud + Storage `__ URIs are + supported. These must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + location_id (str): + Optional. Cloud region where annotation should take place. + Supported cloud regions are: ``us-east1``, ``us-west1``, + ``europe-west1``, ``asia-east1``. If no region is specified, + the region will be determined based on video file location. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + input_content: bytes = proto.Field( + proto.BYTES, + number=6, + ) + features: MutableSequence["Feature"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Feature", + ) + video_context: "VideoContext" = proto.Field( + proto.MESSAGE, + number=3, + message="VideoContext", + ) + output_uri: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class VideoContext(proto.Message): + r"""Video context and/or feature-specific parameters. + + Attributes: + segments (MutableSequence[google.cloud.videointelligence_v1.types.VideoSegment]): + Video segments to annotate. The segments may + overlap and are not required to be contiguous or + span the whole video. If unspecified, each video + is treated as a single segment. + label_detection_config (google.cloud.videointelligence_v1.types.LabelDetectionConfig): + Config for LABEL_DETECTION. + shot_change_detection_config (google.cloud.videointelligence_v1.types.ShotChangeDetectionConfig): + Config for SHOT_CHANGE_DETECTION. 
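As a usage sketch for the request message defined above (the bucket and object names are hypothetical), a minimal ``AnnotateVideoRequest`` combines an ``input_uri`` with the required ``features`` list:

.. code-block:: python

    from google.cloud import videointelligence_v1

    # Hypothetical Cloud Storage URI; wildcards in the object-id are allowed.
    request = videointelligence_v1.AnnotateVideoRequest(
        input_uri="gs://example-bucket/example-video.mp4",
        features=[
            videointelligence_v1.Feature.LABEL_DETECTION,
            videointelligence_v1.Feature.SHOT_CHANGE_DETECTION,
        ],
        location_id="us-east1",  # optional; the server picks a region if unset
    )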
+ explicit_content_detection_config (google.cloud.videointelligence_v1.types.ExplicitContentDetectionConfig): + Config for EXPLICIT_CONTENT_DETECTION. + face_detection_config (google.cloud.videointelligence_v1.types.FaceDetectionConfig): + Config for FACE_DETECTION. + speech_transcription_config (google.cloud.videointelligence_v1.types.SpeechTranscriptionConfig): + Config for SPEECH_TRANSCRIPTION. + text_detection_config (google.cloud.videointelligence_v1.types.TextDetectionConfig): + Config for TEXT_DETECTION. + person_detection_config (google.cloud.videointelligence_v1.types.PersonDetectionConfig): + Config for PERSON_DETECTION. + object_tracking_config (google.cloud.videointelligence_v1.types.ObjectTrackingConfig): + Config for OBJECT_TRACKING. + """ + + segments: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + label_detection_config: "LabelDetectionConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="LabelDetectionConfig", + ) + shot_change_detection_config: "ShotChangeDetectionConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="ShotChangeDetectionConfig", + ) + explicit_content_detection_config: "ExplicitContentDetectionConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="ExplicitContentDetectionConfig", + ) + face_detection_config: "FaceDetectionConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="FaceDetectionConfig", + ) + speech_transcription_config: "SpeechTranscriptionConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="SpeechTranscriptionConfig", + ) + text_detection_config: "TextDetectionConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="TextDetectionConfig", + ) + person_detection_config: "PersonDetectionConfig" = proto.Field( + proto.MESSAGE, + number=11, + message="PersonDetectionConfig", + ) + object_tracking_config: "ObjectTrackingConfig" = proto.Field( + proto.MESSAGE, + number=13, + message="ObjectTrackingConfig", + ) + + +class LabelDetectionConfig(proto.Message): + r"""Config for LABEL_DETECTION. + + Attributes: + label_detection_mode (google.cloud.videointelligence_v1.types.LabelDetectionMode): + What labels should be detected with LABEL_DETECTION, in + addition to video-level labels or segment-level labels. If + unspecified, defaults to ``SHOT_MODE``. + stationary_camera (bool): + Whether the video has been shot from a stationary (i.e., + non-moving) camera. When set to true, might improve + detection accuracy for moving objects. Should be used with + ``SHOT_AND_FRAME_MODE`` enabled. + model (str): + Model to use for label detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + frame_confidence_threshold (float): + The confidence threshold we perform filtering on the labels + from frame-level detection. If not set, it is set to 0.4 by + default. The valid range for this threshold is [0.1, 0.9]. + Any value set outside of this range will be clipped. Note: + For best results, follow the default threshold. We will + update the default threshold everytime when we release a new + model. + video_confidence_threshold (float): + The confidence threshold we perform filtering on the labels + from video-level and shot-level detections. If not set, it's + set to 0.3 by default. The valid range for this threshold is + [0.1, 0.9]. Any value set outside of this range will be + clipped. Note: For best results, follow the default + threshold. 
We will update the default threshold everytime + when we release a new model. + """ + + label_detection_mode: "LabelDetectionMode" = proto.Field( + proto.ENUM, + number=1, + enum="LabelDetectionMode", + ) + stationary_camera: bool = proto.Field( + proto.BOOL, + number=2, + ) + model: str = proto.Field( + proto.STRING, + number=3, + ) + frame_confidence_threshold: float = proto.Field( + proto.FLOAT, + number=4, + ) + video_confidence_threshold: float = proto.Field( + proto.FLOAT, + number=5, + ) + + +class ShotChangeDetectionConfig(proto.Message): + r"""Config for SHOT_CHANGE_DETECTION. + + Attributes: + model (str): + Model to use for shot change detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ObjectTrackingConfig(proto.Message): + r"""Config for OBJECT_TRACKING. + + Attributes: + model (str): + Model to use for object tracking. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FaceDetectionConfig(proto.Message): + r"""Config for FACE_DETECTION. + + Attributes: + model (str): + Model to use for face detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + include_bounding_boxes (bool): + Whether bounding boxes are included in the + face annotation output. + include_attributes (bool): + Whether to enable face attributes detection, such as + glasses, dark_glasses, mouth_open etc. Ignored if + 'include_bounding_boxes' is set to false. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + include_bounding_boxes: bool = proto.Field( + proto.BOOL, + number=2, + ) + include_attributes: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class PersonDetectionConfig(proto.Message): + r"""Config for PERSON_DETECTION. + + Attributes: + include_bounding_boxes (bool): + Whether bounding boxes are included in the + person detection annotation output. + include_pose_landmarks (bool): + Whether to enable pose landmarks detection. Ignored if + 'include_bounding_boxes' is set to false. + include_attributes (bool): + Whether to enable person attributes detection, such as cloth + color (black, blue, etc), type (coat, dress, etc), pattern + (plain, floral, etc), hair, etc. Ignored if + 'include_bounding_boxes' is set to false. + """ + + include_bounding_boxes: bool = proto.Field( + proto.BOOL, + number=1, + ) + include_pose_landmarks: bool = proto.Field( + proto.BOOL, + number=2, + ) + include_attributes: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ExplicitContentDetectionConfig(proto.Message): + r"""Config for EXPLICIT_CONTENT_DETECTION. + + Attributes: + model (str): + Model to use for explicit content detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TextDetectionConfig(proto.Message): + r"""Config for TEXT_DETECTION. + + Attributes: + language_hints (MutableSequence[str]): + Language hint can be specified if the + language to be detected is known a priori. It + can increase the accuracy of the detection. + Language hint must be language code in BCP-47 + format. + + Automatic language detection is performed if no + hint is provided. + model (str): + Model to use for text detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". 
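The per-feature config messages above slot into ``VideoContext``. A sketch combining two of them (all values are illustrative):

.. code-block:: python

    from google.cloud import videointelligence_v1

    context = videointelligence_v1.VideoContext(
        label_detection_config=videointelligence_v1.LabelDetectionConfig(
            label_detection_mode=videointelligence_v1.LabelDetectionMode.SHOT_AND_FRAME_MODE,
            stationary_camera=True,
            frame_confidence_threshold=0.5,  # valid range [0.1, 0.9]
        ),
        text_detection_config=videointelligence_v1.TextDetectionConfig(
            language_hints=["en-US"],  # BCP-47; hypothetical hint
        ),
    )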
+ """ + + language_hints: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + model: str = proto.Field( + proto.STRING, + number=2, + ) + + +class VideoSegment(proto.Message): + r"""Video segment. + + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the start of the segment + (inclusive). + end_time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the end of the segment + (inclusive). + """ + + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + end_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class LabelSegment(proto.Message): + r"""Video segment level annotation results for label detection. + + Attributes: + segment (google.cloud.videointelligence_v1.types.VideoSegment): + Video segment where a label was detected. + confidence (float): + Confidence that the label is accurate. Range: [0, 1]. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class LabelFrame(proto.Message): + r"""Video frame level annotation results for label detection. + + Attributes: + time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the video frame for this + location. + confidence (float): + Confidence that the label is accurate. Range: [0, 1]. + """ + + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class Entity(proto.Message): + r"""Detected entity from video analysis. + + Attributes: + entity_id (str): + Opaque entity ID. Some IDs may be available in `Google + Knowledge Graph Search + API `__. + description (str): + Textual description, e.g., ``Fixed-gear bicycle``. + language_code (str): + Language code for ``description`` in BCP-47 format. + """ + + entity_id: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + language_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LabelAnnotation(proto.Message): + r"""Label annotation. + + Attributes: + entity (google.cloud.videointelligence_v1.types.Entity): + Detected entity. + category_entities (MutableSequence[google.cloud.videointelligence_v1.types.Entity]): + Common categories for the detected entity. For example, when + the label is ``Terrier``, the category is likely ``dog``. + And in some cases there might be more than one categories + e.g., ``Terrier`` could also be a ``pet``. + segments (MutableSequence[google.cloud.videointelligence_v1.types.LabelSegment]): + All video segments where a label was + detected. + frames (MutableSequence[google.cloud.videointelligence_v1.types.LabelFrame]): + All video frames where a label was detected. + version (str): + Feature version. 
+ """ + + entity: "Entity" = proto.Field( + proto.MESSAGE, + number=1, + message="Entity", + ) + category_entities: MutableSequence["Entity"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Entity", + ) + segments: MutableSequence["LabelSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="LabelSegment", + ) + frames: MutableSequence["LabelFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="LabelFrame", + ) + version: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ExplicitContentFrame(proto.Message): + r"""Video frame level annotation results for explicit content. + + Attributes: + time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the video frame for this + location. + pornography_likelihood (google.cloud.videointelligence_v1.types.Likelihood): + Likelihood of the pornography content.. + """ + + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + pornography_likelihood: "Likelihood" = proto.Field( + proto.ENUM, + number=2, + enum="Likelihood", + ) + + +class ExplicitContentAnnotation(proto.Message): + r"""Explicit content annotation (based on per-frame visual + signals only). If no explicit content has been detected in a + frame, no annotations are present for that frame. + + Attributes: + frames (MutableSequence[google.cloud.videointelligence_v1.types.ExplicitContentFrame]): + All video frames where explicit content was + detected. + version (str): + Feature version. + """ + + frames: MutableSequence["ExplicitContentFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ExplicitContentFrame", + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class NormalizedBoundingBox(proto.Message): + r"""Normalized bounding box. The normalized vertex coordinates are + relative to the original image. Range: [0, 1]. + + Attributes: + left (float): + Left X coordinate. + top (float): + Top Y coordinate. + right (float): + Right X coordinate. + bottom (float): + Bottom Y coordinate. + """ + + left: float = proto.Field( + proto.FLOAT, + number=1, + ) + top: float = proto.Field( + proto.FLOAT, + number=2, + ) + right: float = proto.Field( + proto.FLOAT, + number=3, + ) + bottom: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class FaceDetectionAnnotation(proto.Message): + r"""Face detection annotation. + + Attributes: + tracks (MutableSequence[google.cloud.videointelligence_v1.types.Track]): + The face tracks with attributes. + thumbnail (bytes): + The thumbnail of a person's face. + version (str): + Feature version. + """ + + tracks: MutableSequence["Track"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Track", + ) + thumbnail: bytes = proto.Field( + proto.BYTES, + number=4, + ) + version: str = proto.Field( + proto.STRING, + number=5, + ) + + +class PersonDetectionAnnotation(proto.Message): + r"""Person detection annotation per video. + + Attributes: + tracks (MutableSequence[google.cloud.videointelligence_v1.types.Track]): + The detected tracks of a person. + version (str): + Feature version. + """ + + tracks: MutableSequence["Track"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Track", + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class FaceSegment(proto.Message): + r"""Video segment level annotation results for face detection. 
+ + Attributes: + segment (google.cloud.videointelligence_v1.types.VideoSegment): + Video segment where a face was detected. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + + +class FaceFrame(proto.Message): + r"""Deprecated. No effect. + + Attributes: + normalized_bounding_boxes (MutableSequence[google.cloud.videointelligence_v1.types.NormalizedBoundingBox]): + Normalized Bounding boxes in a frame. + There can be more than one boxes if the same + face is detected in multiple locations within + the current frame. + time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the video frame for this + location. + """ + + normalized_bounding_boxes: MutableSequence[ + "NormalizedBoundingBox" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NormalizedBoundingBox", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class FaceAnnotation(proto.Message): + r"""Deprecated. No effect. + + Attributes: + thumbnail (bytes): + Thumbnail of a representative face view (in + JPEG format). + segments (MutableSequence[google.cloud.videointelligence_v1.types.FaceSegment]): + All video segments where a face was detected. + frames (MutableSequence[google.cloud.videointelligence_v1.types.FaceFrame]): + All video frames where a face was detected. + """ + + thumbnail: bytes = proto.Field( + proto.BYTES, + number=1, + ) + segments: MutableSequence["FaceSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="FaceSegment", + ) + frames: MutableSequence["FaceFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="FaceFrame", + ) + + +class TimestampedObject(proto.Message): + r"""For tracking related features. An object at time_offset with + attributes, and located with normalized_bounding_box. + + Attributes: + normalized_bounding_box (google.cloud.videointelligence_v1.types.NormalizedBoundingBox): + Normalized Bounding box in a frame, where the + object is located. + time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the video frame for this + object. + attributes (MutableSequence[google.cloud.videointelligence_v1.types.DetectedAttribute]): + Optional. The attributes of the object in the + bounding box. + landmarks (MutableSequence[google.cloud.videointelligence_v1.types.DetectedLandmark]): + Optional. The detected landmarks. + """ + + normalized_bounding_box: "NormalizedBoundingBox" = proto.Field( + proto.MESSAGE, + number=1, + message="NormalizedBoundingBox", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + attributes: MutableSequence["DetectedAttribute"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="DetectedAttribute", + ) + landmarks: MutableSequence["DetectedLandmark"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="DetectedLandmark", + ) + + +class Track(proto.Message): + r"""A track of an object instance. + + Attributes: + segment (google.cloud.videointelligence_v1.types.VideoSegment): + Video segment of a track. + timestamped_objects (MutableSequence[google.cloud.videointelligence_v1.types.TimestampedObject]): + The object with timestamp and attributes per + frame in the track. + attributes (MutableSequence[google.cloud.videointelligence_v1.types.DetectedAttribute]): + Optional. 
Attributes in the track level. + confidence (float): + Optional. The confidence score of the tracked + object. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + timestamped_objects: MutableSequence["TimestampedObject"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="TimestampedObject", + ) + attributes: MutableSequence["DetectedAttribute"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="DetectedAttribute", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class DetectedAttribute(proto.Message): + r"""A generic detected attribute represented by name in string + format. + + Attributes: + name (str): + The name of the attribute, for example, glasses, + dark_glasses, mouth_open. A full list of supported type + names will be provided in the document. + confidence (float): + Detected attribute confidence. Range [0, 1]. + value (str): + Text value of the detection result. For + example, the value for "HairColor" can be + "black", "blonde", etc. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + value: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DetectedLandmark(proto.Message): + r"""A generic detected landmark represented by name in string + format and a 2D location. + + Attributes: + name (str): + The name of this landmark, for example, left_hand, + right_shoulder. + point (google.cloud.videointelligence_v1.types.NormalizedVertex): + The 2D point of the detected landmark using + the normalized image coordindate system. The + normalized coordinates have the range from 0 to + 1. + confidence (float): + The confidence score of the detected landmark. Range [0, 1]. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + point: "NormalizedVertex" = proto.Field( + proto.MESSAGE, + number=2, + message="NormalizedVertex", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=3, + ) + + +class VideoAnnotationResults(proto.Message): + r"""Annotation results for a single video. + + Attributes: + input_uri (str): + Video file location in `Cloud + Storage `__. + segment (google.cloud.videointelligence_v1.types.VideoSegment): + Video segment on which the annotation is run. + segment_label_annotations (MutableSequence[google.cloud.videointelligence_v1.types.LabelAnnotation]): + Topical label annotations on video level or + user-specified segment level. There is exactly + one element for each unique label. + segment_presence_label_annotations (MutableSequence[google.cloud.videointelligence_v1.types.LabelAnnotation]): + Presence label annotations on video level or user-specified + segment level. There is exactly one element for each unique + label. Compared to the existing topical + ``segment_label_annotations``, this field presents more + fine-grained, segment-level labels detected in video content + and is made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. + shot_label_annotations (MutableSequence[google.cloud.videointelligence_v1.types.LabelAnnotation]): + Topical label annotations on shot level. + There is exactly one element for each unique + label. + shot_presence_label_annotations (MutableSequence[google.cloud.videointelligence_v1.types.LabelAnnotation]): + Presence label annotations on shot level. There is exactly + one element for each unique label. 
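``NormalizedBoundingBox`` and ``NormalizedVertex`` coordinates are relative to the frame, so rendering them requires the source video's dimensions. A hypothetical helper (the function and its parameters are illustrative, not part of the library):

.. code-block:: python

    def box_to_pixels(box, frame_width: int, frame_height: int):
        """Convert a NormalizedBoundingBox ([0, 1] range) to pixel coordinates."""
        return (
            int(box.left * frame_width),
            int(box.top * frame_height),
            int(box.right * frame_width),
            int(box.bottom * frame_height),
        )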
Compared to the existing + topical ``shot_label_annotations``, this field presents more + fine-grained, shot-level labels detected in video content + and is made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. + frame_label_annotations (MutableSequence[google.cloud.videointelligence_v1.types.LabelAnnotation]): + Label annotations on frame level. + There is exactly one element for each unique + label. + face_annotations (MutableSequence[google.cloud.videointelligence_v1.types.FaceAnnotation]): + Deprecated. Please use ``face_detection_annotations`` + instead. + face_detection_annotations (MutableSequence[google.cloud.videointelligence_v1.types.FaceDetectionAnnotation]): + Face detection annotations. + shot_annotations (MutableSequence[google.cloud.videointelligence_v1.types.VideoSegment]): + Shot annotations. Each shot is represented as + a video segment. + explicit_annotation (google.cloud.videointelligence_v1.types.ExplicitContentAnnotation): + Explicit content annotation. + speech_transcriptions (MutableSequence[google.cloud.videointelligence_v1.types.SpeechTranscription]): + Speech transcription. + text_annotations (MutableSequence[google.cloud.videointelligence_v1.types.TextAnnotation]): + OCR text detection and tracking. + Annotations for list of detected text snippets. + Each will have list of frame information + associated with it. + object_annotations (MutableSequence[google.cloud.videointelligence_v1.types.ObjectTrackingAnnotation]): + Annotations for list of objects detected and + tracked in video. + logo_recognition_annotations (MutableSequence[google.cloud.videointelligence_v1.types.LogoRecognitionAnnotation]): + Annotations for list of logos detected, + tracked and recognized in video. + person_detection_annotations (MutableSequence[google.cloud.videointelligence_v1.types.PersonDetectionAnnotation]): + Person detection annotations. + error (google.rpc.status_pb2.Status): + If set, indicates an error. Note that for a single + ``AnnotateVideoRequest`` some videos may succeed and some + may fail. 
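Because each video in a request carries its own ``error`` status, results should be checked per video. A reading sketch, assuming ``response`` is the ``AnnotateVideoResponse`` obtained from a completed operation:

.. code-block:: python

    for result in response.annotation_results:
        if result.error.code:  # non-zero google.rpc.Code: this video failed
            print(f"{result.input_uri} failed: {result.error.message}")
            continue
        for label in result.segment_label_annotations:
            confidences = [s.confidence for s in label.segments]
            print(label.entity.description, confidences)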
+ """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=10, + message="VideoSegment", + ) + segment_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="LabelAnnotation", + ) + segment_presence_label_annotations: MutableSequence[ + "LabelAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=23, + message="LabelAnnotation", + ) + shot_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="LabelAnnotation", + ) + shot_presence_label_annotations: MutableSequence[ + "LabelAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=24, + message="LabelAnnotation", + ) + frame_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="LabelAnnotation", + ) + face_annotations: MutableSequence["FaceAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="FaceAnnotation", + ) + face_detection_annotations: MutableSequence[ + "FaceDetectionAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="FaceDetectionAnnotation", + ) + shot_annotations: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="VideoSegment", + ) + explicit_annotation: "ExplicitContentAnnotation" = proto.Field( + proto.MESSAGE, + number=7, + message="ExplicitContentAnnotation", + ) + speech_transcriptions: MutableSequence["SpeechTranscription"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="SpeechTranscription", + ) + text_annotations: MutableSequence["TextAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="TextAnnotation", + ) + object_annotations: MutableSequence[ + "ObjectTrackingAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="ObjectTrackingAnnotation", + ) + logo_recognition_annotations: MutableSequence[ + "LogoRecognitionAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message="LogoRecognitionAnnotation", + ) + person_detection_annotations: MutableSequence[ + "PersonDetectionAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message="PersonDetectionAnnotation", + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=9, + message=status_pb2.Status, + ) + + +class AnnotateVideoResponse(proto.Message): + r"""Video annotation response. Included in the ``response`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + Attributes: + annotation_results (MutableSequence[google.cloud.videointelligence_v1.types.VideoAnnotationResults]): + Annotation results for all videos specified in + ``AnnotateVideoRequest``. + """ + + annotation_results: MutableSequence["VideoAnnotationResults"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoAnnotationResults", + ) + + +class VideoAnnotationProgress(proto.Message): + r"""Annotation progress for a single video. + + Attributes: + input_uri (str): + Video file location in `Cloud + Storage `__. + progress_percent (int): + Approximate percentage processed thus far. + Guaranteed to be 100 when fully processed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the request was received. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Time of the most recent update. 
+ feature (google.cloud.videointelligence_v1.types.Feature): + Specifies which feature is being tracked if + the request contains more than one feature. + segment (google.cloud.videointelligence_v1.types.VideoSegment): + Specifies which segment is being tracked if + the request contains more than one segment. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + progress_percent: int = proto.Field( + proto.INT32, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + feature: "Feature" = proto.Field( + proto.ENUM, + number=5, + enum="Feature", + ) + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=6, + message="VideoSegment", + ) + + +class AnnotateVideoProgress(proto.Message): + r"""Video annotation progress. Included in the ``metadata`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + Attributes: + annotation_progress (MutableSequence[google.cloud.videointelligence_v1.types.VideoAnnotationProgress]): + Progress metadata for all videos specified in + ``AnnotateVideoRequest``. + """ + + annotation_progress: MutableSequence[ + "VideoAnnotationProgress" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoAnnotationProgress", + ) + + +class SpeechTranscriptionConfig(proto.Message): + r"""Config for SPEECH_TRANSCRIPTION. + + Attributes: + language_code (str): + Required. *Required* The language of the supplied audio as a + `BCP-47 `__ + language tag. Example: "en-US". See `Language + Support `__ + for a list of the currently supported language codes. + max_alternatives (int): + Optional. Maximum number of recognition hypotheses to be + returned. Specifically, the maximum number of + ``SpeechRecognitionAlternative`` messages within each + ``SpeechTranscription``. The server may return fewer than + ``max_alternatives``. Valid values are ``0``-``30``. A value + of ``0`` or ``1`` will return a maximum of one. If omitted, + will return a maximum of one. + filter_profanity (bool): + Optional. If set to ``true``, the server will attempt to + filter out profanities, replacing all but the initial + character in each filtered word with asterisks, e.g. "f***". + If set to ``false`` or omitted, profanities won't be + filtered out. + speech_contexts (MutableSequence[google.cloud.videointelligence_v1.types.SpeechContext]): + Optional. A means to provide context to + assist the speech recognition. + enable_automatic_punctuation (bool): + Optional. If 'true', adds punctuation to + recognition result hypotheses. This feature is + only available in select languages. Setting this + for requests in other languages has no effect at + all. The default 'false' value does not add + punctuation to result hypotheses. NOTE: "This is + currently offered as an experimental service, + complimentary to all users. In the future this + may be exclusively available as a premium + feature.". + audio_tracks (MutableSequence[int]): + Optional. For file formats, such as MXF or + MKV, supporting multiple audio tracks, specify + up to two tracks. Default: track 0. + enable_speaker_diarization (bool): + Optional. If 'true', enables speaker detection for each + recognized word in the top alternative of the recognition + result using a speaker_tag provided in the WordInfo. 
Note: + When this is true, we send all the words from the beginning + of the audio for the top alternative in every consecutive + response. This is done in order to improve our speaker tags + as our models learn to identify the speakers in the + conversation over time. + diarization_speaker_count (int): + Optional. If set, specifies the estimated number of speakers + in the conversation. If not set, defaults to '2'. Ignored + unless enable_speaker_diarization is set to true. + enable_word_confidence (bool): + Optional. If ``true``, the top result includes a list of + words and the confidence for those words. If ``false``, no + word-level confidence information is returned. The default + is ``false``. + """ + + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + max_alternatives: int = proto.Field( + proto.INT32, + number=2, + ) + filter_profanity: bool = proto.Field( + proto.BOOL, + number=3, + ) + speech_contexts: MutableSequence["SpeechContext"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="SpeechContext", + ) + enable_automatic_punctuation: bool = proto.Field( + proto.BOOL, + number=5, + ) + audio_tracks: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=6, + ) + enable_speaker_diarization: bool = proto.Field( + proto.BOOL, + number=7, + ) + diarization_speaker_count: int = proto.Field( + proto.INT32, + number=8, + ) + enable_word_confidence: bool = proto.Field( + proto.BOOL, + number=9, + ) + + +class SpeechContext(proto.Message): + r"""Provides "hints" to the speech recognizer to favor specific + words and phrases in the results. + + Attributes: + phrases (MutableSequence[str]): + Optional. A list of strings containing words and phrases + "hints" so that the speech recognition is more likely to + recognize them. This can be used to improve the accuracy for + specific words and phrases, for example, if specific + commands are typically spoken by the user. This can also be + used to add additional words to the vocabulary of the + recognizer. See `usage + limits `__. + """ + + phrases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class SpeechTranscription(proto.Message): + r"""A speech recognition result corresponding to a portion of the + audio. + + Attributes: + alternatives (MutableSequence[google.cloud.videointelligence_v1.types.SpeechRecognitionAlternative]): + May contain one or more recognition hypotheses (up to the + maximum specified in ``max_alternatives``). These + alternatives are ordered in terms of accuracy, with the top + (first) alternative being the most probable, as ranked by + the recognizer. + language_code (str): + Output only. The + `BCP-47 `__ + language tag of the language in this result. This language + code was detected to have the most likelihood of being + spoken in the audio. + """ + + alternatives: MutableSequence["SpeechRecognitionAlternative"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SpeechRecognitionAlternative", + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SpeechRecognitionAlternative(proto.Message): + r"""Alternative hypotheses (a.k.a. n-best list). + + Attributes: + transcript (str): + Transcript text representing the words that + the user spoke. + confidence (float): + Output only. The confidence estimate between 0.0 and 1.0. A + higher number indicates an estimated greater likelihood that + the recognized words are correct. This field is set only for + the top alternative. 
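Putting the speech messages together: the config is attached through ``VideoContext``, and results are read from ``speech_transcriptions``. A sketch in which the language and speaker count are illustrative and ``result`` is assumed to be a ``VideoAnnotationResults`` message:

.. code-block:: python

    from google.cloud import videointelligence_v1

    config = videointelligence_v1.SpeechTranscriptionConfig(
        language_code="en-US",
        enable_automatic_punctuation=True,
        enable_speaker_diarization=True,
        diarization_speaker_count=2,
    )
    context = videointelligence_v1.VideoContext(speech_transcription_config=config)

    # After the operation completes:
    for transcription in result.speech_transcriptions:
        if not transcription.alternatives:
            continue
        best = transcription.alternatives[0]  # most probable hypothesis first
        print(best.transcript, best.confidence)
        for word in best.words:
            print(word.word, word.speaker_tag)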
This field is not guaranteed to be + accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. + words (MutableSequence[google.cloud.videointelligence_v1.types.WordInfo]): + Output only. A list of word-specific information for each + recognized word. Note: When ``enable_speaker_diarization`` + is set to true, you will see all the words from the + beginning of the audio. + """ + + transcript: str = proto.Field( + proto.STRING, + number=1, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + words: MutableSequence["WordInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="WordInfo", + ) + + +class WordInfo(proto.Message): + r"""Word-specific information for recognized words. Word information is + only included in the response when certain request parameters are + set, such as ``enable_word_time_offsets``. + + Attributes: + start_time (google.protobuf.duration_pb2.Duration): + Time offset relative to the beginning of the audio, and + corresponding to the start of the spoken word. This field is + only set if ``enable_word_time_offsets=true`` and only in + the top hypothesis. This is an experimental feature and the + accuracy of the time offset can vary. + end_time (google.protobuf.duration_pb2.Duration): + Time offset relative to the beginning of the audio, and + corresponding to the end of the spoken word. This field is + only set if ``enable_word_time_offsets=true`` and only in + the top hypothesis. This is an experimental feature and the + accuracy of the time offset can vary. + word (str): + The word corresponding to this set of + information. + confidence (float): + Output only. The confidence estimate between 0.0 and 1.0. A + higher number indicates an estimated greater likelihood that + the recognized words are correct. This field is set only for + the top alternative. This field is not guaranteed to be + accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. + speaker_tag (int): + Output only. A distinct integer value is assigned for every + speaker within the audio. This field specifies which one of + those speakers was detected to have spoken this word. Value + ranges from 1 up to diarization_speaker_count, and is only + set if speaker diarization is enabled. + """ + + start_time: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + end_time: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + word: str = proto.Field( + proto.STRING, + number=3, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=4, + ) + speaker_tag: int = proto.Field( + proto.INT32, + number=5, + ) + + +class NormalizedVertex(proto.Message): + r"""A vertex represents a 2D point in the image. + NOTE: the normalized vertex coordinates are relative to the + original image and range from 0 to 1. + + Attributes: + x (float): + X coordinate. + y (float): + Y coordinate. + """ + + x: float = proto.Field( + proto.FLOAT, + number=1, + ) + y: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class NormalizedBoundingPoly(proto.Message): + r"""Normalized bounding polygon for text (that might not be aligned with + axis). Contains list of the corner points in clockwise order + starting from top-left corner. 
For example, for a rectangular + bounding box: When the text is horizontal it might look like: 0----1 + \| \| 3----2 + + When it's clockwise rotated 180 degrees around the top-left corner + it becomes: 2----3 \| \| 1----0 + + and the vertex order will still be (0, 1, 2, 3). Note that values + can be less than 0, or greater than 1 due to trignometric + calculations for location of the box. + + Attributes: + vertices (MutableSequence[google.cloud.videointelligence_v1.types.NormalizedVertex]): + Normalized vertices of the bounding polygon. + """ + + vertices: MutableSequence["NormalizedVertex"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NormalizedVertex", + ) + + +class TextSegment(proto.Message): + r"""Video segment level annotation results for text detection. + + Attributes: + segment (google.cloud.videointelligence_v1.types.VideoSegment): + Video segment where a text snippet was + detected. + confidence (float): + Confidence for the track of detected text. It + is calculated as the highest over all frames + where OCR detected text appears. + frames (MutableSequence[google.cloud.videointelligence_v1.types.TextFrame]): + Information related to the frames where OCR + detected text appears. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + frames: MutableSequence["TextFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="TextFrame", + ) + + +class TextFrame(proto.Message): + r"""Video frame level annotation results for text annotation + (OCR). Contains information regarding timestamp and bounding box + locations for the frames containing detected OCR text snippets. + + Attributes: + rotated_bounding_box (google.cloud.videointelligence_v1.types.NormalizedBoundingPoly): + Bounding polygon of the detected text for + this frame. + time_offset (google.protobuf.duration_pb2.Duration): + Timestamp of this frame. + """ + + rotated_bounding_box: "NormalizedBoundingPoly" = proto.Field( + proto.MESSAGE, + number=1, + message="NormalizedBoundingPoly", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class TextAnnotation(proto.Message): + r"""Annotations related to one detected OCR text snippet. This + will contain the corresponding text, confidence value, and frame + level information for each detection. + + Attributes: + text (str): + The detected text. + segments (MutableSequence[google.cloud.videointelligence_v1.types.TextSegment]): + All video segments where OCR detected text + appears. + version (str): + Feature version. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + segments: MutableSequence["TextSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="TextSegment", + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ObjectTrackingFrame(proto.Message): + r"""Video frame level annotations for object detection and + tracking. This field stores per frame location, time offset, and + confidence. + + Attributes: + normalized_bounding_box (google.cloud.videointelligence_v1.types.NormalizedBoundingBox): + The normalized bounding box location of this + object track for the frame. + time_offset (google.protobuf.duration_pb2.Duration): + The timestamp of the frame in microseconds. 
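Reading OCR output ties these messages together: each ``TextAnnotation`` holds segments, and each segment holds per-frame polygons. A sketch, assuming ``result`` is a ``VideoAnnotationResults`` message and that proto-plus surfaces ``Duration`` fields as ``datetime.timedelta``:

.. code-block:: python

    for text_annotation in result.text_annotations:
        print("text:", text_annotation.text)
        for segment in text_annotation.segments:
            print("  confidence:", segment.confidence)
            for frame in segment.frames:
                corners = [(v.x, v.y) for v in frame.rotated_bounding_box.vertices]
                print(f"  at {frame.time_offset.total_seconds():.3f}s:", corners)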
+ """ + + normalized_bounding_box: "NormalizedBoundingBox" = proto.Field( + proto.MESSAGE, + number=1, + message="NormalizedBoundingBox", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class ObjectTrackingAnnotation(proto.Message): + r"""Annotations corresponding to one tracked object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + segment (google.cloud.videointelligence_v1.types.VideoSegment): + Non-streaming batch mode ONLY. + Each object track corresponds to one video + segment where it appears. + + This field is a member of `oneof`_ ``track_info``. + track_id (int): + Streaming mode ONLY. In streaming mode, we do not know the + end time of a tracked object before it is completed. Hence, + there is no VideoSegment info returned. Instead, we provide + a unique identifiable integer track_id so that the customers + can correlate the results of the ongoing + ObjectTrackAnnotation of the same track_id over time. + + This field is a member of `oneof`_ ``track_info``. + entity (google.cloud.videointelligence_v1.types.Entity): + Entity to specify the object category that + this track is labeled as. + confidence (float): + Object category's labeling confidence of this + track. + frames (MutableSequence[google.cloud.videointelligence_v1.types.ObjectTrackingFrame]): + Information corresponding to all frames where + this object track appears. Non-streaming batch + mode: it may be one or multiple + ObjectTrackingFrame messages in frames. + Streaming mode: it can only be one + ObjectTrackingFrame message in frames. + version (str): + Feature version. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=3, + oneof="track_info", + message="VideoSegment", + ) + track_id: int = proto.Field( + proto.INT64, + number=5, + oneof="track_info", + ) + entity: "Entity" = proto.Field( + proto.MESSAGE, + number=1, + message="Entity", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=4, + ) + frames: MutableSequence["ObjectTrackingFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ObjectTrackingFrame", + ) + version: str = proto.Field( + proto.STRING, + number=6, + ) + + +class LogoRecognitionAnnotation(proto.Message): + r"""Annotation corresponding to one detected, tracked and + recognized logo class. + + Attributes: + entity (google.cloud.videointelligence_v1.types.Entity): + Entity category information to specify the + logo class that all the logo tracks within this + LogoRecognitionAnnotation are recognized as. + tracks (MutableSequence[google.cloud.videointelligence_v1.types.Track]): + All logo tracks where the recognized logo + appears. Each track corresponds to one logo + instance appearing in consecutive frames. + segments (MutableSequence[google.cloud.videointelligence_v1.types.VideoSegment]): + All video segments where the recognized logo + appears. There might be multiple instances of + the same logo class appearing in one + VideoSegment. 
+ """ + + entity: "Entity" = proto.Field( + proto.MESSAGE, + number=1, + message="Entity", + ) + tracks: MutableSequence["Track"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Track", + ) + segments: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="VideoSegment", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/__init__.py new file mode 100644 index 000000000000..1ca9a92caaf8 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/__init__.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.videointelligence_v1beta2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, +) +from .types.video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + FaceAnnotation, + FaceDetectionConfig, + FaceFrame, + FaceSegment, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + NormalizedBoundingBox, + ShotChangeDetectionConfig, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, +) + +__all__ = ( + "VideoIntelligenceServiceAsyncClient", + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "FaceAnnotation", + "FaceDetectionConfig", + "FaceFrame", + "FaceSegment", + "Feature", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelDetectionMode", + "LabelFrame", + "LabelSegment", + "Likelihood", + "NormalizedBoundingBox", + "ShotChangeDetectionConfig", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoIntelligenceServiceClient", + "VideoSegment", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_metadata.json b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_metadata.json new file mode 100644 index 000000000000..b4479ce7b064 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.videointelligence_v1beta2", + "protoPackage": "google.cloud.videointelligence.v1beta2", + "schema": "1.0", + "services": { + "VideoIntelligenceService": { + "clients": { + 
"grpc": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "grpc-async": { + "libraryClient": "VideoIntelligenceServiceAsyncClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "rest": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_version.py new file mode 100644 index 000000000000..84f00fd3f92c --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/py.typed b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/py.typed new file mode 100644 index 000000000000..e7fb166bf3e5 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-videointelligence package uses inline types. diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/__init__.py new file mode 100644 index 000000000000..9bfaa3bf2e62 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import VideoIntelligenceServiceAsyncClient +from .client import VideoIntelligenceServiceClient + +__all__ = ( + "VideoIntelligenceServiceClient", + "VideoIntelligenceServiceAsyncClient", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/async_client.py new file mode 100644 index 000000000000..fd075ac1c8ee --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/async_client.py @@ -0,0 +1,380 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore + +from google.cloud.videointelligence_v1beta2.types import video_intelligence + +from .client import VideoIntelligenceServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport + + +class VideoIntelligenceServiceAsyncClient: + """Service that implements Google Cloud Video Intelligence API.""" + + _client: VideoIntelligenceServiceClient + + DEFAULT_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(VideoIntelligenceServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + VideoIntelligenceServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + VideoIntelligenceServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + VideoIntelligenceServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceAsyncClient: The constructed client. + """ + return VideoIntelligenceServiceClient.from_service_account_info.__func__(VideoIntelligenceServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+
+        Returns:
+            VideoIntelligenceServiceAsyncClient: The constructed client.
+        """
+        return VideoIntelligenceServiceClient.from_service_account_file.__func__(VideoIntelligenceServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return VideoIntelligenceServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> VideoIntelligenceServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            VideoIntelligenceServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(VideoIntelligenceServiceClient).get_transport_class,
+        type(VideoIntelligenceServiceClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, VideoIntelligenceServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the video intelligence service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.VideoIntelligenceServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = VideoIntelligenceServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import videointelligence_v1beta2 + + async def sample_annotate_video(): + # Create a client + client = videointelligence_v1beta2.VideoIntelligenceServiceAsyncClient() + + # Initialize request argument(s) + request = videointelligence_v1beta2.AnnotateVideoRequest( + features=['FACE_DETECTION'], + ) + + # Make the request + operation = client.annotate_video(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.videointelligence_v1beta2.types.AnnotateVideoRequest, dict]]): + The request object. Video annotation request. + input_uri (:class:`str`): + Input video location. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following + format: ``gs://bucket-id/object-id`` (other URI formats + return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + A video URI may include wildcards in ``object-id``, and + thus identify multiple videos. Supported wildcards: '*' + to match 0 or more characters; '?' to match 1 character. + If unset, the input video should be embedded in the + request as ``input_content``. If set, ``input_content`` + should be unset. 
+ + This corresponds to the ``input_uri`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`MutableSequence[google.cloud.videointelligence_v1beta2.types.Feature]`): + Required. Requested video annotation + features. + + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.videointelligence_v1beta2.types.AnnotateVideoResponse` Video annotation response. Included in the response + field of the Operation returned by the GetOperation + call of the google::longrunning::Operations service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input_uri, features]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = video_intelligence.AnnotateVideoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if input_uri is not None: + request.input_uri = input_uri + if features: + request.features.extend(features) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "VideoIntelligenceServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceAsyncClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/client.py new file mode 100644 index 000000000000..88955d8c5f8a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/client.py @@ -0,0 +1,581 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1beta2 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore + +from google.cloud.videointelligence_v1beta2.types import video_intelligence + +from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .transports.grpc import VideoIntelligenceServiceGrpcTransport +from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport +from .transports.rest import VideoIntelligenceServiceRestTransport + + +class VideoIntelligenceServiceClientMeta(type): + """Metaclass for the VideoIntelligenceService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
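+
+    For example (illustrative only), ``get_transport_class("rest")`` returns
+    ``VideoIntelligenceServiceRestTransport``, while calling it with no label
+    falls back to the first transport registered below (gRPC).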
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[VideoIntelligenceServiceTransport]] + _transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport + _transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport + _transport_registry["rest"] = VideoIntelligenceServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[VideoIntelligenceServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class VideoIntelligenceServiceClient(metaclass=VideoIntelligenceServiceClientMeta): + """Service that implements Google Cloud Video Intelligence API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "videointelligence.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> VideoIntelligenceServiceTransport: + """Returns the transport used by the client instance. + + Returns: + VideoIntelligenceServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, VideoIntelligenceServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, VideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, VideoIntelligenceServiceTransport): + # transport is a VideoIntelligenceServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import videointelligence_v1beta2 + + def sample_annotate_video(): + # Create a client + client = videointelligence_v1beta2.VideoIntelligenceServiceClient() + + # Initialize request argument(s) + request = videointelligence_v1beta2.AnnotateVideoRequest( + features=['FACE_DETECTION'], + ) + + # Make the request + operation = client.annotate_video(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.videointelligence_v1beta2.types.AnnotateVideoRequest, dict]): + The request object. Video annotation request. + input_uri (str): + Input video location. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following + format: ``gs://bucket-id/object-id`` (other URI formats + return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + A video URI may include wildcards in ``object-id``, and + thus identify multiple videos. Supported wildcards: '*' + to match 0 or more characters; '?' to match 1 character. + If unset, the input video should be embedded in the + request as ``input_content``. If set, ``input_content`` + should be unset. + + This corresponds to the ``input_uri`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (MutableSequence[google.cloud.videointelligence_v1beta2.types.Feature]): + Required. Requested video annotation + features. + + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.videointelligence_v1beta2.types.AnnotateVideoResponse` Video annotation response. Included in the response + field of the Operation returned by the GetOperation + call of the google::longrunning::Operations service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input_uri, features]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a video_intelligence.AnnotateVideoRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, video_intelligence.AnnotateVideoRequest): + request = video_intelligence.AnnotateVideoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
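+        # Illustrative only: because the flattened arguments are folded into
+        # the request here, the two call styles below are equivalent; the
+        # bucket path is hypothetical.
+        #
+        #   client.annotate_video(
+        #       input_uri="gs://my-bucket/video.mp4",
+        #       features=[videointelligence_v1beta2.Feature.LABEL_DETECTION],
+        #   )
+        #   client.annotate_video(
+        #       request={
+        #           "input_uri": "gs://my-bucket/video.mp4",
+        #           "features": [videointelligence_v1beta2.Feature.LABEL_DETECTION],
+        #       }
+        #   )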
+ if input_uri is not None: + request.input_uri = input_uri + if features is not None: + request.features = features + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_video] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "VideoIntelligenceServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/__init__.py new file mode 100644 index 000000000000..775ef9e3503a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import VideoIntelligenceServiceTransport +from .grpc import VideoIntelligenceServiceGrpcTransport +from .grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport +from .rest import ( + VideoIntelligenceServiceRestInterceptor, + VideoIntelligenceServiceRestTransport, +) + +# Compile a registry of transports. 
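+# The registry maps the transport label accepted by the client ("grpc",
+# "grpc_asyncio", "rest") to the class that implements it. Illustrative
+# lookup, assuming this module is imported as ``transports``:
+#
+#   transport_cls = transports._transport_registry["rest"]
+#   assert transport_cls is transports.VideoIntelligenceServiceRestTransport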
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[VideoIntelligenceServiceTransport]] +_transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport +_transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport +_transport_registry["rest"] = VideoIntelligenceServiceRestTransport + +__all__ = ( + "VideoIntelligenceServiceTransport", + "VideoIntelligenceServiceGrpcTransport", + "VideoIntelligenceServiceGrpcAsyncIOTransport", + "VideoIntelligenceServiceRestTransport", + "VideoIntelligenceServiceRestInterceptor", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/base.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/base.py new file mode 100644 index 000000000000..2d57255cf68d --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/base.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1beta2 import gapic_version as package_version +from google.cloud.videointelligence_v1beta2.types import video_intelligence + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class VideoIntelligenceServiceTransport(abc.ABC): + """Abstract transport class for VideoIntelligenceService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "videointelligence.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.annotate_video: gapic_v1.method.wrap_method( + self.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("VideoIntelligenceServiceTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/grpc.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/grpc.py new file mode 100644 index 000000000000..a7624cfadbee --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/grpc.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.videointelligence_v1beta2.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport + + +class VideoIntelligenceServiceGrpcTransport(VideoIntelligenceServiceTransport): + """gRPC backend transport for VideoIntelligenceService. + + Service that implements Google Cloud Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
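+                # Illustrative only: a client_cert_source is a zero-argument
+                # callable returning (certificate_bytes, private_key_bytes) in
+                # PEM format; the file paths below are hypothetical.
+                #
+                #   def client_cert_source():
+                #       with open("client.pem", "rb") as c, open("client.key", "rb") as k:
+                #           return c.read(), k.read()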
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
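+
+        For example (illustrative only), the cached client can poll a
+        long-running operation by name:
+        ``transport.operations_client.get_operation("operations/123")``,
+        where the operation name is hypothetical.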
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + Returns: + Callable[[~.AnnotateVideoRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1beta2.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("VideoIntelligenceServiceGrpcTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/grpc_asyncio.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4a531d4f47e5 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/grpc_asyncio.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.videointelligence_v1beta2.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .grpc import VideoIntelligenceServiceGrpcTransport + + +class VideoIntelligenceServiceGrpcAsyncIOTransport(VideoIntelligenceServiceTransport): + """gRPC AsyncIO backend transport for VideoIntelligenceService. + + Service that implements Google Cloud Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). 
+ + Returns: + Callable[[~.AnnotateVideoRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1beta2.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("VideoIntelligenceServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/rest.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/rest.py new file mode 100644 index 000000000000..69f04e81f1c8 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/services/video_intelligence_service/transports/rest.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.videointelligence_v1beta2.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import VideoIntelligenceServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class VideoIntelligenceServiceRestInterceptor: + """Interceptor for VideoIntelligenceService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the VideoIntelligenceServiceRestTransport.
+
+    .. code-block:: python
+        class MyCustomVideoIntelligenceServiceInterceptor(VideoIntelligenceServiceRestInterceptor):
+            def pre_annotate_video(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_annotate_video(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = VideoIntelligenceServiceRestTransport(interceptor=MyCustomVideoIntelligenceServiceInterceptor())
+        client = VideoIntelligenceServiceClient(transport=transport)
+
+
+    """
+
+    def pre_annotate_video(
+        self,
+        request: video_intelligence.AnnotateVideoRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[video_intelligence.AnnotateVideoRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for annotate_video
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the VideoIntelligenceService server.
+        """
+        return request, metadata
+
+    def post_annotate_video(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for annotate_video
+
+        Override in a subclass to manipulate the response
+        after it is returned by the VideoIntelligenceService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class VideoIntelligenceServiceRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: VideoIntelligenceServiceRestInterceptor
+
+
+class VideoIntelligenceServiceRestTransport(VideoIntelligenceServiceTransport):
+    """REST backend transport for VideoIntelligenceService.
+
+    Service that implements Google Cloud Video Intelligence API.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "videointelligence.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[VideoIntelligenceServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or VideoIntelligenceServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
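+        # The ``http_options`` mapping built below mirrors the
+        # ``google.longrunning.Operations`` HTTP bindings for this API, so
+        # the returned client can get, list, cancel, and delete
+        # long-running operations over REST.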
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*/locations/*}/operations", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1beta2/operations/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1beta2/operations/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1beta2/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta2/operations/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1beta2", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _AnnotateVideo(VideoIntelligenceServiceRestStub): + def __hash__(self): + return hash("AnnotateVideo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: video_intelligence.AnnotateVideoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the annotate video method over HTTP. + + Args: + request (~.video_intelligence.AnnotateVideoRequest): + The request object. Video annotation request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/videos:annotate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_annotate_video(request, metadata) + pb_request = video_intelligence.AnnotateVideoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_video(resp) + return resp + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateVideo(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("VideoIntelligenceServiceRestTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/types/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/types/__init__.py new file mode 100644 index 000000000000..ffcc543dc030 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/types/__init__.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
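The REST transport defined above can be selected by name when constructing the client. A minimal sketch, assuming Application Default Credentials are configured and that the version-level package re-exports these names (as the v1p1beta1 ``__init__.py`` later in this change does); the bucket URI is a placeholder:

.. code-block:: python

    from google.cloud import videointelligence_v1beta2

    client = videointelligence_v1beta2.VideoIntelligenceServiceClient(transport="rest")
    operation = client.annotate_video(
        request=videointelligence_v1beta2.AnnotateVideoRequest(
            input_uri="gs://example-bucket/example.mp4",  # placeholder URI
            features=[videointelligence_v1beta2.Feature.LABEL_DETECTION],
        )
    )
    # Block until the long-running operation completes.
    print(operation.result(timeout=600))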
+# +from .video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + FaceAnnotation, + FaceDetectionConfig, + FaceFrame, + FaceSegment, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + NormalizedBoundingBox, + ShotChangeDetectionConfig, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, +) + +__all__ = ( + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "FaceAnnotation", + "FaceDetectionConfig", + "FaceFrame", + "FaceSegment", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelFrame", + "LabelSegment", + "NormalizedBoundingBox", + "ShotChangeDetectionConfig", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoSegment", + "Feature", + "LabelDetectionMode", + "Likelihood", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/types/video_intelligence.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/types/video_intelligence.py new file mode 100644 index 000000000000..4a8b8459ed27 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1beta2/types/video_intelligence.py @@ -0,0 +1,765 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.videointelligence.v1beta2", + manifest={ + "Feature", + "LabelDetectionMode", + "Likelihood", + "AnnotateVideoRequest", + "VideoContext", + "LabelDetectionConfig", + "ShotChangeDetectionConfig", + "ExplicitContentDetectionConfig", + "FaceDetectionConfig", + "VideoSegment", + "LabelSegment", + "LabelFrame", + "Entity", + "LabelAnnotation", + "ExplicitContentFrame", + "ExplicitContentAnnotation", + "NormalizedBoundingBox", + "FaceSegment", + "FaceFrame", + "FaceAnnotation", + "VideoAnnotationResults", + "AnnotateVideoResponse", + "VideoAnnotationProgress", + "AnnotateVideoProgress", + }, +) + + +class Feature(proto.Enum): + r"""Video annotation feature. + + Values: + FEATURE_UNSPECIFIED (0): + Unspecified. + LABEL_DETECTION (1): + Label detection. Detect objects, such as dog + or flower. + SHOT_CHANGE_DETECTION (2): + Shot change detection. + EXPLICIT_CONTENT_DETECTION (3): + Explicit content detection. + FACE_DETECTION (4): + Human face detection and tracking. 
+ """ + FEATURE_UNSPECIFIED = 0 + LABEL_DETECTION = 1 + SHOT_CHANGE_DETECTION = 2 + EXPLICIT_CONTENT_DETECTION = 3 + FACE_DETECTION = 4 + + +class LabelDetectionMode(proto.Enum): + r"""Label detection mode. + + Values: + LABEL_DETECTION_MODE_UNSPECIFIED (0): + Unspecified. + SHOT_MODE (1): + Detect shot-level labels. + FRAME_MODE (2): + Detect frame-level labels. + SHOT_AND_FRAME_MODE (3): + Detect both shot-level and frame-level + labels. + """ + LABEL_DETECTION_MODE_UNSPECIFIED = 0 + SHOT_MODE = 1 + FRAME_MODE = 2 + SHOT_AND_FRAME_MODE = 3 + + +class Likelihood(proto.Enum): + r"""Bucketized representation of likelihood. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Unspecified likelihood. + VERY_UNLIKELY (1): + Very unlikely. + UNLIKELY (2): + Unlikely. + POSSIBLE (3): + Possible. + LIKELY (4): + Likely. + VERY_LIKELY (5): + Very likely. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class AnnotateVideoRequest(proto.Message): + r"""Video annotation request. + + Attributes: + input_uri (str): + Input video location. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + A video URI may include wildcards in ``object-id``, and thus + identify multiple videos. Supported wildcards: '*' to match + 0 or more characters; '?' to match 1 character. If unset, + the input video should be embedded in the request as + ``input_content``. If set, ``input_content`` should be + unset. + input_content (bytes): + The video data bytes. If unset, the input video(s) should be + specified via ``input_uri``. If set, ``input_uri`` should be + unset. + features (MutableSequence[google.cloud.videointelligence_v1beta2.types.Feature]): + Required. Requested video annotation + features. + video_context (google.cloud.videointelligence_v1beta2.types.VideoContext): + Additional video context and/or + feature-specific parameters. + output_uri (str): + Optional. Location where the output (in JSON format) should + be stored. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + location_id (str): + Optional. Cloud region where annotation should take place. + Supported cloud regions: ``us-east1``, ``us-west1``, + ``europe-west1``, ``asia-east1``. If no region is specified, + a region will be determined based on video file location. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + input_content: bytes = proto.Field( + proto.BYTES, + number=6, + ) + features: MutableSequence["Feature"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Feature", + ) + video_context: "VideoContext" = proto.Field( + proto.MESSAGE, + number=3, + message="VideoContext", + ) + output_uri: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class VideoContext(proto.Message): + r"""Video context and/or feature-specific parameters. + + Attributes: + segments (MutableSequence[google.cloud.videointelligence_v1beta2.types.VideoSegment]): + Video segments to annotate. 
The segments may
+            overlap and are not required to be contiguous or
+            span the whole video. If unspecified, each video
+            is treated as a single segment.
+        label_detection_config (google.cloud.videointelligence_v1beta2.types.LabelDetectionConfig):
+            Config for LABEL_DETECTION.
+        shot_change_detection_config (google.cloud.videointelligence_v1beta2.types.ShotChangeDetectionConfig):
+            Config for SHOT_CHANGE_DETECTION.
+        explicit_content_detection_config (google.cloud.videointelligence_v1beta2.types.ExplicitContentDetectionConfig):
+            Config for EXPLICIT_CONTENT_DETECTION.
+        face_detection_config (google.cloud.videointelligence_v1beta2.types.FaceDetectionConfig):
+            Config for FACE_DETECTION.
+    """
+
+    segments: MutableSequence["VideoSegment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="VideoSegment",
+    )
+    label_detection_config: "LabelDetectionConfig" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="LabelDetectionConfig",
+    )
+    shot_change_detection_config: "ShotChangeDetectionConfig" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="ShotChangeDetectionConfig",
+    )
+    explicit_content_detection_config: "ExplicitContentDetectionConfig" = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message="ExplicitContentDetectionConfig",
+    )
+    face_detection_config: "FaceDetectionConfig" = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message="FaceDetectionConfig",
+    )
+
+
+class LabelDetectionConfig(proto.Message):
+    r"""Config for LABEL_DETECTION.
+
+    Attributes:
+        label_detection_mode (google.cloud.videointelligence_v1beta2.types.LabelDetectionMode):
+            What labels should be detected with LABEL_DETECTION, in
+            addition to video-level labels or segment-level labels. If
+            unspecified, defaults to ``SHOT_MODE``.
+        stationary_camera (bool):
+            Whether the video has been shot from a stationary (i.e.
+            non-moving) camera. When set to true, might improve
+            detection accuracy for moving objects. Should be used with
+            ``SHOT_AND_FRAME_MODE`` enabled.
+        model (str):
+            Model to use for label detection.
+            Supported values: "builtin/stable" (the default
+            if unset) and "builtin/latest".
+    """
+
+    label_detection_mode: "LabelDetectionMode" = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum="LabelDetectionMode",
+    )
+    stationary_camera: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    model: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class ShotChangeDetectionConfig(proto.Message):
+    r"""Config for SHOT_CHANGE_DETECTION.
+
+    Attributes:
+        model (str):
+            Model to use for shot change detection.
+            Supported values: "builtin/stable" (the default
+            if unset) and "builtin/latest".
+    """
+
+    model: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ExplicitContentDetectionConfig(proto.Message):
+    r"""Config for EXPLICIT_CONTENT_DETECTION.
+
+    Attributes:
+        model (str):
+            Model to use for explicit content detection.
+            Supported values: "builtin/stable" (the default
+            if unset) and "builtin/latest".
+    """
+
+    model: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class FaceDetectionConfig(proto.Message):
+    r"""Config for FACE_DETECTION.
+
+    Attributes:
+        model (str):
+            Model to use for face detection.
+            Supported values: "builtin/stable" (the default
+            if unset) and "builtin/latest".
+        include_bounding_boxes (bool):
+            Whether bounding boxes should be included in the
+            face annotation output.
+    """
+
+    model: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    include_bounding_boxes: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+
+
+class VideoSegment(proto.Message):
+    r"""Video segment.
+
+    Attributes:
+        start_time_offset (google.protobuf.duration_pb2.Duration):
+            Time-offset, relative to the beginning of the
+            video, corresponding to the start of the segment
+            (inclusive).
+        end_time_offset (google.protobuf.duration_pb2.Duration):
+            Time-offset, relative to the beginning of the
+            video, corresponding to the end of the segment
+            (inclusive).
+    """
+
+    start_time_offset: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=duration_pb2.Duration,
+    )
+    end_time_offset: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=duration_pb2.Duration,
+    )
+
+
+class LabelSegment(proto.Message):
+    r"""Video segment level annotation results for label detection.
+
+    Attributes:
+        segment (google.cloud.videointelligence_v1beta2.types.VideoSegment):
+            Video segment where a label was detected.
+        confidence (float):
+            Confidence that the label is accurate. Range: [0, 1].
+    """
+
+    segment: "VideoSegment" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="VideoSegment",
+    )
+    confidence: float = proto.Field(
+        proto.FLOAT,
+        number=2,
+    )
+
+
+class LabelFrame(proto.Message):
+    r"""Video frame level annotation results for label detection.
+
+    Attributes:
+        time_offset (google.protobuf.duration_pb2.Duration):
+            Time-offset, relative to the beginning of the
+            video, corresponding to the video frame for this
+            location.
+        confidence (float):
+            Confidence that the label is accurate. Range: [0, 1].
+    """
+
+    time_offset: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=duration_pb2.Duration,
+    )
+    confidence: float = proto.Field(
+        proto.FLOAT,
+        number=2,
+    )
+
+
+class Entity(proto.Message):
+    r"""Detected entity from video analysis.
+
+    Attributes:
+        entity_id (str):
+            Opaque entity ID. Some IDs may be available in `Google
+            Knowledge Graph Search
+            API `__.
+        description (str):
+            Textual description, e.g. ``Fixed-gear bicycle``.
+        language_code (str):
+            Language code for ``description`` in BCP-47 format.
+    """
+
+    entity_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    language_code: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class LabelAnnotation(proto.Message):
+    r"""Label annotation.
+
+    Attributes:
+        entity (google.cloud.videointelligence_v1beta2.types.Entity):
+            Detected entity.
+        category_entities (MutableSequence[google.cloud.videointelligence_v1beta2.types.Entity]):
+            Common categories for the detected entity. E.g. when the
+            label is ``Terrier`` the category is likely ``dog``. In
+            some cases there might be more than one category, e.g.
+            ``Terrier`` could also be a ``pet``.
+        segments (MutableSequence[google.cloud.videointelligence_v1beta2.types.LabelSegment]):
+            All video segments where a label was
+            detected.
+        frames (MutableSequence[google.cloud.videointelligence_v1beta2.types.LabelFrame]):
+            All video frames where a label was detected.
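+
+    For illustration, a sketch of reading these fields from a finished
+    annotation result (``results`` is a placeholder for one
+    ``VideoAnnotationResults`` message):
+
+    .. code-block:: python
+
+        for label in results.segment_label_annotations:
+            print(label.entity.description)
+            for segment in label.segments:
+                print(segment.segment.end_time_offset, segment.confidence)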
+    """
+
+    entity: "Entity" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="Entity",
+    )
+    category_entities: MutableSequence["Entity"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message="Entity",
+    )
+    segments: MutableSequence["LabelSegment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message="LabelSegment",
+    )
+    frames: MutableSequence["LabelFrame"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message="LabelFrame",
+    )
+
+
+class ExplicitContentFrame(proto.Message):
+    r"""Video frame level annotation results for explicit content.
+
+    Attributes:
+        time_offset (google.protobuf.duration_pb2.Duration):
+            Time-offset, relative to the beginning of the
+            video, corresponding to the video frame for this
+            location.
+        pornography_likelihood (google.cloud.videointelligence_v1beta2.types.Likelihood):
+            Likelihood of the pornography content.
+    """
+
+    time_offset: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=duration_pb2.Duration,
+    )
+    pornography_likelihood: "Likelihood" = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum="Likelihood",
+    )
+
+
+class ExplicitContentAnnotation(proto.Message):
+    r"""Explicit content annotation (based on per-frame visual
+    signals only). If no explicit content has been detected in a
+    frame, no annotations are present for that frame.
+
+    Attributes:
+        frames (MutableSequence[google.cloud.videointelligence_v1beta2.types.ExplicitContentFrame]):
+            All video frames where explicit content was
+            detected.
+    """
+
+    frames: MutableSequence["ExplicitContentFrame"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="ExplicitContentFrame",
+    )
+
+
+class NormalizedBoundingBox(proto.Message):
+    r"""Normalized bounding box. The normalized vertex coordinates
+    are relative to the original image. Range: [0, 1].
+
+    Attributes:
+        left (float):
+            Left X coordinate.
+        top (float):
+            Top Y coordinate.
+        right (float):
+            Right X coordinate.
+        bottom (float):
+            Bottom Y coordinate.
+    """
+
+    left: float = proto.Field(
+        proto.FLOAT,
+        number=1,
+    )
+    top: float = proto.Field(
+        proto.FLOAT,
+        number=2,
+    )
+    right: float = proto.Field(
+        proto.FLOAT,
+        number=3,
+    )
+    bottom: float = proto.Field(
+        proto.FLOAT,
+        number=4,
+    )
+
+
+class FaceSegment(proto.Message):
+    r"""Video segment level annotation results for face detection.
+
+    Attributes:
+        segment (google.cloud.videointelligence_v1beta2.types.VideoSegment):
+            Video segment where a face was detected.
+    """
+
+    segment: "VideoSegment" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="VideoSegment",
+    )
+
+
+class FaceFrame(proto.Message):
+    r"""Video frame level annotation results for face detection.
+
+    Attributes:
+        normalized_bounding_boxes (MutableSequence[google.cloud.videointelligence_v1beta2.types.NormalizedBoundingBox]):
+            Normalized bounding boxes in a frame.
+            There can be more than one box if the same
+            face is detected in multiple locations within
+            the current frame.
+        time_offset (google.protobuf.duration_pb2.Duration):
+            Time-offset, relative to the beginning of the
+            video, corresponding to the video frame for this
+            location.
+    """
+
+    normalized_bounding_boxes: MutableSequence[
+        "NormalizedBoundingBox"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="NormalizedBoundingBox",
+    )
+    time_offset: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=duration_pb2.Duration,
+    )
+
+
+class FaceAnnotation(proto.Message):
+    r"""Face annotation.
+ + Attributes: + thumbnail (bytes): + Thumbnail of a representative face view (in + JPEG format). + segments (MutableSequence[google.cloud.videointelligence_v1beta2.types.FaceSegment]): + All video segments where a face was detected. + frames (MutableSequence[google.cloud.videointelligence_v1beta2.types.FaceFrame]): + All video frames where a face was detected. + """ + + thumbnail: bytes = proto.Field( + proto.BYTES, + number=1, + ) + segments: MutableSequence["FaceSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="FaceSegment", + ) + frames: MutableSequence["FaceFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="FaceFrame", + ) + + +class VideoAnnotationResults(proto.Message): + r"""Annotation results for a single video. + + Attributes: + input_uri (str): + Video file location in `Google Cloud + Storage `__. + segment_label_annotations (MutableSequence[google.cloud.videointelligence_v1beta2.types.LabelAnnotation]): + Label annotations on video level or user + specified segment level. There is exactly one + element for each unique label. + shot_label_annotations (MutableSequence[google.cloud.videointelligence_v1beta2.types.LabelAnnotation]): + Label annotations on shot level. + There is exactly one element for each unique + label. + frame_label_annotations (MutableSequence[google.cloud.videointelligence_v1beta2.types.LabelAnnotation]): + Label annotations on frame level. + There is exactly one element for each unique + label. + face_annotations (MutableSequence[google.cloud.videointelligence_v1beta2.types.FaceAnnotation]): + Face annotations. There is exactly one + element for each unique face. + shot_annotations (MutableSequence[google.cloud.videointelligence_v1beta2.types.VideoSegment]): + Shot annotations. Each shot is represented as + a video segment. + explicit_annotation (google.cloud.videointelligence_v1beta2.types.ExplicitContentAnnotation): + Explicit content annotation. + error (google.rpc.status_pb2.Status): + If set, indicates an error. Note that for a single + ``AnnotateVideoRequest`` some videos may succeed and some + may fail. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + segment_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="LabelAnnotation", + ) + shot_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="LabelAnnotation", + ) + frame_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="LabelAnnotation", + ) + face_annotations: MutableSequence["FaceAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="FaceAnnotation", + ) + shot_annotations: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="VideoSegment", + ) + explicit_annotation: "ExplicitContentAnnotation" = proto.Field( + proto.MESSAGE, + number=7, + message="ExplicitContentAnnotation", + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=9, + message=status_pb2.Status, + ) + + +class AnnotateVideoResponse(proto.Message): + r"""Video annotation response. Included in the ``response`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. 
+ + Attributes: + annotation_results (MutableSequence[google.cloud.videointelligence_v1beta2.types.VideoAnnotationResults]): + Annotation results for all videos specified in + ``AnnotateVideoRequest``. + """ + + annotation_results: MutableSequence["VideoAnnotationResults"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoAnnotationResults", + ) + + +class VideoAnnotationProgress(proto.Message): + r"""Annotation progress for a single video. + + Attributes: + input_uri (str): + Video file location in `Google Cloud + Storage `__. + progress_percent (int): + Approximate percentage processed thus far. + Guaranteed to be 100 when fully processed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the request was received. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Time of the most recent update. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + progress_percent: int = proto.Field( + proto.INT32, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class AnnotateVideoProgress(proto.Message): + r"""Video annotation progress. Included in the ``metadata`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + Attributes: + annotation_progress (MutableSequence[google.cloud.videointelligence_v1beta2.types.VideoAnnotationProgress]): + Progress metadata for all videos specified in + ``AnnotateVideoRequest``. + """ + + annotation_progress: MutableSequence[ + "VideoAnnotationProgress" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoAnnotationProgress", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/__init__.py new file mode 100644 index 000000000000..b9b39a4a0519 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/__init__.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
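The proto-plus message classes in ``video_intelligence.py`` above accept keyword construction. A minimal sketch of assembling a request from these types (the GCS URI is a placeholder):

.. code-block:: python

    from google.cloud.videointelligence_v1beta2 import types

    request = types.AnnotateVideoRequest(
        input_uri="gs://example-bucket/example.mp4",  # placeholder URI
        features=[types.Feature.SHOT_CHANGE_DETECTION],
        video_context=types.VideoContext(
            shot_change_detection_config=types.ShotChangeDetectionConfig(
                model="builtin/stable",
            ),
        ),
    )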
+# +from google.cloud.videointelligence_v1p1beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, +) +from .types.video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + ShotChangeDetectionConfig, + SpeechContext, + SpeechRecognitionAlternative, + SpeechTranscription, + SpeechTranscriptionConfig, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, + WordInfo, +) + +__all__ = ( + "VideoIntelligenceServiceAsyncClient", + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "Feature", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelDetectionMode", + "LabelFrame", + "LabelSegment", + "Likelihood", + "ShotChangeDetectionConfig", + "SpeechContext", + "SpeechRecognitionAlternative", + "SpeechTranscription", + "SpeechTranscriptionConfig", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoIntelligenceServiceClient", + "VideoSegment", + "WordInfo", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_metadata.json b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_metadata.json new file mode 100644 index 000000000000..a1d72074a6ae --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.videointelligence_v1p1beta1", + "protoPackage": "google.cloud.videointelligence.v1p1beta1", + "schema": "1.0", + "services": { + "VideoIntelligenceService": { + "clients": { + "grpc": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "grpc-async": { + "libraryClient": "VideoIntelligenceServiceAsyncClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "rest": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_version.py new file mode 100644 index 000000000000..84f00fd3f92c --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/py.typed b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/py.typed new file mode 100644 index 000000000000..e7fb166bf3e5 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-videointelligence package uses inline types. diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/__init__.py new file mode 100644 index 000000000000..9bfaa3bf2e62 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
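The ``gapic_version.py`` marker above is what the package reports at runtime; the ``{x-release-please-version}`` comment marks where release tooling rewrites the literal. A quick sketch:

.. code-block:: python

    from google.cloud.videointelligence_v1p1beta1 import gapic_version

    print(gapic_version.__version__)  # "2.11.4" at the time of this change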
+# +from .async_client import VideoIntelligenceServiceAsyncClient +from .client import VideoIntelligenceServiceClient + +__all__ = ( + "VideoIntelligenceServiceClient", + "VideoIntelligenceServiceAsyncClient", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/async_client.py new file mode 100644 index 000000000000..00ca18af9f80 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/async_client.py @@ -0,0 +1,380 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore + +from google.cloud.videointelligence_v1p1beta1.types import video_intelligence + +from .client import VideoIntelligenceServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport + + +class VideoIntelligenceServiceAsyncClient: + """Service that implements Google Cloud Video Intelligence API.""" + + _client: VideoIntelligenceServiceClient + + DEFAULT_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(VideoIntelligenceServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + VideoIntelligenceServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + 
VideoIntelligenceServiceClient.common_project_path
+    )
+    parse_common_project_path = staticmethod(
+        VideoIntelligenceServiceClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(
+        VideoIntelligenceServiceClient.common_location_path
+    )
+    parse_common_location_path = staticmethod(
+        VideoIntelligenceServiceClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            VideoIntelligenceServiceAsyncClient: The constructed client.
+        """
+        return VideoIntelligenceServiceClient.from_service_account_info.__func__(VideoIntelligenceServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            VideoIntelligenceServiceAsyncClient: The constructed client.
+        """
+        return VideoIntelligenceServiceClient.from_service_account_file.__func__(VideoIntelligenceServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return VideoIntelligenceServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> VideoIntelligenceServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            VideoIntelligenceServiceTransport: The transport used by the client instance.
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(VideoIntelligenceServiceClient).get_transport_class, + type(VideoIntelligenceServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, VideoIntelligenceServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.VideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = VideoIntelligenceServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import videointelligence_v1p1beta1 + + async def sample_annotate_video(): + # Create a client + client = videointelligence_v1p1beta1.VideoIntelligenceServiceAsyncClient() + + # Initialize request argument(s) + request = videointelligence_v1p1beta1.AnnotateVideoRequest( + features=['SPEECH_TRANSCRIPTION'], + ) + + # Make the request + operation = client.annotate_video(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.videointelligence_v1p1beta1.types.AnnotateVideoRequest, dict]]): + The request object. Video annotation request. + input_uri (:class:`str`): + Input video location. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following + format: ``gs://bucket-id/object-id`` (other URI formats + return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + A video URI may include wildcards in ``object-id``, and + thus identify multiple videos. Supported wildcards: '*' + to match 0 or more characters; '?' to match 1 character. + If unset, the input video should be embedded in the + request as ``input_content``. If set, ``input_content`` + should be unset. + + This corresponds to the ``input_uri`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`MutableSequence[google.cloud.videointelligence_v1p1beta1.types.Feature]`): + Required. Requested video annotation + features. + + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.videointelligence_v1p1beta1.types.AnnotateVideoResponse` Video annotation response. Included in the response + field of the Operation returned by the GetOperation + call of the google::longrunning::Operations service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input_uri, features]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = video_intelligence.AnnotateVideoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if input_uri is not None: + request.input_uri = input_uri + if features: + request.features.extend(features) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
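+ # These defaults come from the service's published retry settings:
+ # DeadlineExceeded and ServiceUnavailable errors are retried with
+ # exponential backoff (1.0s initial delay, x2.5 multiplier, capped at
+ # 120s per attempt) until the overall 600s deadline elapses.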
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "VideoIntelligenceServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceAsyncClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/client.py new file mode 100644 index 000000000000..cac0f1465452 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/client.py @@ -0,0 +1,581 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
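The async client above ends here; before its synchronous twin begins, a minimal sketch of how the pieces compose. This assumes default application credentials are available, and the `gs://` URI is a placeholder, not part of the generated code:

```python
import asyncio

from google.cloud import videointelligence_v1p1beta1


async def main():
    # Credentials are resolved from the environment (ADC) by default.
    client = videointelligence_v1p1beta1.VideoIntelligenceServiceAsyncClient()

    # annotate_video returns an AsyncOperation wrapping the long-running call.
    operation = await client.annotate_video(
        request=videointelligence_v1p1beta1.AnnotateVideoRequest(
            input_uri="gs://my-bucket/my-video.mp4",  # placeholder URI
            features=["SPEECH_TRANSCRIPTION"],
        )
    )

    # result() polls google.longrunning.Operations until the
    # AnnotateVideoResponse is available.
    response = await operation.result()
    print(response)


asyncio.run(main())
```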
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+ Dict,
+ Mapping,
+ MutableMapping,
+ MutableSequence,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.videointelligence_v1p1beta1 import gapic_version as package_version
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object] # type: ignore
+
+from google.api_core import operation # type: ignore
+from google.api_core import operation_async # type: ignore
+
+from google.cloud.videointelligence_v1p1beta1.types import video_intelligence
+
+from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport
+from .transports.grpc import VideoIntelligenceServiceGrpcTransport
+from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport
+from .transports.rest import VideoIntelligenceServiceRestTransport
+
+
+class VideoIntelligenceServiceClientMeta(type):
+ """Metaclass for the VideoIntelligenceService client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = (
+ OrderedDict()
+ ) # type: Dict[str, Type[VideoIntelligenceServiceTransport]]
+ _transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport
+ _transport_registry["rest"] = VideoIntelligenceServiceRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[VideoIntelligenceServiceTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class VideoIntelligenceServiceClient(metaclass=VideoIntelligenceServiceClientMeta):
+ """Service that implements Google Cloud Video Intelligence API."""
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "videointelligence.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> VideoIntelligenceServiceTransport: + """Returns the transport used by the client instance. + + Returns: + VideoIntelligenceServiceTransport: The transport used by the client + instance. 
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[client_options_lib.ClientOptions] = None
+ ):
+ """Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, VideoIntelligenceServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, VideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, VideoIntelligenceServiceTransport): + # transport is a VideoIntelligenceServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import videointelligence_v1p1beta1 + + def sample_annotate_video(): + # Create a client + client = videointelligence_v1p1beta1.VideoIntelligenceServiceClient() + + # Initialize request argument(s) + request = videointelligence_v1p1beta1.AnnotateVideoRequest( + features=['SPEECH_TRANSCRIPTION'], + ) + + # Make the request + operation = client.annotate_video(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.videointelligence_v1p1beta1.types.AnnotateVideoRequest, dict]): + The request object. Video annotation request. + input_uri (str): + Input video location. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following + format: ``gs://bucket-id/object-id`` (other URI formats + return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + A video URI may include wildcards in ``object-id``, and + thus identify multiple videos. Supported wildcards: '*' + to match 0 or more characters; '?' to match 1 character. + If unset, the input video should be embedded in the + request as ``input_content``. If set, ``input_content`` + should be unset. + + This corresponds to the ``input_uri`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.Feature]): + Required. Requested video annotation + features. + + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.videointelligence_v1p1beta1.types.AnnotateVideoResponse` Video annotation response. Included in the response + field of the Operation returned by the GetOperation + call of the google::longrunning::Operations service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input_uri, features]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a video_intelligence.AnnotateVideoRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, video_intelligence.AnnotateVideoRequest): + request = video_intelligence.AnnotateVideoRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
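+ # (Direct assignment to ``request.features`` below relies on proto-plus
+ # repeated fields accepting and copying any Python sequence.)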
+ if input_uri is not None: + request.input_uri = input_uri + if features is not None: + request.features = features + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_video] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "VideoIntelligenceServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/__init__.py new file mode 100644 index 000000000000..775ef9e3503a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import VideoIntelligenceServiceTransport +from .grpc import VideoIntelligenceServiceGrpcTransport +from .grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport +from .rest import ( + VideoIntelligenceServiceRestInterceptor, + VideoIntelligenceServiceRestTransport, +) + +# Compile a registry of transports. 
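+# The registry keys ("grpc", "grpc_asyncio", "rest") are the labels accepted
+# by the client's ``transport`` argument.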
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[VideoIntelligenceServiceTransport]] +_transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport +_transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport +_transport_registry["rest"] = VideoIntelligenceServiceRestTransport + +__all__ = ( + "VideoIntelligenceServiceTransport", + "VideoIntelligenceServiceGrpcTransport", + "VideoIntelligenceServiceGrpcAsyncIOTransport", + "VideoIntelligenceServiceRestTransport", + "VideoIntelligenceServiceRestInterceptor", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/base.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/base.py new file mode 100644 index 000000000000..46457b41f755 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/base.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p1beta1 import gapic_version as package_version +from google.cloud.videointelligence_v1p1beta1.types import video_intelligence + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class VideoIntelligenceServiceTransport(abc.ABC): + """Abstract transport class for VideoIntelligenceService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "videointelligence.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ """
+
+ scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+ # Don't apply the audience if the credentials were loaded from a
+ # user-supplied credentials file.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(
+ api_audience if api_audience else host
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.annotate_video: gapic_v1.method.wrap_method(
+ self.annotate_video,
+ default_retry=retries.Retry(
+ initial=1.0,
+ maximum=120.0,
+ multiplier=2.5,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("VideoIntelligenceServiceTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/grpc.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/grpc.py new file mode 100644 index 000000000000..8d2586005d15 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/grpc.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.videointelligence_v1p1beta1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport + + +class VideoIntelligenceServiceGrpcTransport(VideoIntelligenceServiceTransport): + """gRPC backend transport for VideoIntelligenceService. + + Service that implements Google Cloud Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
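+ # (This branch only runs for the deprecated ``api_mtls_endpoint``
+ # flow warned about above.)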
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ # Set ``credentials_file`` to ``None`` here as
+ # the credentials that we saved earlier should be used.
+ credentials_file=None,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "videointelligence.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service."""
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + Returns: + Callable[[~.AnnotateVideoRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1p1beta1.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("VideoIntelligenceServiceGrpcTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/grpc_asyncio.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c8f5c14ddb77 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/grpc_asyncio.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
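With the synchronous gRPC transport complete and its AsyncIO counterpart starting below, here is a short sketch of how a caller selects between them. It assumes only the registry labels defined in this PR ("grpc", "grpc_asyncio", "rest"):

```python
from google.cloud import videointelligence_v1p1beta1

# No label: the metaclass returns the first registered transport, "grpc".
grpc_client = videointelligence_v1p1beta1.VideoIntelligenceServiceClient()

# Explicit label: the same API surface carried over REST/JSON (HTTP/1.1).
rest_client = videointelligence_v1p1beta1.VideoIntelligenceServiceClient(
    transport="rest"
)

# The async client defaults to the "grpc_asyncio" transport.
async_client = videointelligence_v1p1beta1.VideoIntelligenceServiceAsyncClient()
```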
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.videointelligence_v1p1beta1.types import video_intelligence
+
+from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport
+from .grpc import VideoIntelligenceServiceGrpcTransport
+
+
+class VideoIntelligenceServiceGrpcAsyncIOTransport(VideoIntelligenceServiceTransport):
+ """gRPC AsyncIO backend transport for VideoIntelligenceService.
+
+ Service that implements Google Cloud Video Intelligence API.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "videointelligence.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "videointelligence.googleapis.com",
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[aio.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). 
+ + Returns: + Callable[[~.AnnotateVideoRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1p1beta1.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("VideoIntelligenceServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/rest.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/rest.py new file mode 100644 index 000000000000..0b0eedfdf4f9 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/rest.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.videointelligence_v1p1beta1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import VideoIntelligenceServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class VideoIntelligenceServiceRestInterceptor: + """Interceptor for VideoIntelligenceService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the VideoIntelligenceServiceRestTransport.
+
+    .. code-block:: python
+
+        class MyCustomVideoIntelligenceServiceInterceptor(VideoIntelligenceServiceRestInterceptor):
+            def pre_annotate_video(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_annotate_video(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = VideoIntelligenceServiceRestTransport(interceptor=MyCustomVideoIntelligenceServiceInterceptor())
+        client = VideoIntelligenceServiceClient(transport=transport)
+
+    """
+
+    def pre_annotate_video(
+        self,
+        request: video_intelligence.AnnotateVideoRequest,
+        metadata: Sequence[Tuple[str, str]],
+    ) -> Tuple[video_intelligence.AnnotateVideoRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for annotate_video
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the VideoIntelligenceService server.
+        """
+        return request, metadata
+
+    def post_annotate_video(
+        self, response: operations_pb2.Operation
+    ) -> operations_pb2.Operation:
+        """Post-rpc interceptor for annotate_video
+
+        Override in a subclass to manipulate the response
+        after it is returned by the VideoIntelligenceService server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class VideoIntelligenceServiceRestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: VideoIntelligenceServiceRestInterceptor
+
+
+class VideoIntelligenceServiceRestTransport(VideoIntelligenceServiceTransport):
+    """REST backend transport for VideoIntelligenceService.
+
+    Service that implements Google Cloud Video Intelligence API.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "videointelligence.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[VideoIntelligenceServiceRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or VideoIntelligenceServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
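+        # These bindings mirror the google.longrunning Operations HTTP rules
+        # for this API version: each Operations RPC is transcoded onto a
+        # v1p1beta1 REST path so the shared operations client can get, list,
+        # cancel, and delete long-running operations over HTTP/1.1.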
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1p1beta1/{name=projects/*/locations/*}/operations", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1p1beta1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1p1beta1/operations/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1p1beta1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1p1beta1/operations/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1p1beta1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1p1beta1/operations/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1p1beta1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _AnnotateVideo(VideoIntelligenceServiceRestStub): + def __hash__(self): + return hash("AnnotateVideo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: video_intelligence.AnnotateVideoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the annotate video method over HTTP. + + Args: + request (~.video_intelligence.AnnotateVideoRequest): + The request object. Video annotation request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1p1beta1/videos:annotate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_annotate_video(request, metadata) + pb_request = video_intelligence.AnnotateVideoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_video(resp) + return resp + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateVideo(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("VideoIntelligenceServiceRestTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/types/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/types/__init__.py new file mode 100644 index 000000000000..7245522c2a99 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/types/__init__.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + ShotChangeDetectionConfig, + SpeechContext, + SpeechRecognitionAlternative, + SpeechTranscription, + SpeechTranscriptionConfig, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, + WordInfo, +) + +__all__ = ( + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelFrame", + "LabelSegment", + "ShotChangeDetectionConfig", + "SpeechContext", + "SpeechRecognitionAlternative", + "SpeechTranscription", + "SpeechTranscriptionConfig", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoSegment", + "WordInfo", + "Feature", + "LabelDetectionMode", + "Likelihood", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/types/video_intelligence.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/types/video_intelligence.py new file mode 100644 index 000000000000..19d82596ef16 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p1beta1/types/video_intelligence.py @@ -0,0 +1,824 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.videointelligence.v1p1beta1", + manifest={ + "Feature", + "LabelDetectionMode", + "Likelihood", + "AnnotateVideoRequest", + "VideoContext", + "LabelDetectionConfig", + "ShotChangeDetectionConfig", + "ExplicitContentDetectionConfig", + "VideoSegment", + "LabelSegment", + "LabelFrame", + "Entity", + "LabelAnnotation", + "ExplicitContentFrame", + "ExplicitContentAnnotation", + "VideoAnnotationResults", + "AnnotateVideoResponse", + "VideoAnnotationProgress", + "AnnotateVideoProgress", + "SpeechTranscriptionConfig", + "SpeechContext", + "SpeechTranscription", + "SpeechRecognitionAlternative", + "WordInfo", + }, +) + + +class Feature(proto.Enum): + r"""Video annotation feature. + + Values: + FEATURE_UNSPECIFIED (0): + Unspecified. + LABEL_DETECTION (1): + Label detection. Detect objects, such as dog + or flower. + SHOT_CHANGE_DETECTION (2): + Shot change detection. + EXPLICIT_CONTENT_DETECTION (3): + Explicit content detection. + SPEECH_TRANSCRIPTION (6): + Speech transcription. 
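+
+    A minimal selection sketch (illustrative only; the ``gs://`` URI below
+    is a placeholder):
+
+    .. code-block:: python
+
+        from google.cloud import videointelligence_v1p1beta1 as vi
+
+        # Request label and shot-change detection in a single call.
+        request = vi.AnnotateVideoRequest(
+            input_uri="gs://bucket-id/object-id",
+            features=[vi.Feature.LABEL_DETECTION, vi.Feature.SHOT_CHANGE_DETECTION],
+        )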
+ """ + FEATURE_UNSPECIFIED = 0 + LABEL_DETECTION = 1 + SHOT_CHANGE_DETECTION = 2 + EXPLICIT_CONTENT_DETECTION = 3 + SPEECH_TRANSCRIPTION = 6 + + +class LabelDetectionMode(proto.Enum): + r"""Label detection mode. + + Values: + LABEL_DETECTION_MODE_UNSPECIFIED (0): + Unspecified. + SHOT_MODE (1): + Detect shot-level labels. + FRAME_MODE (2): + Detect frame-level labels. + SHOT_AND_FRAME_MODE (3): + Detect both shot-level and frame-level + labels. + """ + LABEL_DETECTION_MODE_UNSPECIFIED = 0 + SHOT_MODE = 1 + FRAME_MODE = 2 + SHOT_AND_FRAME_MODE = 3 + + +class Likelihood(proto.Enum): + r"""Bucketized representation of likelihood. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Unspecified likelihood. + VERY_UNLIKELY (1): + Very unlikely. + UNLIKELY (2): + Unlikely. + POSSIBLE (3): + Possible. + LIKELY (4): + Likely. + VERY_LIKELY (5): + Very likely. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class AnnotateVideoRequest(proto.Message): + r"""Video annotation request. + + Attributes: + input_uri (str): + Input video location. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + A video URI may include wildcards in ``object-id``, and thus + identify multiple videos. Supported wildcards: '*' to match + 0 or more characters; '?' to match 1 character. If unset, + the input video should be embedded in the request as + ``input_content``. If set, ``input_content`` should be + unset. + input_content (bytes): + The video data bytes. If unset, the input video(s) should be + specified via ``input_uri``. If set, ``input_uri`` should be + unset. + features (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.Feature]): + Required. Requested video annotation + features. + video_context (google.cloud.videointelligence_v1p1beta1.types.VideoContext): + Additional video context and/or + feature-specific parameters. + output_uri (str): + Optional. Location where the output (in JSON format) should + be stored. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + location_id (str): + Optional. Cloud region where annotation should take place. + Supported cloud regions: ``us-east1``, ``us-west1``, + ``europe-west1``, ``asia-east1``. If no region is specified, + a region will be determined based on video file location. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + input_content: bytes = proto.Field( + proto.BYTES, + number=6, + ) + features: MutableSequence["Feature"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Feature", + ) + video_context: "VideoContext" = proto.Field( + proto.MESSAGE, + number=3, + message="VideoContext", + ) + output_uri: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class VideoContext(proto.Message): + r"""Video context and/or feature-specific parameters. + + Attributes: + segments (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.VideoSegment]): + Video segments to annotate. 
The segments may + overlap and are not required to be contiguous or + span the whole video. If unspecified, each video + is treated as a single segment. + label_detection_config (google.cloud.videointelligence_v1p1beta1.types.LabelDetectionConfig): + Config for LABEL_DETECTION. + shot_change_detection_config (google.cloud.videointelligence_v1p1beta1.types.ShotChangeDetectionConfig): + Config for SHOT_CHANGE_DETECTION. + explicit_content_detection_config (google.cloud.videointelligence_v1p1beta1.types.ExplicitContentDetectionConfig): + Config for EXPLICIT_CONTENT_DETECTION. + speech_transcription_config (google.cloud.videointelligence_v1p1beta1.types.SpeechTranscriptionConfig): + Config for SPEECH_TRANSCRIPTION. + """ + + segments: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + label_detection_config: "LabelDetectionConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="LabelDetectionConfig", + ) + shot_change_detection_config: "ShotChangeDetectionConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="ShotChangeDetectionConfig", + ) + explicit_content_detection_config: "ExplicitContentDetectionConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="ExplicitContentDetectionConfig", + ) + speech_transcription_config: "SpeechTranscriptionConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="SpeechTranscriptionConfig", + ) + + +class LabelDetectionConfig(proto.Message): + r"""Config for LABEL_DETECTION. + + Attributes: + label_detection_mode (google.cloud.videointelligence_v1p1beta1.types.LabelDetectionMode): + What labels should be detected with LABEL_DETECTION, in + addition to video-level labels or segment-level labels. If + unspecified, defaults to ``SHOT_MODE``. + stationary_camera (bool): + Whether the video has been shot from a stationary (i.e. + non-moving) camera. When set to true, might improve + detection accuracy for moving objects. Should be used with + ``SHOT_AND_FRAME_MODE`` enabled. + model (str): + Model to use for label detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + label_detection_mode: "LabelDetectionMode" = proto.Field( + proto.ENUM, + number=1, + enum="LabelDetectionMode", + ) + stationary_camera: bool = proto.Field( + proto.BOOL, + number=2, + ) + model: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ShotChangeDetectionConfig(proto.Message): + r"""Config for SHOT_CHANGE_DETECTION. + + Attributes: + model (str): + Model to use for shot change detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExplicitContentDetectionConfig(proto.Message): + r"""Config for EXPLICIT_CONTENT_DETECTION. + + Attributes: + model (str): + Model to use for explicit content detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + + +class VideoSegment(proto.Message): + r"""Video segment. + + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the start of the segment + (inclusive). + end_time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the end of the segment + (inclusive). 
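+
+    A minimal construction sketch (illustrative only):
+
+    .. code-block:: python
+
+        from google.protobuf import duration_pb2
+
+        from google.cloud import videointelligence_v1p1beta1 as vi
+
+        # Restrict annotation to the first 30 seconds of the video.
+        segment = vi.VideoSegment(
+            start_time_offset=duration_pb2.Duration(seconds=0),
+            end_time_offset=duration_pb2.Duration(seconds=30),
+        )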
+    """
+
+    start_time_offset: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=duration_pb2.Duration,
+    )
+    end_time_offset: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=duration_pb2.Duration,
+    )
+
+
+class LabelSegment(proto.Message):
+    r"""Video segment level annotation results for label detection.
+
+    Attributes:
+        segment (google.cloud.videointelligence_v1p1beta1.types.VideoSegment):
+            Video segment where a label was detected.
+        confidence (float):
+            Confidence that the label is accurate. Range: [0, 1].
+    """
+
+    segment: "VideoSegment" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="VideoSegment",
+    )
+    confidence: float = proto.Field(
+        proto.FLOAT,
+        number=2,
+    )
+
+
+class LabelFrame(proto.Message):
+    r"""Video frame level annotation results for label detection.
+
+    Attributes:
+        time_offset (google.protobuf.duration_pb2.Duration):
+            Time-offset, relative to the beginning of the
+            video, corresponding to the video frame for this
+            location.
+        confidence (float):
+            Confidence that the label is accurate. Range: [0, 1].
+    """
+
+    time_offset: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=duration_pb2.Duration,
+    )
+    confidence: float = proto.Field(
+        proto.FLOAT,
+        number=2,
+    )
+
+
+class Entity(proto.Message):
+    r"""Detected entity from video analysis.
+
+    Attributes:
+        entity_id (str):
+            Opaque entity ID. Some IDs may be available in `Google
+            Knowledge Graph Search
+            API <https://developers.google.com/knowledge-graph/>`__.
+        description (str):
+            Textual description, e.g. ``Fixed-gear bicycle``.
+        language_code (str):
+            Language code for ``description`` in BCP-47 format.
+    """
+
+    entity_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    language_code: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class LabelAnnotation(proto.Message):
+    r"""Label annotation.
+
+    Attributes:
+        entity (google.cloud.videointelligence_v1p1beta1.types.Entity):
+            Detected entity.
+        category_entities (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.Entity]):
+            Common categories for the detected entity. E.g. when the
+            label is ``Terrier``, the category is likely ``dog``. In
+            some cases there may be more than one category, e.g.
+            ``Terrier`` could also be a ``pet``.
+        segments (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.LabelSegment]):
+            All video segments where a label was
+            detected.
+        frames (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.LabelFrame]):
+            All video frames where a label was detected.
+    """
+
+    entity: "Entity" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="Entity",
+    )
+    category_entities: MutableSequence["Entity"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message="Entity",
+    )
+    segments: MutableSequence["LabelSegment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message="LabelSegment",
+    )
+    frames: MutableSequence["LabelFrame"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message="LabelFrame",
+    )
+
+
+class ExplicitContentFrame(proto.Message):
+    r"""Video frame level annotation results for explicit content.
+
+    Attributes:
+        time_offset (google.protobuf.duration_pb2.Duration):
+            Time-offset, relative to the beginning of the
+            video, corresponding to the video frame for this
+            location.
+        pornography_likelihood (google.cloud.videointelligence_v1p1beta1.types.Likelihood):
+            Likelihood of the pornography content.
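+
+    A sketch of filtering frames by likelihood (illustrative only):
+
+    .. code-block:: python
+
+        from google.cloud import videointelligence_v1p1beta1 as vi
+
+        def is_explicit(frame: vi.ExplicitContentFrame) -> bool:
+            # Likelihood is an IntEnum, so buckets compare directly.
+            return frame.pornography_likelihood >= vi.Likelihood.LIKELY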
+ """ + + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + pornography_likelihood: "Likelihood" = proto.Field( + proto.ENUM, + number=2, + enum="Likelihood", + ) + + +class ExplicitContentAnnotation(proto.Message): + r"""Explicit content annotation (based on per-frame visual + signals only). If no explicit content has been detected in a + frame, no annotations are present for that frame. + + Attributes: + frames (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.ExplicitContentFrame]): + All video frames where explicit content was + detected. + """ + + frames: MutableSequence["ExplicitContentFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ExplicitContentFrame", + ) + + +class VideoAnnotationResults(proto.Message): + r"""Annotation results for a single video. + + Attributes: + input_uri (str): + Output only. Video file location in `Google Cloud + Storage `__. + segment_label_annotations (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.LabelAnnotation]): + Label annotations on video level or user + specified segment level. There is exactly one + element for each unique label. + shot_label_annotations (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.LabelAnnotation]): + Label annotations on shot level. + There is exactly one element for each unique + label. + frame_label_annotations (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.LabelAnnotation]): + Label annotations on frame level. + There is exactly one element for each unique + label. + shot_annotations (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.VideoSegment]): + Shot annotations. Each shot is represented as + a video segment. + explicit_annotation (google.cloud.videointelligence_v1p1beta1.types.ExplicitContentAnnotation): + Explicit content annotation. + speech_transcriptions (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.SpeechTranscription]): + Speech transcription. + error (google.rpc.status_pb2.Status): + Output only. If set, indicates an error. Note that for a + single ``AnnotateVideoRequest`` some videos may succeed and + some may fail. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + segment_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="LabelAnnotation", + ) + shot_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="LabelAnnotation", + ) + frame_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="LabelAnnotation", + ) + shot_annotations: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="VideoSegment", + ) + explicit_annotation: "ExplicitContentAnnotation" = proto.Field( + proto.MESSAGE, + number=7, + message="ExplicitContentAnnotation", + ) + speech_transcriptions: MutableSequence["SpeechTranscription"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="SpeechTranscription", + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=9, + message=status_pb2.Status, + ) + + +class AnnotateVideoResponse(proto.Message): + r"""Video annotation response. Included in the ``response`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. 
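+
+    A retrieval sketch (illustrative only; assumes ``operation`` is the
+    long-running operation returned by ``annotate_video``):
+
+    .. code-block:: python
+
+        response = operation.result(timeout=600)
+        for result in response.annotation_results:
+            print(result.input_uri)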
+
+    Attributes:
+        annotation_results (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.VideoAnnotationResults]):
+            Annotation results for all videos specified in
+            ``AnnotateVideoRequest``.
+    """
+
+    annotation_results: MutableSequence["VideoAnnotationResults"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="VideoAnnotationResults",
+    )
+
+
+class VideoAnnotationProgress(proto.Message):
+    r"""Annotation progress for a single video.
+
+    Attributes:
+        input_uri (str):
+            Output only. Video file location in `Google Cloud
+            Storage <https://cloud.google.com/storage/>`__.
+        progress_percent (int):
+            Output only. Approximate percentage processed
+            thus far. Guaranteed to be 100 when fully
+            processed.
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time when the request was
+            received.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Time of the most recent update.
+    """
+
+    input_uri: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    progress_percent: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class AnnotateVideoProgress(proto.Message):
+    r"""Video annotation progress. Included in the ``metadata`` field of the
+    ``Operation`` returned by the ``GetOperation`` call of the
+    ``google::longrunning::Operations`` service.
+
+    Attributes:
+        annotation_progress (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.VideoAnnotationProgress]):
+            Progress metadata for all videos specified in
+            ``AnnotateVideoRequest``.
+    """
+
+    annotation_progress: MutableSequence[
+        "VideoAnnotationProgress"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="VideoAnnotationProgress",
+    )
+
+
+class SpeechTranscriptionConfig(proto.Message):
+    r"""Config for SPEECH_TRANSCRIPTION.
+
+    Attributes:
+        language_code (str):
+            Required. The language of the supplied audio as a
+            `BCP-47 <https://www.rfc-editor.org/rfc/bcp/bcp47.txt>`__
+            language tag. Example: "en-US". See `Language
+            Support <https://cloud.google.com/speech/docs/languages>`__
+            for a list of the currently supported language codes.
+        max_alternatives (int):
+            Optional. Maximum number of recognition hypotheses to be
+            returned. Specifically, the maximum number of
+            ``SpeechRecognitionAlternative`` messages within each
+            ``SpeechTranscription``. The server may return fewer than
+            ``max_alternatives``. Valid values are ``0``-``30``. A value
+            of ``0`` or ``1`` will return a maximum of one. If omitted,
+            will return a maximum of one.
+        filter_profanity (bool):
+            Optional. If set to ``true``, the server will attempt to
+            filter out profanities, replacing all but the initial
+            character in each filtered word with asterisks, e.g. "f***".
+            If set to ``false`` or omitted, profanities won't be
+            filtered out.
+        speech_contexts (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.SpeechContext]):
+            Optional. A means to provide context to
+            assist the speech recognition.
+        enable_automatic_punctuation (bool):
+            Optional. If 'true', adds punctuation to
+            recognition result hypotheses. This feature is
+            only available in select languages. Setting this
+            for requests in other languages has no effect at
+            all. The default 'false' value does not add
+            punctuation to result hypotheses. NOTE: "This is
+            currently offered as an experimental service,
+            complimentary to all users.
In the future this + may be exclusively available as a premium + feature.". + audio_tracks (MutableSequence[int]): + Optional. For file formats, such as MXF or + MKV, supporting multiple audio tracks, specify + up to two tracks. Default: track 0. + """ + + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + max_alternatives: int = proto.Field( + proto.INT32, + number=2, + ) + filter_profanity: bool = proto.Field( + proto.BOOL, + number=3, + ) + speech_contexts: MutableSequence["SpeechContext"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="SpeechContext", + ) + enable_automatic_punctuation: bool = proto.Field( + proto.BOOL, + number=5, + ) + audio_tracks: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=6, + ) + + +class SpeechContext(proto.Message): + r"""Provides "hints" to the speech recognizer to favor specific + words and phrases in the results. + + Attributes: + phrases (MutableSequence[str]): + Optional. A list of strings containing words and phrases + "hints" so that the speech recognition is more likely to + recognize them. This can be used to improve the accuracy for + specific words and phrases, for example, if specific + commands are typically spoken by the user. This can also be + used to add additional words to the vocabulary of the + recognizer. See `usage + limits `__. + """ + + phrases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class SpeechTranscription(proto.Message): + r"""A speech recognition result corresponding to a portion of the + audio. + + Attributes: + alternatives (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.SpeechRecognitionAlternative]): + May contain one or more recognition hypotheses (up to the + maximum specified in ``max_alternatives``). These + alternatives are ordered in terms of accuracy, with the top + (first) alternative being the most probable, as ranked by + the recognizer. + """ + + alternatives: MutableSequence["SpeechRecognitionAlternative"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SpeechRecognitionAlternative", + ) + + +class SpeechRecognitionAlternative(proto.Message): + r"""Alternative hypotheses (a.k.a. n-best list). + + Attributes: + transcript (str): + Output only. Transcript text representing the + words that the user spoke. + confidence (float): + Output only. The confidence estimate between 0.0 and 1.0. A + higher number indicates an estimated greater likelihood that + the recognized words are correct. This field is set only for + the top alternative. This field is not guaranteed to be + accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. + words (MutableSequence[google.cloud.videointelligence_v1p1beta1.types.WordInfo]): + Output only. A list of word-specific + information for each recognized word. + """ + + transcript: str = proto.Field( + proto.STRING, + number=1, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + words: MutableSequence["WordInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="WordInfo", + ) + + +class WordInfo(proto.Message): + r"""Word-specific information for recognized words. Word information is + only included in the response when certain request parameters are + set, such as ``enable_word_time_offsets``. + + Attributes: + start_time (google.protobuf.duration_pb2.Duration): + Output only. 
Time offset relative to the beginning of the + audio, and corresponding to the start of the spoken word. + This field is only set if ``enable_word_time_offsets=true`` + and only in the top hypothesis. This is an experimental + feature and the accuracy of the time offset can vary. + end_time (google.protobuf.duration_pb2.Duration): + Output only. Time offset relative to the beginning of the + audio, and corresponding to the end of the spoken word. This + field is only set if ``enable_word_time_offsets=true`` and + only in the top hypothesis. This is an experimental feature + and the accuracy of the time offset can vary. + word (str): + Output only. The word corresponding to this + set of information. + """ + + start_time: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + end_time: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + word: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/__init__.py new file mode 100644 index 000000000000..2594d06ffd18 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/__init__.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.videointelligence_v1p2beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, +) +from .types.video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + NormalizedBoundingBox, + NormalizedBoundingPoly, + NormalizedVertex, + ObjectTrackingAnnotation, + ObjectTrackingFrame, + ShotChangeDetectionConfig, + TextAnnotation, + TextDetectionConfig, + TextFrame, + TextSegment, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, +) + +__all__ = ( + "VideoIntelligenceServiceAsyncClient", + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "Feature", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelDetectionMode", + "LabelFrame", + "LabelSegment", + "Likelihood", + "NormalizedBoundingBox", + "NormalizedBoundingPoly", + "NormalizedVertex", + "ObjectTrackingAnnotation", + "ObjectTrackingFrame", + "ShotChangeDetectionConfig", + "TextAnnotation", + "TextDetectionConfig", + "TextFrame", + "TextSegment", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoIntelligenceServiceClient", + "VideoSegment", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_metadata.json b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_metadata.json new file mode 100644 index 000000000000..66b196cf8adf --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_metadata.json @@ -0,0 +1,43 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.videointelligence_v1p2beta1", + "protoPackage": "google.cloud.videointelligence.v1p2beta1", + "schema": "1.0", + "services": { + "VideoIntelligenceService": { + "clients": { + "grpc": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "grpc-async": { + "libraryClient": "VideoIntelligenceServiceAsyncClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "rest": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_version.py new file mode 100644 index 000000000000..84f00fd3f92c --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/py.typed b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/py.typed new file mode 100644 index 000000000000..e7fb166bf3e5 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-videointelligence package uses inline types. diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/__init__.py new file mode 100644 index 000000000000..9bfaa3bf2e62 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import VideoIntelligenceServiceAsyncClient +from .client import VideoIntelligenceServiceClient + +__all__ = ( + "VideoIntelligenceServiceClient", + "VideoIntelligenceServiceAsyncClient", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/async_client.py new file mode 100644 index 000000000000..3920d904c5f7 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/async_client.py @@ -0,0 +1,380 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p2beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore + +from google.cloud.videointelligence_v1p2beta1.types import video_intelligence + +from .client import VideoIntelligenceServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport + + +class VideoIntelligenceServiceAsyncClient: + """Service that implements Google Cloud Video Intelligence API.""" + + _client: VideoIntelligenceServiceClient + + DEFAULT_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(VideoIntelligenceServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + VideoIntelligenceServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + 
VideoIntelligenceServiceClient.common_project_path
+    )
+    parse_common_project_path = staticmethod(
+        VideoIntelligenceServiceClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(
+        VideoIntelligenceServiceClient.common_location_path
+    )
+    parse_common_location_path = staticmethod(
+        VideoIntelligenceServiceClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            VideoIntelligenceServiceAsyncClient: The constructed client.
+        """
+        return VideoIntelligenceServiceClient.from_service_account_info.__func__(VideoIntelligenceServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            VideoIntelligenceServiceAsyncClient: The constructed client.
+        """
+        return VideoIntelligenceServiceClient.from_service_account_file.__func__(VideoIntelligenceServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return VideoIntelligenceServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> VideoIntelligenceServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            VideoIntelligenceServiceTransport: The transport used by the client instance.
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(VideoIntelligenceServiceClient).get_transport_class, + type(VideoIntelligenceServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, VideoIntelligenceServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.VideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = VideoIntelligenceServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import videointelligence_v1p2beta1 + + async def sample_annotate_video(): + # Create a client + client = videointelligence_v1p2beta1.VideoIntelligenceServiceAsyncClient() + + # Initialize request argument(s) + request = videointelligence_v1p2beta1.AnnotateVideoRequest( + features=['OBJECT_TRACKING'], + ) + + # Make the request + operation = client.annotate_video(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.videointelligence_v1p2beta1.types.AnnotateVideoRequest, dict]]): + The request object. Video annotation request. + input_uri (:class:`str`): + Input video location. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following + format: ``gs://bucket-id/object-id`` (other URI formats + return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + A video URI may include wildcards in ``object-id``, and + thus identify multiple videos. Supported wildcards: '*' + to match 0 or more characters; '?' to match 1 character. + If unset, the input video should be embedded in the + request as ``input_content``. If set, ``input_content`` + should be unset. + + This corresponds to the ``input_uri`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`MutableSequence[google.cloud.videointelligence_v1p2beta1.types.Feature]`): + Required. Requested video annotation + features. + + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.videointelligence_v1p2beta1.types.AnnotateVideoResponse` Video annotation response. Included in the response + field of the Operation returned by the GetOperation + call of the google::longrunning::Operations service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input_uri, features]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = video_intelligence.AnnotateVideoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if input_uri is not None: + request.input_uri = input_uri + if features: + request.features.extend(features) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
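+        # The default policy below retries transient DeadlineExceeded and
+        # ServiceUnavailable errors with exponential backoff (1s initial
+        # delay, 2.5x multiplier, 120s cap) until the 600s deadline expires.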
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "VideoIntelligenceServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceAsyncClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/client.py new file mode 100644 index 000000000000..2e8380c3036b --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/client.py @@ -0,0 +1,581 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import (
+    Dict,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.videointelligence_v1p2beta1 import gapic_version as package_version
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+
+from google.cloud.videointelligence_v1p2beta1.types import video_intelligence
+
+from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport
+from .transports.grpc import VideoIntelligenceServiceGrpcTransport
+from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport
+from .transports.rest import VideoIntelligenceServiceRestTransport
+
+
+class VideoIntelligenceServiceClientMeta(type):
+    """Metaclass for the VideoIntelligenceService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[VideoIntelligenceServiceTransport]]
+    _transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = VideoIntelligenceServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[VideoIntelligenceServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class VideoIntelligenceServiceClient(metaclass=VideoIntelligenceServiceClientMeta):
+    """Service that implements Google Cloud Video Intelligence API."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "videointelligence.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> VideoIntelligenceServiceTransport: + """Returns the transport used by the client instance. + + Returns: + VideoIntelligenceServiceTransport: The transport used by the client + instance. 
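A short sketch of the two factory constructors above; the key-file path is a hypothetical placeholder, and ``from_service_account_json`` is simply the alias bound to ``from_service_account_file``:

.. code-block:: python

    import json

    from google.cloud import videointelligence_v1p2beta1

    VideoClient = videointelligence_v1p2beta1.VideoIntelligenceServiceClient

    # Build a client from a service account key file on disk.
    client = VideoClient.from_service_account_file("service-account.json")

    # Or from already-parsed key material, e.g. fetched from a secret store.
    with open("service-account.json") as f:
        client = VideoClient.from_service_account_info(json.load(f))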
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, VideoIntelligenceServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, VideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
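A sketch of the endpoint-resolution rules implemented above; the overridden endpoint is a hypothetical placeholder:

.. code-block:: python

    import os

    from google.api_core.client_options import ClientOptions
    from google.cloud import videointelligence_v1p2beta1

    VideoClient = videointelligence_v1p2beta1.VideoIntelligenceServiceClient

    # With no overrides, the default endpoint is selected.
    endpoint, cert_source = VideoClient.get_mtls_endpoint_and_cert_source()
    assert endpoint == "videointelligence.googleapis.com"

    # GOOGLE_API_USE_MTLS_ENDPOINT="always" forces the mTLS endpoint.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    endpoint, _ = VideoClient.get_mtls_endpoint_and_cert_source()
    assert endpoint == "videointelligence.mtls.googleapis.com"

    # An explicit client_options.api_endpoint takes precedence over both.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"
    options = ClientOptions(api_endpoint="vi.example.internal")  # hypothetical
    endpoint, _ = VideoClient.get_mtls_endpoint_and_cert_source(client_options=options)
    assert endpoint == "vi.example.internal"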
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, VideoIntelligenceServiceTransport): + # transport is a VideoIntelligenceServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            #   - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import videointelligence_v1p2beta1
+
+            def sample_annotate_video():
+                # Create a client
+                client = videointelligence_v1p2beta1.VideoIntelligenceServiceClient()
+
+                # Initialize request argument(s)
+                request = videointelligence_v1p2beta1.AnnotateVideoRequest(
+                    features=['OBJECT_TRACKING'],
+                )
+
+                # Make the request
+                operation = client.annotate_video(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.videointelligence_v1p2beta1.types.AnnotateVideoRequest, dict]):
+                The request object. Video annotation request.
+            input_uri (str):
+                Input video location. Currently, only `Google Cloud
+                Storage <https://cloud.google.com/storage/>`__ URIs are
+                supported, which must be specified in the following
+                format: ``gs://bucket-id/object-id`` (other URI formats
+                return
+                [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]).
+                For more information, see `Request
+                URIs <https://cloud.google.com/storage/docs/request-endpoints>`__.
+                A video URI may include wildcards in ``object-id``, and
+                thus identify multiple videos. Supported wildcards: '*'
+                to match 0 or more characters; '?' to match 1 character.
+                If unset, the input video should be embedded in the
+                request as ``input_content``. If set, ``input_content``
+                should be unset.
+
+                This corresponds to the ``input_uri`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            features (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.Feature]):
+                Required. Requested video annotation
+                features.
+
+                This corresponds to the ``features`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.videointelligence_v1p2beta1.types.AnnotateVideoResponse` Video annotation response. Included in the response
+                   field of the Operation returned by the GetOperation
+                   call of the google::longrunning::Operations service.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([input_uri, features])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a video_intelligence.AnnotateVideoRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, video_intelligence.AnnotateVideoRequest):
+            request = video_intelligence.AnnotateVideoRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if input_uri is not None: + request.input_uri = input_uri + if features is not None: + request.features = features + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_video] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "VideoIntelligenceServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/__init__.py new file mode 100644 index 000000000000..775ef9e3503a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import VideoIntelligenceServiceTransport +from .grpc import VideoIntelligenceServiceGrpcTransport +from .grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport +from .rest import ( + VideoIntelligenceServiceRestInterceptor, + VideoIntelligenceServiceRestTransport, +) + +# Compile a registry of transports. 
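The sync client's ``__enter__``/``__exit__`` pair above makes the context-manager form the simplest safe usage when the transport is not shared; a sketch with a hypothetical input URI:

.. code-block:: python

    from google.cloud import videointelligence_v1p2beta1

    # Exiting the block closes the underlying transport.
    with videointelligence_v1p2beta1.VideoIntelligenceServiceClient() as client:
        operation = client.annotate_video(
            input_uri="gs://example-bucket/example-video.mp4",  # hypothetical
            features=["OBJECT_TRACKING"],
        )
        # Block until the LRO finishes; the generated defaults retry
        # DeadlineExceeded and ServiceUnavailable with a 600s deadline.
        print(operation.result(timeout=600))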
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[VideoIntelligenceServiceTransport]] +_transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport +_transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport +_transport_registry["rest"] = VideoIntelligenceServiceRestTransport + +__all__ = ( + "VideoIntelligenceServiceTransport", + "VideoIntelligenceServiceGrpcTransport", + "VideoIntelligenceServiceGrpcAsyncIOTransport", + "VideoIntelligenceServiceRestTransport", + "VideoIntelligenceServiceRestInterceptor", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/base.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/base.py new file mode 100644 index 000000000000..04acd1d5af7d --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/base.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p2beta1 import gapic_version as package_version +from google.cloud.videointelligence_v1p2beta1.types import video_intelligence + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class VideoIntelligenceServiceTransport(abc.ABC): + """Abstract transport class for VideoIntelligenceService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "videointelligence.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.annotate_video: gapic_v1.method.wrap_method( + self.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
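The defaults wrapped in ``_prep_wrapped_messages`` above apply whenever a call passes no explicit ``retry``/``timeout``; both can be overridden per call. A sketch, with a hypothetical URI and deliberately tighter settings:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import videointelligence_v1p2beta1

    client = videointelligence_v1p2beta1.VideoIntelligenceServiceClient()

    tighter_retry = retries.Retry(
        initial=1.0,
        maximum=30.0,
        multiplier=2.0,
        deadline=120.0,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
    )
    operation = client.annotate_video(
        input_uri="gs://example-bucket/example-video.mp4",  # hypothetical
        features=["OBJECT_TRACKING"],
        retry=tighter_retry,
        timeout=120.0,
    )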
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("VideoIntelligenceServiceTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/grpc.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/grpc.py new file mode 100644 index 000000000000..94a184a89882 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/grpc.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.videointelligence_v1p2beta1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport + + +class VideoIntelligenceServiceGrpcTransport(VideoIntelligenceServiceTransport): + """gRPC backend transport for VideoIntelligenceService. + + Service that implements Google Cloud Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
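Because ``create_channel`` applies the saved credentials and default scopes, a pre-built channel can also be handed to the transport, in which case the transport ignores credential arguments as documented above. A minimal sketch:

.. code-block:: python

    from google.cloud import videointelligence_v1p2beta1
    from google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports import (
        VideoIntelligenceServiceGrpcTransport,
    )

    channel = VideoIntelligenceServiceGrpcTransport.create_channel(
        "videointelligence.googleapis.com"
    )
    transport = VideoIntelligenceServiceGrpcTransport(channel=channel)
    client = videointelligence_v1p2beta1.VideoIntelligenceServiceClient(
        transport=transport
    )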
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + Returns: + Callable[[~.AnnotateVideoRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1p2beta1.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("VideoIntelligenceServiceGrpcTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/grpc_asyncio.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..bc0c776a9956 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/grpc_asyncio.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.videointelligence_v1p2beta1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .grpc import VideoIntelligenceServiceGrpcTransport + + +class VideoIntelligenceServiceGrpcAsyncIOTransport(VideoIntelligenceServiceTransport): + """gRPC AsyncIO backend transport for VideoIntelligenceService. + + Service that implements Google Cloud Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). 
+ + Returns: + Callable[[~.AnnotateVideoRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1p2beta1.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("VideoIntelligenceServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/rest.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/rest.py new file mode 100644 index 000000000000..275a20c78e15 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/services/video_intelligence_service/transports/rest.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.videointelligence_v1p2beta1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import VideoIntelligenceServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class VideoIntelligenceServiceRestInterceptor: + """Interceptor for VideoIntelligenceService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the VideoIntelligenceServiceRestTransport. + + .. code-block:: python + class MyCustomVideoIntelligenceServiceInterceptor(VideoIntelligenceServiceRestInterceptor): + def pre_annotate_video(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_annotate_video(self, response): + logging.log(f"Received response: {response}") + return response + + transport = VideoIntelligenceServiceRestTransport(interceptor=MyCustomVideoIntelligenceServiceInterceptor()) + client = VideoIntelligenceServiceClient(transport=transport) + + + """ + + def pre_annotate_video( + self, + request: video_intelligence.AnnotateVideoRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[video_intelligence.AnnotateVideoRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for annotate_video + + Override in a subclass to manipulate the request or metadata + before they are sent to the VideoIntelligenceService server. + """ + return request, metadata + + def post_annotate_video( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for annotate_video + + Override in a subclass to manipulate the response + after it is returned by the VideoIntelligenceService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class VideoIntelligenceServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: VideoIntelligenceServiceRestInterceptor + + +class VideoIntelligenceServiceRestTransport(VideoIntelligenceServiceTransport): + """REST backend transport for VideoIntelligenceService. + + Service that implements Google Cloud Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[VideoIntelligenceServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or VideoIntelligenceServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
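+        # The http_options mapping below tells the REST operations
+        # transport how the google.longrunning.Operations RPCs map onto
+        # v1p2beta1 URLs, so the Operation returned by annotate_video can
+        # be polled, listed, cancelled, or deleted over HTTP.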
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1p2beta1/{name=projects/*/locations/*}/operations", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1p2beta1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1p2beta1/operations/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1p2beta1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1p2beta1/operations/{name=projects/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1p2beta1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1p2beta1/operations/{name=projects/*/locations/*/operations/*}:cancel", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1p2beta1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _AnnotateVideo(VideoIntelligenceServiceRestStub): + def __hash__(self): + return hash("AnnotateVideo") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: video_intelligence.AnnotateVideoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the annotate video method over HTTP. + + Args: + request (~.video_intelligence.AnnotateVideoRequest): + The request object. Video annotation request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
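+
+            The returned message can be refreshed through the transport's
+            ``operations_client`` property above (a sketch, where ``op``
+            is the Operation returned by this call):
+
+            .. code-block:: python
+
+                refreshed = transport.operations_client.get_operation(
+                    name=op.name
+                )
+                if refreshed.done:
+                    ...  # response or error is now populated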
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1p2beta1/videos:annotate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_annotate_video(request, metadata) + pb_request = video_intelligence.AnnotateVideoRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_video(resp) + return resp + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateVideo(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("VideoIntelligenceServiceRestTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/types/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/types/__init__.py new file mode 100644 index 000000000000..8d53c357093c --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/types/__init__.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + NormalizedBoundingBox, + NormalizedBoundingPoly, + NormalizedVertex, + ObjectTrackingAnnotation, + ObjectTrackingFrame, + ShotChangeDetectionConfig, + TextAnnotation, + TextDetectionConfig, + TextFrame, + TextSegment, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, +) + +__all__ = ( + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelFrame", + "LabelSegment", + "NormalizedBoundingBox", + "NormalizedBoundingPoly", + "NormalizedVertex", + "ObjectTrackingAnnotation", + "ObjectTrackingFrame", + "ShotChangeDetectionConfig", + "TextAnnotation", + "TextDetectionConfig", + "TextFrame", + "TextSegment", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoSegment", + "Feature", + "LabelDetectionMode", + "Likelihood", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/types/video_intelligence.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/types/video_intelligence.py new file mode 100644 index 000000000000..e439ccca8eae --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p2beta1/types/video_intelligence.py @@ -0,0 +1,927 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.videointelligence.v1p2beta1", + manifest={ + "Feature", + "LabelDetectionMode", + "Likelihood", + "AnnotateVideoRequest", + "VideoContext", + "LabelDetectionConfig", + "ShotChangeDetectionConfig", + "ExplicitContentDetectionConfig", + "TextDetectionConfig", + "VideoSegment", + "LabelSegment", + "LabelFrame", + "Entity", + "LabelAnnotation", + "ExplicitContentFrame", + "ExplicitContentAnnotation", + "NormalizedBoundingBox", + "VideoAnnotationResults", + "AnnotateVideoResponse", + "VideoAnnotationProgress", + "AnnotateVideoProgress", + "NormalizedVertex", + "NormalizedBoundingPoly", + "TextSegment", + "TextFrame", + "TextAnnotation", + "ObjectTrackingFrame", + "ObjectTrackingAnnotation", + }, +) + + +class Feature(proto.Enum): + r"""Video annotation feature. + + Values: + FEATURE_UNSPECIFIED (0): + Unspecified. 
+ LABEL_DETECTION (1): + Label detection. Detect objects, such as dog + or flower. + SHOT_CHANGE_DETECTION (2): + Shot change detection. + EXPLICIT_CONTENT_DETECTION (3): + Explicit content detection. + TEXT_DETECTION (7): + OCR text detection and tracking. + OBJECT_TRACKING (9): + Object detection and tracking. + """ + FEATURE_UNSPECIFIED = 0 + LABEL_DETECTION = 1 + SHOT_CHANGE_DETECTION = 2 + EXPLICIT_CONTENT_DETECTION = 3 + TEXT_DETECTION = 7 + OBJECT_TRACKING = 9 + + +class LabelDetectionMode(proto.Enum): + r"""Label detection mode. + + Values: + LABEL_DETECTION_MODE_UNSPECIFIED (0): + Unspecified. + SHOT_MODE (1): + Detect shot-level labels. + FRAME_MODE (2): + Detect frame-level labels. + SHOT_AND_FRAME_MODE (3): + Detect both shot-level and frame-level + labels. + """ + LABEL_DETECTION_MODE_UNSPECIFIED = 0 + SHOT_MODE = 1 + FRAME_MODE = 2 + SHOT_AND_FRAME_MODE = 3 + + +class Likelihood(proto.Enum): + r"""Bucketized representation of likelihood. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Unspecified likelihood. + VERY_UNLIKELY (1): + Very unlikely. + UNLIKELY (2): + Unlikely. + POSSIBLE (3): + Possible. + LIKELY (4): + Likely. + VERY_LIKELY (5): + Very likely. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class AnnotateVideoRequest(proto.Message): + r"""Video annotation request. + + Attributes: + input_uri (str): + Input video location. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + A video URI may include wildcards in ``object-id``, and thus + identify multiple videos. Supported wildcards: '*' to match + 0 or more characters; '?' to match 1 character. If unset, + the input video should be embedded in the request as + ``input_content``. If set, ``input_content`` should be + unset. + input_content (bytes): + The video data bytes. If unset, the input video(s) should be + specified via ``input_uri``. If set, ``input_uri`` should be + unset. + features (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.Feature]): + Required. Requested video annotation + features. + video_context (google.cloud.videointelligence_v1p2beta1.types.VideoContext): + Additional video context and/or + feature-specific parameters. + output_uri (str): + Optional. Location where the output (in JSON format) should + be stored. Currently, only `Google Cloud + Storage `__ URIs are + supported, which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + location_id (str): + Optional. Cloud region where annotation should take place. + Supported cloud regions: ``us-east1``, ``us-west1``, + ``europe-west1``, ``asia-east1``. If no region is specified, + a region will be determined based on video file location. 
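+
+    A minimal request might look like the following (a sketch; the
+    bucket and object names are hypothetical):
+
+    .. code-block:: python
+
+        from google.cloud import videointelligence_v1p2beta1
+
+        request = videointelligence_v1p2beta1.AnnotateVideoRequest(
+            input_uri="gs://example-bucket/example-video.mp4",
+            features=[videointelligence_v1p2beta1.Feature.LABEL_DETECTION],
+        )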
+ """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + input_content: bytes = proto.Field( + proto.BYTES, + number=6, + ) + features: MutableSequence["Feature"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Feature", + ) + video_context: "VideoContext" = proto.Field( + proto.MESSAGE, + number=3, + message="VideoContext", + ) + output_uri: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class VideoContext(proto.Message): + r"""Video context and/or feature-specific parameters. + + Attributes: + segments (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.VideoSegment]): + Video segments to annotate. The segments may + overlap and are not required to be contiguous or + span the whole video. If unspecified, each video + is treated as a single segment. + label_detection_config (google.cloud.videointelligence_v1p2beta1.types.LabelDetectionConfig): + Config for LABEL_DETECTION. + shot_change_detection_config (google.cloud.videointelligence_v1p2beta1.types.ShotChangeDetectionConfig): + Config for SHOT_CHANGE_DETECTION. + explicit_content_detection_config (google.cloud.videointelligence_v1p2beta1.types.ExplicitContentDetectionConfig): + Config for EXPLICIT_CONTENT_DETECTION. + text_detection_config (google.cloud.videointelligence_v1p2beta1.types.TextDetectionConfig): + Config for TEXT_DETECTION. + """ + + segments: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + label_detection_config: "LabelDetectionConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="LabelDetectionConfig", + ) + shot_change_detection_config: "ShotChangeDetectionConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="ShotChangeDetectionConfig", + ) + explicit_content_detection_config: "ExplicitContentDetectionConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="ExplicitContentDetectionConfig", + ) + text_detection_config: "TextDetectionConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="TextDetectionConfig", + ) + + +class LabelDetectionConfig(proto.Message): + r"""Config for LABEL_DETECTION. + + Attributes: + label_detection_mode (google.cloud.videointelligence_v1p2beta1.types.LabelDetectionMode): + What labels should be detected with LABEL_DETECTION, in + addition to video-level labels or segment-level labels. If + unspecified, defaults to ``SHOT_MODE``. + stationary_camera (bool): + Whether the video has been shot from a stationary (i.e. + non-moving) camera. When set to true, might improve + detection accuracy for moving objects. Should be used with + ``SHOT_AND_FRAME_MODE`` enabled. + model (str): + Model to use for label detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + label_detection_mode: "LabelDetectionMode" = proto.Field( + proto.ENUM, + number=1, + enum="LabelDetectionMode", + ) + stationary_camera: bool = proto.Field( + proto.BOOL, + number=2, + ) + model: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ShotChangeDetectionConfig(proto.Message): + r"""Config for SHOT_CHANGE_DETECTION. + + Attributes: + model (str): + Model to use for shot change detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ExplicitContentDetectionConfig(proto.Message): + r"""Config for EXPLICIT_CONTENT_DETECTION. 
+ + Attributes: + model (str): + Model to use for explicit content detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TextDetectionConfig(proto.Message): + r"""Config for TEXT_DETECTION. + + Attributes: + language_hints (MutableSequence[str]): + Language hint can be specified if the + language to be detected is known a priori. It + can increase the accuracy of the detection. + Language hint must be language code in BCP-47 + format. + + Automatic language detection is performed if no + hint is provided. + """ + + language_hints: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class VideoSegment(proto.Message): + r"""Video segment. + + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the start of the segment + (inclusive). + end_time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the end of the segment + (inclusive). + """ + + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + end_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class LabelSegment(proto.Message): + r"""Video segment level annotation results for label detection. + + Attributes: + segment (google.cloud.videointelligence_v1p2beta1.types.VideoSegment): + Video segment where a label was detected. + confidence (float): + Confidence that the label is accurate. Range: [0, 1]. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class LabelFrame(proto.Message): + r"""Video frame level annotation results for label detection. + + Attributes: + time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the video frame for this + location. + confidence (float): + Confidence that the label is accurate. Range: [0, 1]. + """ + + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class Entity(proto.Message): + r"""Detected entity from video analysis. + + Attributes: + entity_id (str): + Opaque entity ID. Some IDs may be available in `Google + Knowledge Graph Search + API `__. + description (str): + Textual description, e.g. ``Fixed-gear bicycle``. + language_code (str): + Language code for ``description`` in BCP-47 format. + """ + + entity_id: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + language_code: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LabelAnnotation(proto.Message): + r"""Label annotation. + + Attributes: + entity (google.cloud.videointelligence_v1p2beta1.types.Entity): + Detected entity. + category_entities (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.Entity]): + Common categories for the detected entity. E.g. when the + label is ``Terrier`` the category is likely ``dog``. And in + some cases there might be more than one categories e.g. + ``Terrier`` could also be a ``pet``. 
+ segments (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.LabelSegment]): + All video segments where a label was + detected. + frames (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.LabelFrame]): + All video frames where a label was detected. + """ + + entity: "Entity" = proto.Field( + proto.MESSAGE, + number=1, + message="Entity", + ) + category_entities: MutableSequence["Entity"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Entity", + ) + segments: MutableSequence["LabelSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="LabelSegment", + ) + frames: MutableSequence["LabelFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="LabelFrame", + ) + + +class ExplicitContentFrame(proto.Message): + r"""Video frame level annotation results for explicit content. + + Attributes: + time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the video frame for this + location. + pornography_likelihood (google.cloud.videointelligence_v1p2beta1.types.Likelihood): + Likelihood of the pornography content.. + """ + + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + pornography_likelihood: "Likelihood" = proto.Field( + proto.ENUM, + number=2, + enum="Likelihood", + ) + + +class ExplicitContentAnnotation(proto.Message): + r"""Explicit content annotation (based on per-frame visual + signals only). If no explicit content has been detected in a + frame, no annotations are present for that frame. + + Attributes: + frames (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.ExplicitContentFrame]): + All video frames where explicit content was + detected. + """ + + frames: MutableSequence["ExplicitContentFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ExplicitContentFrame", + ) + + +class NormalizedBoundingBox(proto.Message): + r"""Normalized bounding box. The normalized vertex coordinates are + relative to the original image. Range: [0, 1]. + + Attributes: + left (float): + Left X coordinate. + top (float): + Top Y coordinate. + right (float): + Right X coordinate. + bottom (float): + Bottom Y coordinate. + """ + + left: float = proto.Field( + proto.FLOAT, + number=1, + ) + top: float = proto.Field( + proto.FLOAT, + number=2, + ) + right: float = proto.Field( + proto.FLOAT, + number=3, + ) + bottom: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class VideoAnnotationResults(proto.Message): + r"""Annotation results for a single video. + + Attributes: + input_uri (str): + Video file location in `Google Cloud + Storage `__. + segment_label_annotations (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.LabelAnnotation]): + Label annotations on video level or user + specified segment level. There is exactly one + element for each unique label. + shot_label_annotations (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.LabelAnnotation]): + Label annotations on shot level. + There is exactly one element for each unique + label. + frame_label_annotations (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.LabelAnnotation]): + Label annotations on frame level. + There is exactly one element for each unique + label. + shot_annotations (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.VideoSegment]): + Shot annotations. Each shot is represented as + a video segment. 
+ explicit_annotation (google.cloud.videointelligence_v1p2beta1.types.ExplicitContentAnnotation): + Explicit content annotation. + text_annotations (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.TextAnnotation]): + OCR text detection and tracking. + Annotations for list of detected text snippets. + Each will have list of frame information + associated with it. + object_annotations (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.ObjectTrackingAnnotation]): + Annotations for list of objects detected and + tracked in video. + error (google.rpc.status_pb2.Status): + If set, indicates an error. Note that for a single + ``AnnotateVideoRequest`` some videos may succeed and some + may fail. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + segment_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="LabelAnnotation", + ) + shot_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="LabelAnnotation", + ) + frame_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="LabelAnnotation", + ) + shot_annotations: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="VideoSegment", + ) + explicit_annotation: "ExplicitContentAnnotation" = proto.Field( + proto.MESSAGE, + number=7, + message="ExplicitContentAnnotation", + ) + text_annotations: MutableSequence["TextAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="TextAnnotation", + ) + object_annotations: MutableSequence[ + "ObjectTrackingAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="ObjectTrackingAnnotation", + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=9, + message=status_pb2.Status, + ) + + +class AnnotateVideoResponse(proto.Message): + r"""Video annotation response. Included in the ``response`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + Attributes: + annotation_results (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.VideoAnnotationResults]): + Annotation results for all videos specified in + ``AnnotateVideoRequest``. + """ + + annotation_results: MutableSequence["VideoAnnotationResults"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoAnnotationResults", + ) + + +class VideoAnnotationProgress(proto.Message): + r"""Annotation progress for a single video. + + Attributes: + input_uri (str): + Video file location in `Google Cloud + Storage `__. + progress_percent (int): + Approximate percentage processed thus far. + Guaranteed to be 100 when fully processed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the request was received. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Time of the most recent update. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + progress_percent: int = proto.Field( + proto.INT32, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class AnnotateVideoProgress(proto.Message): + r"""Video annotation progress. 
Included in the ``metadata`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + Attributes: + annotation_progress (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.VideoAnnotationProgress]): + Progress metadata for all videos specified in + ``AnnotateVideoRequest``. + """ + + annotation_progress: MutableSequence[ + "VideoAnnotationProgress" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoAnnotationProgress", + ) + + +class NormalizedVertex(proto.Message): + r"""A vertex represents a 2D point in the image. + NOTE: the normalized vertex coordinates are relative to the + original image and range from 0 to 1. + + Attributes: + x (float): + X coordinate. + y (float): + Y coordinate. + """ + + x: float = proto.Field( + proto.FLOAT, + number=1, + ) + y: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class NormalizedBoundingPoly(proto.Message): + r"""Normalized bounding polygon for text (that might not be aligned with + axis). Contains list of the corner points in clockwise order + starting from top-left corner. For example, for a rectangular + bounding box: When the text is horizontal it might look like: 0----1 + \| \| 3----2 + + When it's clockwise rotated 180 degrees around the top-left corner + it becomes: 2----3 \| \| 1----0 + + and the vertex order will still be (0, 1, 2, 3). Note that values + can be less than 0, or greater than 1 due to trignometric + calculations for location of the box. + + Attributes: + vertices (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.NormalizedVertex]): + Normalized vertices of the bounding polygon. + """ + + vertices: MutableSequence["NormalizedVertex"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NormalizedVertex", + ) + + +class TextSegment(proto.Message): + r"""Video segment level annotation results for text detection. + + Attributes: + segment (google.cloud.videointelligence_v1p2beta1.types.VideoSegment): + Video segment where a text snippet was + detected. + confidence (float): + Confidence for the track of detected text. It + is calculated as the highest over all frames + where OCR detected text appears. + frames (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.TextFrame]): + Information related to the frames where OCR + detected text appears. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + frames: MutableSequence["TextFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="TextFrame", + ) + + +class TextFrame(proto.Message): + r"""Video frame level annotation results for text annotation + (OCR). Contains information regarding timestamp and bounding box + locations for the frames containing detected OCR text snippets. + + Attributes: + rotated_bounding_box (google.cloud.videointelligence_v1p2beta1.types.NormalizedBoundingPoly): + Bounding polygon of the detected text for + this frame. + time_offset (google.protobuf.duration_pb2.Duration): + Timestamp of this frame. + """ + + rotated_bounding_box: "NormalizedBoundingPoly" = proto.Field( + proto.MESSAGE, + number=1, + message="NormalizedBoundingPoly", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class TextAnnotation(proto.Message): + r"""Annotations related to one detected OCR text snippet. 
This + will contain the corresponding text, confidence value, and frame + level information for each detection. + + Attributes: + text (str): + The detected text. + segments (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.TextSegment]): + All video segments where OCR detected text + appears. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + segments: MutableSequence["TextSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="TextSegment", + ) + + +class ObjectTrackingFrame(proto.Message): + r"""Video frame level annotations for object detection and + tracking. This field stores per frame location, time offset, and + confidence. + + Attributes: + normalized_bounding_box (google.cloud.videointelligence_v1p2beta1.types.NormalizedBoundingBox): + The normalized bounding box location of this + object track for the frame. + time_offset (google.protobuf.duration_pb2.Duration): + The timestamp of the frame in microseconds. + """ + + normalized_bounding_box: "NormalizedBoundingBox" = proto.Field( + proto.MESSAGE, + number=1, + message="NormalizedBoundingBox", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class ObjectTrackingAnnotation(proto.Message): + r"""Annotations corresponding to one tracked object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + segment (google.cloud.videointelligence_v1p2beta1.types.VideoSegment): + Non-streaming batch mode ONLY. + Each object track corresponds to one video + segment where it appears. + + This field is a member of `oneof`_ ``track_info``. + track_id (int): + Streaming mode ONLY. In streaming mode, we do not know the + end time of a tracked object before it is completed. Hence, + there is no VideoSegment info returned. Instead, we provide + a unique identifiable integer track_id so that the customers + can correlate the results of the ongoing + ObjectTrackAnnotation of the same track_id over time. + + This field is a member of `oneof`_ ``track_info``. + entity (google.cloud.videointelligence_v1p2beta1.types.Entity): + Entity to specify the object category that + this track is labeled as. + confidence (float): + Object category's labeling confidence of this + track. + frames (MutableSequence[google.cloud.videointelligence_v1p2beta1.types.ObjectTrackingFrame]): + Information corresponding to all frames where + this object track appears. 
+ """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=3, + oneof="track_info", + message="VideoSegment", + ) + track_id: int = proto.Field( + proto.INT64, + number=5, + oneof="track_info", + ) + entity: "Entity" = proto.Field( + proto.MESSAGE, + number=1, + message="Entity", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=4, + ) + frames: MutableSequence["ObjectTrackingFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ObjectTrackingFrame", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/__init__.py new file mode 100644 index 000000000000..c8a292de7805 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/__init__.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.videointelligence_v1p3beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.streaming_video_intelligence_service import ( + StreamingVideoIntelligenceServiceAsyncClient, + StreamingVideoIntelligenceServiceClient, +) +from .services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, +) +from .types.video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + Celebrity, + CelebrityRecognitionAnnotation, + CelebrityTrack, + DetectedAttribute, + DetectedLandmark, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + FaceDetectionAnnotation, + FaceDetectionConfig, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + LogoRecognitionAnnotation, + NormalizedBoundingBox, + NormalizedBoundingPoly, + NormalizedVertex, + ObjectTrackingAnnotation, + ObjectTrackingConfig, + ObjectTrackingFrame, + PersonDetectionAnnotation, + PersonDetectionConfig, + ShotChangeDetectionConfig, + SpeechContext, + SpeechRecognitionAlternative, + SpeechTranscription, + SpeechTranscriptionConfig, + StreamingAnnotateVideoRequest, + StreamingAnnotateVideoResponse, + StreamingAutomlActionRecognitionConfig, + StreamingAutomlClassificationConfig, + StreamingAutomlObjectTrackingConfig, + StreamingExplicitContentDetectionConfig, + StreamingFeature, + StreamingLabelDetectionConfig, + StreamingObjectTrackingConfig, + StreamingShotChangeDetectionConfig, + StreamingStorageConfig, + StreamingVideoAnnotationResults, + StreamingVideoConfig, + TextAnnotation, + TextDetectionConfig, + TextFrame, + TextSegment, + TimestampedObject, + Track, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, + WordInfo, +) + +__all__ = ( + "StreamingVideoIntelligenceServiceAsyncClient", + 
"VideoIntelligenceServiceAsyncClient", + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "Celebrity", + "CelebrityRecognitionAnnotation", + "CelebrityTrack", + "DetectedAttribute", + "DetectedLandmark", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "FaceDetectionAnnotation", + "FaceDetectionConfig", + "Feature", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelDetectionMode", + "LabelFrame", + "LabelSegment", + "Likelihood", + "LogoRecognitionAnnotation", + "NormalizedBoundingBox", + "NormalizedBoundingPoly", + "NormalizedVertex", + "ObjectTrackingAnnotation", + "ObjectTrackingConfig", + "ObjectTrackingFrame", + "PersonDetectionAnnotation", + "PersonDetectionConfig", + "ShotChangeDetectionConfig", + "SpeechContext", + "SpeechRecognitionAlternative", + "SpeechTranscription", + "SpeechTranscriptionConfig", + "StreamingAnnotateVideoRequest", + "StreamingAnnotateVideoResponse", + "StreamingAutomlActionRecognitionConfig", + "StreamingAutomlClassificationConfig", + "StreamingAutomlObjectTrackingConfig", + "StreamingExplicitContentDetectionConfig", + "StreamingFeature", + "StreamingLabelDetectionConfig", + "StreamingObjectTrackingConfig", + "StreamingShotChangeDetectionConfig", + "StreamingStorageConfig", + "StreamingVideoAnnotationResults", + "StreamingVideoConfig", + "StreamingVideoIntelligenceServiceClient", + "TextAnnotation", + "TextDetectionConfig", + "TextFrame", + "TextSegment", + "TimestampedObject", + "Track", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoIntelligenceServiceClient", + "VideoSegment", + "WordInfo", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_metadata.json b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_metadata.json new file mode 100644 index 000000000000..040a74a18188 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_metadata.json @@ -0,0 +1,57 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.videointelligence_v1p3beta1", + "protoPackage": "google.cloud.videointelligence.v1p3beta1", + "schema": "1.0", + "services": { + "StreamingVideoIntelligenceService": { + "clients": { + "grpc": { + "libraryClient": "StreamingVideoIntelligenceServiceClient", + "rpcs": { + "StreamingAnnotateVideo": { + "methods": [ + "streaming_annotate_video" + ] + } + } + }, + "grpc-async": { + "libraryClient": "StreamingVideoIntelligenceServiceAsyncClient", + "rpcs": { + "StreamingAnnotateVideo": { + "methods": [ + "streaming_annotate_video" + ] + } + } + } + } + }, + "VideoIntelligenceService": { + "clients": { + "grpc": { + "libraryClient": "VideoIntelligenceServiceClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + }, + "grpc-async": { + "libraryClient": "VideoIntelligenceServiceAsyncClient", + "rpcs": { + "AnnotateVideo": { + "methods": [ + "annotate_video" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_version.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_version.py new file mode 100644 index 000000000000..84f00fd3f92c --- /dev/null +++ 
b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.11.4" # {x-release-please-version} diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/py.typed b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/py.typed new file mode 100644 index 000000000000..e7fb166bf3e5 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-videointelligence package uses inline types. diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/__init__.py new file mode 100644 index 000000000000..f98c9f94029b --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import StreamingVideoIntelligenceServiceAsyncClient +from .client import StreamingVideoIntelligenceServiceClient + +__all__ = ( + "StreamingVideoIntelligenceServiceClient", + "StreamingVideoIntelligenceServiceAsyncClient", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/async_client.py new file mode 100644 index 000000000000..3a2f0ccf607b --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/async_client.py @@ -0,0 +1,350 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import ( + AsyncIterable, + AsyncIterator, + Awaitable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p3beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.rpc import status_pb2 # type: ignore + +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + +from .client import StreamingVideoIntelligenceServiceClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + StreamingVideoIntelligenceServiceTransport, +) +from .transports.grpc_asyncio import ( + StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, +) + + +class StreamingVideoIntelligenceServiceAsyncClient: + """Service that implements streaming Video Intelligence API.""" + + _client: StreamingVideoIntelligenceServiceClient + + DEFAULT_ENDPOINT = StreamingVideoIntelligenceServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ( + StreamingVideoIntelligenceServiceClient.DEFAULT_MTLS_ENDPOINT + ) + + common_billing_account_path = staticmethod( + StreamingVideoIntelligenceServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + StreamingVideoIntelligenceServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + StreamingVideoIntelligenceServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + StreamingVideoIntelligenceServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + 
StreamingVideoIntelligenceServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + StreamingVideoIntelligenceServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + StreamingVideoIntelligenceServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + StreamingVideoIntelligenceServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + StreamingVideoIntelligenceServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + StreamingVideoIntelligenceServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StreamingVideoIntelligenceServiceAsyncClient: The constructed client. + """ + return StreamingVideoIntelligenceServiceClient.from_service_account_info.__func__(StreamingVideoIntelligenceServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StreamingVideoIntelligenceServiceAsyncClient: The constructed client. + """ + return StreamingVideoIntelligenceServiceClient.from_service_account_file.__func__(StreamingVideoIntelligenceServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return StreamingVideoIntelligenceServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> StreamingVideoIntelligenceServiceTransport: + """Returns the transport used by the client instance. + + Returns: + StreamingVideoIntelligenceServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(StreamingVideoIntelligenceServiceClient).get_transport_class, + type(StreamingVideoIntelligenceServiceClient), + ) + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[ + str, StreamingVideoIntelligenceServiceTransport + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the streaming video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.StreamingVideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = StreamingVideoIntelligenceServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + def streaming_annotate_video( + self, + requests: Optional[ + AsyncIterator[video_intelligence.StreamingAnnotateVideoRequest] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[video_intelligence.StreamingAnnotateVideoResponse]]: + r"""Performs video annotation with bidirectional + streaming: emitting results while sending video/audio + bytes. This method is only available via the gRPC API + (not REST). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import videointelligence_v1p3beta1 + + async def sample_streaming_annotate_video(): + # Create a client + client = videointelligence_v1p3beta1.StreamingVideoIntelligenceServiceAsyncClient() + + # Initialize request argument(s) + request = videointelligence_v1p3beta1.StreamingAnnotateVideoRequest( + ) + + # This method expects an iterator which contains + # 'videointelligence_v1p3beta1.StreamingAnnotateVideoRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_annotate_video(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.videointelligence_v1p3beta1.types.StreamingAnnotateVideoRequest`]): + The request object AsyncIterator. The top-level message sent by the client for the + ``StreamingAnnotateVideo`` method. Multiple + ``StreamingAnnotateVideoRequest`` messages are sent. The + first message must only contain a + ``StreamingVideoConfig`` message. All subsequent + messages must only contain ``input_content`` data. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.videointelligence_v1p3beta1.types.StreamingAnnotateVideoResponse]: + StreamingAnnotateVideoResponse is the only message returned to the client + by StreamingAnnotateVideo. A series of zero or more + StreamingAnnotateVideoResponse messages are streamed + back to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.streaming_annotate_video, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10800.0, + ), + default_timeout=10800.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
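+        # Note: the retry policy configured above backs off exponentially
+        # (0.1s initial delay, x1.3 multiplier, capped at 60s) on
+        # DeadlineExceeded or ServiceUnavailable, for up to 3 hours.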
+ return response + + async def __aenter__(self) -> "StreamingVideoIntelligenceServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("StreamingVideoIntelligenceServiceAsyncClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/client.py new file mode 100644 index 000000000000..d2d158edfce3 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/client.py @@ -0,0 +1,544 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Iterable, + Iterator, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p3beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.rpc import status_pb2 # type: ignore + +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + StreamingVideoIntelligenceServiceTransport, +) +from .transports.grpc import StreamingVideoIntelligenceServiceGrpcTransport +from .transports.grpc_asyncio import ( + StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, +) + + +class StreamingVideoIntelligenceServiceClientMeta(type): + """Metaclass for the StreamingVideoIntelligenceService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[StreamingVideoIntelligenceServiceTransport]] + _transport_registry["grpc"] = StreamingVideoIntelligenceServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = StreamingVideoIntelligenceServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[StreamingVideoIntelligenceServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class StreamingVideoIntelligenceServiceClient( + metaclass=StreamingVideoIntelligenceServiceClientMeta +): + """Service that implements streaming Video Intelligence API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "videointelligence.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StreamingVideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StreamingVideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> StreamingVideoIntelligenceServiceTransport: + """Returns the transport used by the client instance. 
+
+        Returns:
+            StreamingVideoIntelligenceServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client.
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, StreamingVideoIntelligenceServiceTransport] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the streaming video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, StreamingVideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, StreamingVideoIntelligenceServiceTransport): + # transport is a StreamingVideoIntelligenceServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def streaming_annotate_video( + self, + requests: Optional[ + Iterator[video_intelligence.StreamingAnnotateVideoRequest] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[video_intelligence.StreamingAnnotateVideoResponse]: + r"""Performs video annotation with bidirectional + streaming: emitting results while sending video/audio + bytes. This method is only available via the gRPC API + (not REST). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import videointelligence_v1p3beta1 + + def sample_streaming_annotate_video(): + # Create a client + client = videointelligence_v1p3beta1.StreamingVideoIntelligenceServiceClient() + + # Initialize request argument(s) + request = videointelligence_v1p3beta1.StreamingAnnotateVideoRequest( + ) + + # This method expects an iterator which contains + # 'videointelligence_v1p3beta1.StreamingAnnotateVideoRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_annotate_video(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.videointelligence_v1p3beta1.types.StreamingAnnotateVideoRequest]): + The request object iterator. The top-level message sent by the client for the + ``StreamingAnnotateVideo`` method. Multiple + ``StreamingAnnotateVideoRequest`` messages are sent. The + first message must only contain a + ``StreamingVideoConfig`` message. All subsequent + messages must only contain ``input_content`` data. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.videointelligence_v1p3beta1.types.StreamingAnnotateVideoResponse]: + StreamingAnnotateVideoResponse is the only message returned to the client + by StreamingAnnotateVideo. A series of zero or more + StreamingAnnotateVideoResponse messages are streamed + back to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_annotate_video] + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "StreamingVideoIntelligenceServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("StreamingVideoIntelligenceServiceClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/__init__.py new file mode 100644 index 000000000000..9f3358031688 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import StreamingVideoIntelligenceServiceTransport +from .grpc import StreamingVideoIntelligenceServiceGrpcTransport +from .grpc_asyncio import StreamingVideoIntelligenceServiceGrpcAsyncIOTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[StreamingVideoIntelligenceServiceTransport]] +_transport_registry["grpc"] = StreamingVideoIntelligenceServiceGrpcTransport +_transport_registry[ + "grpc_asyncio" +] = StreamingVideoIntelligenceServiceGrpcAsyncIOTransport + +__all__ = ( + "StreamingVideoIntelligenceServiceTransport", + "StreamingVideoIntelligenceServiceGrpcTransport", + "StreamingVideoIntelligenceServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/base.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/base.py new file mode 100644 index 000000000000..b97043abaa92 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/base.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.videointelligence_v1p3beta1 import gapic_version as package_version
+from google.cloud.videointelligence_v1p3beta1.types import video_intelligence
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=package_version.__version__
+)
+
+
+class StreamingVideoIntelligenceServiceTransport(abc.ABC):
+    """Abstract transport class for StreamingVideoIntelligenceService."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    DEFAULT_HOST: str = "videointelligence.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if a credentials file was passed by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
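+        # (Self-signed JWTs skip the OAuth token exchange entirely; see
+        # https://google.aip.dev/auth/4111 for background.)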
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.streaming_annotate_video: gapic_v1.method.wrap_method( + self.streaming_annotate_video, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=10800.0, + ), + default_timeout=10800.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def streaming_annotate_video( + self, + ) -> Callable[ + [video_intelligence.StreamingAnnotateVideoRequest], + Union[ + video_intelligence.StreamingAnnotateVideoResponse, + Awaitable[video_intelligence.StreamingAnnotateVideoResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("StreamingVideoIntelligenceServiceTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/grpc.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/grpc.py new file mode 100644 index 000000000000..b295eb209796 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/grpc.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import grpc # type: ignore + +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO, StreamingVideoIntelligenceServiceTransport + + +class StreamingVideoIntelligenceServiceGrpcTransport( + StreamingVideoIntelligenceServiceTransport +): + """gRPC backend transport for StreamingVideoIntelligenceService. + + Service that implements streaming Video Intelligence API. 
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "videointelligence.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[grpc.Channel] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def streaming_annotate_video( + self, + ) -> Callable[ + [video_intelligence.StreamingAnnotateVideoRequest], + video_intelligence.StreamingAnnotateVideoResponse, + ]: + r"""Return a callable for the streaming annotate video method over gRPC. + + Performs video annotation with bidirectional + streaming: emitting results while sending video/audio + bytes. This method is only available via the gRPC API + (not REST). + + Returns: + Callable[[~.StreamingAnnotateVideoRequest], + ~.StreamingAnnotateVideoResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "streaming_annotate_video" not in self._stubs: + self._stubs["streaming_annotate_video"] = self.grpc_channel.stream_stream( + "/google.cloud.videointelligence.v1p3beta1.StreamingVideoIntelligenceService/StreamingAnnotateVideo", + request_serializer=video_intelligence.StreamingAnnotateVideoRequest.serialize, + response_deserializer=video_intelligence.StreamingAnnotateVideoResponse.deserialize, + ) + return self._stubs["streaming_annotate_video"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("StreamingVideoIntelligenceServiceGrpcTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/grpc_asyncio.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4efc6cc378b3 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/streaming_video_intelligence_service/transports/grpc_asyncio.py @@ -0,0 +1,272 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.videointelligence_v1p3beta1.types import video_intelligence
+
+from .base import DEFAULT_CLIENT_INFO, StreamingVideoIntelligenceServiceTransport
+from .grpc import StreamingVideoIntelligenceServiceGrpcTransport
+
+
+class StreamingVideoIntelligenceServiceGrpcAsyncIOTransport(
+    StreamingVideoIntelligenceServiceTransport
+):
+    """gRPC AsyncIO backend transport for StreamingVideoIntelligenceService.
+
+    Service that implements streaming Video Intelligence API.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "videointelligence.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def streaming_annotate_video( + self, + ) -> Callable[ + [video_intelligence.StreamingAnnotateVideoRequest], + Awaitable[video_intelligence.StreamingAnnotateVideoResponse], + ]: + r"""Return a callable for the streaming annotate video method over gRPC. + + Performs video annotation with bidirectional + streaming: emitting results while sending video/audio + bytes. This method is only available via the gRPC API + (not REST). + + Returns: + Callable[[~.StreamingAnnotateVideoRequest], + Awaitable[~.StreamingAnnotateVideoResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "streaming_annotate_video" not in self._stubs: + self._stubs["streaming_annotate_video"] = self.grpc_channel.stream_stream( + "/google.cloud.videointelligence.v1p3beta1.StreamingVideoIntelligenceService/StreamingAnnotateVideo", + request_serializer=video_intelligence.StreamingAnnotateVideoRequest.serialize, + response_deserializer=video_intelligence.StreamingAnnotateVideoResponse.deserialize, + ) + return self._stubs["streaming_annotate_video"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("StreamingVideoIntelligenceServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/__init__.py new file mode 100644 index 000000000000..9bfaa3bf2e62 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import VideoIntelligenceServiceAsyncClient +from .client import VideoIntelligenceServiceClient + +__all__ = ( + "VideoIntelligenceServiceClient", + "VideoIntelligenceServiceAsyncClient", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/async_client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/async_client.py new file mode 100644 index 000000000000..0fc19dd465af --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/async_client.py @@ -0,0 +1,380 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p3beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore + +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + +from .client import VideoIntelligenceServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport + + +class VideoIntelligenceServiceAsyncClient: + """Service that implements the Video Intelligence API.""" + + _client: VideoIntelligenceServiceClient + + DEFAULT_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = VideoIntelligenceServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(VideoIntelligenceServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + VideoIntelligenceServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + VideoIntelligenceServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + VideoIntelligenceServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + VideoIntelligenceServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceAsyncClient: The constructed client. + """ + return VideoIntelligenceServiceClient.from_service_account_info.__func__(VideoIntelligenceServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+
+        Returns:
+            VideoIntelligenceServiceAsyncClient: The constructed client.
+        """
+        return VideoIntelligenceServiceClient.from_service_account_file.__func__(VideoIntelligenceServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return VideoIntelligenceServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> VideoIntelligenceServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            VideoIntelligenceServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(VideoIntelligenceServiceClient).get_transport_class,
+        type(VideoIntelligenceServiceClient),
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, VideoIntelligenceServiceTransport] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the video intelligence service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.VideoIntelligenceServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = VideoIntelligenceServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def annotate_video(
+        self,
+        request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None,
+        *,
+        input_uri: Optional[str] = None,
+        features: Optional[MutableSequence[video_intelligence.Feature]] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> operation_async.AsyncOperation:
+        r"""Performs asynchronous video annotation. Progress and results can
+        be retrieved through the ``google.longrunning.Operations``
+        interface. ``Operation.metadata`` contains
+        ``AnnotateVideoProgress`` (progress). ``Operation.response``
+        contains ``AnnotateVideoResponse`` (results).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import videointelligence_v1p3beta1
+
+            async def sample_annotate_video():
+                # Create a client
+                client = videointelligence_v1p3beta1.VideoIntelligenceServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = videointelligence_v1p3beta1.AnnotateVideoRequest(
+                    features=['PERSON_DETECTION'],
+                )
+
+                # Make the request
+                operation = client.annotate_video(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.videointelligence_v1p3beta1.types.AnnotateVideoRequest, dict]]):
+                The request object. Video annotation request.
+            input_uri (:class:`str`):
+                Input video location. Currently, only `Cloud
+                Storage <https://cloud.google.com/storage/>`__ URIs are
+                supported. URIs must be specified in the following
+                format: ``gs://bucket-id/object-id`` (other URI formats
+                return
+                [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]).
+                For more information, see `Request
+                URIs <https://cloud.google.com/storage/docs/request-endpoints>`__.
+                To identify multiple videos, a video URI may include
+                wildcards in the ``object-id``. Supported wildcards: '*'
+                to match 0 or more characters; '?' to match 1 character.
+                If unset, the input video should be embedded in the
+                request as ``input_content``. If set, ``input_content``
+                must be unset.
+ + This corresponds to the ``input_uri`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + features (:class:`MutableSequence[google.cloud.videointelligence_v1p3beta1.types.Feature]`): + Required. Requested video annotation + features. + + This corresponds to the ``features`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.videointelligence_v1p3beta1.types.AnnotateVideoResponse` Video annotation response. Included in the response + field of the Operation returned by the GetOperation + call of the google::longrunning::Operations service. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([input_uri, features]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = video_intelligence.AnnotateVideoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if input_uri is not None: + request.input_uri = input_uri + if features: + request.features.extend(features) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. 
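+        # The AsyncOperation returned below is typically consumed as in the
+        # docstring sample above (``response = (await operation).result()``).
+        # The wrapped RPC retries DeadlineExceeded and ServiceUnavailable with
+        # exponential backoff: waits of roughly 1.0, 2.5, 6.25, ... seconds
+        # (multiplier 2.5, capped at 120s per attempt) within an overall 600s
+        # deadline.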
+ return response + + async def __aenter__(self) -> "VideoIntelligenceServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceAsyncClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/client.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/client.py new file mode 100644 index 000000000000..420377abe173 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/client.py @@ -0,0 +1,579 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p3beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore + +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + +from .transports.base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport +from .transports.grpc import VideoIntelligenceServiceGrpcTransport +from .transports.grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport + + +class VideoIntelligenceServiceClientMeta(type): + """Metaclass for the VideoIntelligenceService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
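+
+    A minimal lookup sketch (the registry itself is populated just below):
+
+    .. code-block:: python
+
+        transport_cls = VideoIntelligenceServiceClient.get_transport_class("grpc")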
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[VideoIntelligenceServiceTransport]] + _transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport + _transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[VideoIntelligenceServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class VideoIntelligenceServiceClient(metaclass=VideoIntelligenceServiceClientMeta): + """Service that implements the Video Intelligence API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "videointelligence.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + VideoIntelligenceServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> VideoIntelligenceServiceTransport: + """Returns the transport used by the client instance. + + Returns: + VideoIntelligenceServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, VideoIntelligenceServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the video intelligence service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, VideoIntelligenceServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, VideoIntelligenceServiceTransport): + # transport is a VideoIntelligenceServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def annotate_video( + self, + request: Optional[Union[video_intelligence.AnnotateVideoRequest, dict]] = None, + *, + input_uri: Optional[str] = None, + features: Optional[MutableSequence[video_intelligence.Feature]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import videointelligence_v1p3beta1
+
+            def sample_annotate_video():
+                # Create a client
+                client = videointelligence_v1p3beta1.VideoIntelligenceServiceClient()
+
+                # Initialize request argument(s)
+                request = videointelligence_v1p3beta1.AnnotateVideoRequest(
+                    features=['PERSON_DETECTION'],
+                )
+
+                # Make the request
+                operation = client.annotate_video(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.videointelligence_v1p3beta1.types.AnnotateVideoRequest, dict]):
+                The request object. Video annotation request.
+            input_uri (str):
+                Input video location. Currently, only `Cloud
+                Storage <https://cloud.google.com/storage/>`__ URIs are
+                supported. URIs must be specified in the following
+                format: ``gs://bucket-id/object-id`` (other URI formats
+                return
+                [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]).
+                For more information, see `Request
+                URIs <https://cloud.google.com/storage/docs/request-endpoints>`__.
+                To identify multiple videos, a video URI may include
+                wildcards in the ``object-id``. Supported wildcards: '*'
+                to match 0 or more characters; '?' to match 1 character.
+                If unset, the input video should be embedded in the
+                request as ``input_content``. If set, ``input_content``
+                must be unset.
+
+                This corresponds to the ``input_uri`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            features (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.Feature]):
+                Required. Requested video annotation
+                features.
+
+                This corresponds to the ``features`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.videointelligence_v1p3beta1.types.AnnotateVideoResponse` Video annotation response. Included in the response
+                field of the Operation returned by the GetOperation
+                call of the google::longrunning::Operations service.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([input_uri, features])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a video_intelligence.AnnotateVideoRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, video_intelligence.AnnotateVideoRequest):
+            request = video_intelligence.AnnotateVideoRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
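+        # (``request`` is a proto-plus message, so plain assignment works for
+        # both scalar and repeated fields below; assigning a Python list to
+        # ``request.features`` replaces the repeated field's contents.)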
+ if input_uri is not None: + request.input_uri = input_uri + if features is not None: + request.features = features + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.annotate_video] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + video_intelligence.AnnotateVideoResponse, + metadata_type=video_intelligence.AnnotateVideoProgress, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "VideoIntelligenceServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("VideoIntelligenceServiceClient",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/__init__.py new file mode 100644 index 000000000000..8adcbe5256ca --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/__init__.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import VideoIntelligenceServiceTransport +from .grpc import VideoIntelligenceServiceGrpcTransport +from .grpc_asyncio import VideoIntelligenceServiceGrpcAsyncIOTransport + +# Compile a registry of transports. 
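+# The keys below are the values accepted by the client's ``transport``
+# argument; for example, ``_transport_registry["grpc_asyncio"]`` resolves to
+# the AsyncIO transport class.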
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[VideoIntelligenceServiceTransport]] +_transport_registry["grpc"] = VideoIntelligenceServiceGrpcTransport +_transport_registry["grpc_asyncio"] = VideoIntelligenceServiceGrpcAsyncIOTransport + +__all__ = ( + "VideoIntelligenceServiceTransport", + "VideoIntelligenceServiceGrpcTransport", + "VideoIntelligenceServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/base.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/base.py new file mode 100644 index 000000000000..4bdf6a64fa08 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/base.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.videointelligence_v1p3beta1 import gapic_version as package_version +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class VideoIntelligenceServiceTransport(abc.ABC): + """Abstract transport class for VideoIntelligenceService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "videointelligence.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.annotate_video: gapic_v1.method.wrap_method( + self.annotate_video, + default_retry=retries.Retry( + initial=1.0, + maximum=120.0, + multiplier=2.5, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("VideoIntelligenceServiceTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/grpc.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/grpc.py new file mode 100644 index 000000000000..fa20f1f06e68 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/grpc.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import grpc # type: ignore + +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + +from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport + + +class VideoIntelligenceServiceGrpcTransport(VideoIntelligenceServiceTransport): + """gRPC backend transport for VideoIntelligenceService. + + Service that implements the Video Intelligence API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
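+                # (The ``client_cert_source`` callback is expected to return a
+                # ``(cert, key)`` tuple of PEM-encoded bytes, e.g. read from
+                # local files; see the argument description above.)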
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
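+
+        An illustrative use, fetching a long-running operation by name (the
+        operation name here is hypothetical):
+
+        .. code-block:: python
+
+            op = transport.operations_client.get_operation("operations/1234")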
+ """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[[video_intelligence.AnnotateVideoRequest], operations_pb2.Operation]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). + + Returns: + Callable[[~.AnnotateVideoRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1p3beta1.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("VideoIntelligenceServiceGrpcTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/grpc_asyncio.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..2448408a516d --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/services/video_intelligence_service/transports/grpc_asyncio.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.videointelligence_v1p3beta1.types import video_intelligence
+
+from .base import DEFAULT_CLIENT_INFO, VideoIntelligenceServiceTransport
+from .grpc import VideoIntelligenceServiceGrpcTransport
+
+
+class VideoIntelligenceServiceGrpcAsyncIOTransport(VideoIntelligenceServiceTransport):
+    """gRPC AsyncIO backend transport for VideoIntelligenceService.
+
+    Service that implements the Video Intelligence API.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "videointelligence.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
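+
+        An illustrative call (assumes application default credentials are
+        configured in the environment):
+
+        .. code-block:: python
+
+            channel = VideoIntelligenceServiceGrpcAsyncIOTransport.create_channel()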
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "videointelligence.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def annotate_video( + self, + ) -> Callable[ + [video_intelligence.AnnotateVideoRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the annotate video method over gRPC. + + Performs asynchronous video annotation. Progress and results can + be retrieved through the ``google.longrunning.Operations`` + interface. ``Operation.metadata`` contains + ``AnnotateVideoProgress`` (progress). ``Operation.response`` + contains ``AnnotateVideoResponse`` (results). 
+ + Returns: + Callable[[~.AnnotateVideoRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "annotate_video" not in self._stubs: + self._stubs["annotate_video"] = self.grpc_channel.unary_unary( + "/google.cloud.videointelligence.v1p3beta1.VideoIntelligenceService/AnnotateVideo", + request_serializer=video_intelligence.AnnotateVideoRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["annotate_video"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("VideoIntelligenceServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/types/__init__.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/types/__init__.py new file mode 100644 index 000000000000..0d6dedffe7a0 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/types/__init__.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
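# --- Editorial example (not part of this diff) ------------------------------
# A minimal usage sketch for the async transport defined above. The generated
# VideoIntelligenceServiceAsyncClient rides on this GrpcAsyncIOTransport;
# annotate_video returns a long-running operation whose metadata is
# AnnotateVideoProgress and whose response is AnnotateVideoResponse. The
# gs:// URI below is a placeholder.
import asyncio

from google.cloud import videointelligence_v1p3beta1 as vi


async def main() -> None:
    client = vi.VideoIntelligenceServiceAsyncClient()
    operation = await client.annotate_video(
        request={
            "input_uri": "gs://example-bucket/example.mp4",  # placeholder
            "features": [vi.Feature.LABEL_DETECTION],
        }
    )
    response = await operation.result(timeout=600)
    for result in response.annotation_results:
        for label in result.segment_label_annotations:
            print(label.entity.description)


asyncio.run(main())
# -----------------------------------------------------------------------------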
+# +from .video_intelligence import ( + AnnotateVideoProgress, + AnnotateVideoRequest, + AnnotateVideoResponse, + Celebrity, + CelebrityRecognitionAnnotation, + CelebrityTrack, + DetectedAttribute, + DetectedLandmark, + Entity, + ExplicitContentAnnotation, + ExplicitContentDetectionConfig, + ExplicitContentFrame, + FaceDetectionAnnotation, + FaceDetectionConfig, + Feature, + LabelAnnotation, + LabelDetectionConfig, + LabelDetectionMode, + LabelFrame, + LabelSegment, + Likelihood, + LogoRecognitionAnnotation, + NormalizedBoundingBox, + NormalizedBoundingPoly, + NormalizedVertex, + ObjectTrackingAnnotation, + ObjectTrackingConfig, + ObjectTrackingFrame, + PersonDetectionAnnotation, + PersonDetectionConfig, + ShotChangeDetectionConfig, + SpeechContext, + SpeechRecognitionAlternative, + SpeechTranscription, + SpeechTranscriptionConfig, + StreamingAnnotateVideoRequest, + StreamingAnnotateVideoResponse, + StreamingAutomlActionRecognitionConfig, + StreamingAutomlClassificationConfig, + StreamingAutomlObjectTrackingConfig, + StreamingExplicitContentDetectionConfig, + StreamingFeature, + StreamingLabelDetectionConfig, + StreamingObjectTrackingConfig, + StreamingShotChangeDetectionConfig, + StreamingStorageConfig, + StreamingVideoAnnotationResults, + StreamingVideoConfig, + TextAnnotation, + TextDetectionConfig, + TextFrame, + TextSegment, + TimestampedObject, + Track, + VideoAnnotationProgress, + VideoAnnotationResults, + VideoContext, + VideoSegment, + WordInfo, +) + +__all__ = ( + "AnnotateVideoProgress", + "AnnotateVideoRequest", + "AnnotateVideoResponse", + "Celebrity", + "CelebrityRecognitionAnnotation", + "CelebrityTrack", + "DetectedAttribute", + "DetectedLandmark", + "Entity", + "ExplicitContentAnnotation", + "ExplicitContentDetectionConfig", + "ExplicitContentFrame", + "FaceDetectionAnnotation", + "FaceDetectionConfig", + "LabelAnnotation", + "LabelDetectionConfig", + "LabelFrame", + "LabelSegment", + "LogoRecognitionAnnotation", + "NormalizedBoundingBox", + "NormalizedBoundingPoly", + "NormalizedVertex", + "ObjectTrackingAnnotation", + "ObjectTrackingConfig", + "ObjectTrackingFrame", + "PersonDetectionAnnotation", + "PersonDetectionConfig", + "ShotChangeDetectionConfig", + "SpeechContext", + "SpeechRecognitionAlternative", + "SpeechTranscription", + "SpeechTranscriptionConfig", + "StreamingAnnotateVideoRequest", + "StreamingAnnotateVideoResponse", + "StreamingAutomlActionRecognitionConfig", + "StreamingAutomlClassificationConfig", + "StreamingAutomlObjectTrackingConfig", + "StreamingExplicitContentDetectionConfig", + "StreamingLabelDetectionConfig", + "StreamingObjectTrackingConfig", + "StreamingShotChangeDetectionConfig", + "StreamingStorageConfig", + "StreamingVideoAnnotationResults", + "StreamingVideoConfig", + "TextAnnotation", + "TextDetectionConfig", + "TextFrame", + "TextSegment", + "TimestampedObject", + "Track", + "VideoAnnotationProgress", + "VideoAnnotationResults", + "VideoContext", + "VideoSegment", + "WordInfo", + "Feature", + "LabelDetectionMode", + "Likelihood", + "StreamingFeature", +) diff --git a/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/types/video_intelligence.py b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/types/video_intelligence.py new file mode 100644 index 000000000000..351f2b24aae4 --- /dev/null +++ b/packages/google-cloud-videointelligence/google/cloud/videointelligence_v1p3beta1/types/video_intelligence.py @@ -0,0 +1,2134 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.videointelligence.v1p3beta1", + manifest={ + "LabelDetectionMode", + "Likelihood", + "StreamingFeature", + "Feature", + "AnnotateVideoRequest", + "VideoContext", + "LabelDetectionConfig", + "ShotChangeDetectionConfig", + "ObjectTrackingConfig", + "ExplicitContentDetectionConfig", + "FaceDetectionConfig", + "PersonDetectionConfig", + "TextDetectionConfig", + "VideoSegment", + "LabelSegment", + "LabelFrame", + "Entity", + "LabelAnnotation", + "ExplicitContentFrame", + "ExplicitContentAnnotation", + "NormalizedBoundingBox", + "TimestampedObject", + "Track", + "DetectedAttribute", + "Celebrity", + "CelebrityTrack", + "CelebrityRecognitionAnnotation", + "DetectedLandmark", + "FaceDetectionAnnotation", + "PersonDetectionAnnotation", + "VideoAnnotationResults", + "AnnotateVideoResponse", + "VideoAnnotationProgress", + "AnnotateVideoProgress", + "SpeechTranscriptionConfig", + "SpeechContext", + "SpeechTranscription", + "SpeechRecognitionAlternative", + "WordInfo", + "NormalizedVertex", + "NormalizedBoundingPoly", + "TextSegment", + "TextFrame", + "TextAnnotation", + "ObjectTrackingFrame", + "ObjectTrackingAnnotation", + "LogoRecognitionAnnotation", + "StreamingAnnotateVideoRequest", + "StreamingVideoConfig", + "StreamingAnnotateVideoResponse", + "StreamingVideoAnnotationResults", + "StreamingShotChangeDetectionConfig", + "StreamingLabelDetectionConfig", + "StreamingExplicitContentDetectionConfig", + "StreamingObjectTrackingConfig", + "StreamingAutomlActionRecognitionConfig", + "StreamingAutomlClassificationConfig", + "StreamingAutomlObjectTrackingConfig", + "StreamingStorageConfig", + }, +) + + +class LabelDetectionMode(proto.Enum): + r"""Label detection mode. + + Values: + LABEL_DETECTION_MODE_UNSPECIFIED (0): + Unspecified. + SHOT_MODE (1): + Detect shot-level labels. + FRAME_MODE (2): + Detect frame-level labels. + SHOT_AND_FRAME_MODE (3): + Detect both shot-level and frame-level + labels. + """ + LABEL_DETECTION_MODE_UNSPECIFIED = 0 + SHOT_MODE = 1 + FRAME_MODE = 2 + SHOT_AND_FRAME_MODE = 3 + + +class Likelihood(proto.Enum): + r"""Bucketized representation of likelihood. + + Values: + LIKELIHOOD_UNSPECIFIED (0): + Unspecified likelihood. + VERY_UNLIKELY (1): + Very unlikely. + UNLIKELY (2): + Unlikely. + POSSIBLE (3): + Possible. + LIKELY (4): + Likely. + VERY_LIKELY (5): + Very likely. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class StreamingFeature(proto.Enum): + r"""Streaming video annotation feature. + + Values: + STREAMING_FEATURE_UNSPECIFIED (0): + Unspecified. + STREAMING_LABEL_DETECTION (1): + Label detection. 
Detect objects, such as dog + or flower. + STREAMING_SHOT_CHANGE_DETECTION (2): + Shot change detection. + STREAMING_EXPLICIT_CONTENT_DETECTION (3): + Explicit content detection. + STREAMING_OBJECT_TRACKING (4): + Object detection and tracking. + STREAMING_AUTOML_ACTION_RECOGNITION (23): + Action recognition based on AutoML model. + STREAMING_AUTOML_CLASSIFICATION (21): + Video classification based on AutoML model. + STREAMING_AUTOML_OBJECT_TRACKING (22): + Object detection and tracking based on AutoML + model. + """ + STREAMING_FEATURE_UNSPECIFIED = 0 + STREAMING_LABEL_DETECTION = 1 + STREAMING_SHOT_CHANGE_DETECTION = 2 + STREAMING_EXPLICIT_CONTENT_DETECTION = 3 + STREAMING_OBJECT_TRACKING = 4 + STREAMING_AUTOML_ACTION_RECOGNITION = 23 + STREAMING_AUTOML_CLASSIFICATION = 21 + STREAMING_AUTOML_OBJECT_TRACKING = 22 + + +class Feature(proto.Enum): + r"""Video annotation feature. + + Values: + FEATURE_UNSPECIFIED (0): + Unspecified. + LABEL_DETECTION (1): + Label detection. Detect objects, such as dog + or flower. + SHOT_CHANGE_DETECTION (2): + Shot change detection. + EXPLICIT_CONTENT_DETECTION (3): + Explicit content detection. + FACE_DETECTION (4): + Human face detection. + SPEECH_TRANSCRIPTION (6): + Speech transcription. + TEXT_DETECTION (7): + OCR text detection and tracking. + OBJECT_TRACKING (9): + Object detection and tracking. + LOGO_RECOGNITION (12): + Logo detection, tracking, and recognition. + CELEBRITY_RECOGNITION (13): + Celebrity recognition. + PERSON_DETECTION (14): + Person detection. + """ + FEATURE_UNSPECIFIED = 0 + LABEL_DETECTION = 1 + SHOT_CHANGE_DETECTION = 2 + EXPLICIT_CONTENT_DETECTION = 3 + FACE_DETECTION = 4 + SPEECH_TRANSCRIPTION = 6 + TEXT_DETECTION = 7 + OBJECT_TRACKING = 9 + LOGO_RECOGNITION = 12 + CELEBRITY_RECOGNITION = 13 + PERSON_DETECTION = 14 + + +class AnnotateVideoRequest(proto.Message): + r"""Video annotation request. + + Attributes: + input_uri (str): + Input video location. Currently, only `Cloud + Storage `__ URIs are + supported. URIs must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + To identify multiple videos, a video URI may include + wildcards in the ``object-id``. Supported wildcards: '*' to + match 0 or more characters; '?' to match 1 character. If + unset, the input video should be embedded in the request as + ``input_content``. If set, ``input_content`` must be unset. + input_content (bytes): + The video data bytes. If unset, the input video(s) should be + specified via the ``input_uri``. If set, ``input_uri`` must + be unset. + features (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.Feature]): + Required. Requested video annotation + features. + video_context (google.cloud.videointelligence_v1p3beta1.types.VideoContext): + Additional video context and/or + feature-specific parameters. + output_uri (str): + Optional. Location where the output (in JSON format) should + be stored. Currently, only `Cloud + Storage `__ URIs are + supported. These must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return + [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). + For more information, see `Request + URIs `__. + location_id (str): + Optional. Cloud region where annotation should take place. + Supported cloud regions are: ``us-east1``, ``us-west1``, + ``europe-west1``, ``asia-east1``. 
If no region is specified, + the region will be determined based on video file location. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + input_content: bytes = proto.Field( + proto.BYTES, + number=6, + ) + features: MutableSequence["Feature"] = proto.RepeatedField( + proto.ENUM, + number=2, + enum="Feature", + ) + video_context: "VideoContext" = proto.Field( + proto.MESSAGE, + number=3, + message="VideoContext", + ) + output_uri: str = proto.Field( + proto.STRING, + number=4, + ) + location_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class VideoContext(proto.Message): + r"""Video context and/or feature-specific parameters. + + Attributes: + segments (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.VideoSegment]): + Video segments to annotate. The segments may + overlap and are not required to be contiguous or + span the whole video. If unspecified, each video + is treated as a single segment. + label_detection_config (google.cloud.videointelligence_v1p3beta1.types.LabelDetectionConfig): + Config for LABEL_DETECTION. + shot_change_detection_config (google.cloud.videointelligence_v1p3beta1.types.ShotChangeDetectionConfig): + Config for SHOT_CHANGE_DETECTION. + explicit_content_detection_config (google.cloud.videointelligence_v1p3beta1.types.ExplicitContentDetectionConfig): + Config for EXPLICIT_CONTENT_DETECTION. + face_detection_config (google.cloud.videointelligence_v1p3beta1.types.FaceDetectionConfig): + Config for FACE_DETECTION. + speech_transcription_config (google.cloud.videointelligence_v1p3beta1.types.SpeechTranscriptionConfig): + Config for SPEECH_TRANSCRIPTION. + text_detection_config (google.cloud.videointelligence_v1p3beta1.types.TextDetectionConfig): + Config for TEXT_DETECTION. + person_detection_config (google.cloud.videointelligence_v1p3beta1.types.PersonDetectionConfig): + Config for PERSON_DETECTION. + object_tracking_config (google.cloud.videointelligence_v1p3beta1.types.ObjectTrackingConfig): + Config for OBJECT_TRACKING. + """ + + segments: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + label_detection_config: "LabelDetectionConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="LabelDetectionConfig", + ) + shot_change_detection_config: "ShotChangeDetectionConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="ShotChangeDetectionConfig", + ) + explicit_content_detection_config: "ExplicitContentDetectionConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="ExplicitContentDetectionConfig", + ) + face_detection_config: "FaceDetectionConfig" = proto.Field( + proto.MESSAGE, + number=5, + message="FaceDetectionConfig", + ) + speech_transcription_config: "SpeechTranscriptionConfig" = proto.Field( + proto.MESSAGE, + number=6, + message="SpeechTranscriptionConfig", + ) + text_detection_config: "TextDetectionConfig" = proto.Field( + proto.MESSAGE, + number=8, + message="TextDetectionConfig", + ) + person_detection_config: "PersonDetectionConfig" = proto.Field( + proto.MESSAGE, + number=11, + message="PersonDetectionConfig", + ) + object_tracking_config: "ObjectTrackingConfig" = proto.Field( + proto.MESSAGE, + number=13, + message="ObjectTrackingConfig", + ) + + +class LabelDetectionConfig(proto.Message): + r"""Config for LABEL_DETECTION. 
+
+ Attributes:
+ label_detection_mode (google.cloud.videointelligence_v1p3beta1.types.LabelDetectionMode):
+ What labels should be detected with LABEL_DETECTION, in
+ addition to video-level labels or segment-level labels. If
+ unspecified, defaults to ``SHOT_MODE``.
+ stationary_camera (bool):
+ Whether the video has been shot from a stationary (i.e.,
+ non-moving) camera. When set to true, might improve
+ detection accuracy for moving objects. Should be used with
+ ``SHOT_AND_FRAME_MODE`` enabled.
+ model (str):
+ Model to use for label detection.
+ Supported values: "builtin/stable" (the default
+ if unset) and "builtin/latest".
+ frame_confidence_threshold (float):
+ The confidence threshold used to filter labels from
+ frame-level detection. If not set, it is set to 0.4 by
+ default. The valid range for this threshold is [0.1, 0.9].
+ Any value set outside of this range will be clipped. Note:
+ For best results, follow the default threshold. We will
+ update the default threshold every time we release a new
+ model.
+ video_confidence_threshold (float):
+ The confidence threshold used to filter labels from
+ video-level and shot-level detections. If not set, it's
+ set to 0.3 by default. The valid range for this threshold is
+ [0.1, 0.9]. Any value set outside of this range will be
+ clipped. Note: For best results, follow the default
+ threshold. We will update the default threshold every time
+ we release a new model.
+ """
+
+ label_detection_mode: "LabelDetectionMode" = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum="LabelDetectionMode",
+ )
+ stationary_camera: bool = proto.Field(
+ proto.BOOL,
+ number=2,
+ )
+ model: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ frame_confidence_threshold: float = proto.Field(
+ proto.FLOAT,
+ number=4,
+ )
+ video_confidence_threshold: float = proto.Field(
+ proto.FLOAT,
+ number=5,
+ )
+
+
+class ShotChangeDetectionConfig(proto.Message):
+ r"""Config for SHOT_CHANGE_DETECTION.
+
+ Attributes:
+ model (str):
+ Model to use for shot change detection.
+ Supported values: "builtin/stable" (the default
+ if unset) and "builtin/latest".
+ """
+
+ model: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class ObjectTrackingConfig(proto.Message):
+ r"""Config for OBJECT_TRACKING.
+
+ Attributes:
+ model (str):
+ Model to use for object tracking.
+ Supported values: "builtin/stable" (the default
+ if unset) and "builtin/latest".
+ """
+
+ model: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class ExplicitContentDetectionConfig(proto.Message):
+ r"""Config for EXPLICIT_CONTENT_DETECTION.
+
+ Attributes:
+ model (str):
+ Model to use for explicit content detection.
+ Supported values: "builtin/stable" (the default
+ if unset) and "builtin/latest".
+ """
+
+ model: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class FaceDetectionConfig(proto.Message):
+ r"""Config for FACE_DETECTION.
+
+ Attributes:
+ model (str):
+ Model to use for face detection.
+ Supported values: "builtin/stable" (the default
+ if unset) and "builtin/latest".
+ include_bounding_boxes (bool):
+ Whether bounding boxes are included in the
+ face annotation output.
+ include_attributes (bool):
+ Whether to enable face attributes detection, such as
+ glasses, dark_glasses, mouth_open etc. Ignored if
+ 'include_bounding_boxes' is set to false.
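# --- Editorial example (not part of this diff) ------------------------------
# Hedged sketch: assembling a VideoContext from the feature-specific configs
# defined in this file. All field values below are illustrative, not
# recommendations from the API documentation.
from google.cloud import videointelligence_v1p3beta1 as vi

context = vi.VideoContext(
    label_detection_config=vi.LabelDetectionConfig(
        label_detection_mode=vi.LabelDetectionMode.SHOT_AND_FRAME_MODE,
        stationary_camera=True,          # the camera does not move
        frame_confidence_threshold=0.4,  # service clips values to [0.1, 0.9]
    ),
    face_detection_config=vi.FaceDetectionConfig(
        include_bounding_boxes=True,
        include_attributes=True,  # ignored when include_bounding_boxes=False
    ),
)
# -----------------------------------------------------------------------------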
+ """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + include_bounding_boxes: bool = proto.Field( + proto.BOOL, + number=2, + ) + include_attributes: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class PersonDetectionConfig(proto.Message): + r"""Config for PERSON_DETECTION. + + Attributes: + include_bounding_boxes (bool): + Whether bounding boxes are included in the + person detection annotation output. + include_pose_landmarks (bool): + Whether to enable pose landmarks detection. Ignored if + 'include_bounding_boxes' is set to false. + include_attributes (bool): + Whether to enable person attributes detection, such as cloth + color (black, blue, etc), type (coat, dress, etc), pattern + (plain, floral, etc), hair, etc. Ignored if + 'include_bounding_boxes' is set to false. + """ + + include_bounding_boxes: bool = proto.Field( + proto.BOOL, + number=1, + ) + include_pose_landmarks: bool = proto.Field( + proto.BOOL, + number=2, + ) + include_attributes: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class TextDetectionConfig(proto.Message): + r"""Config for TEXT_DETECTION. + + Attributes: + language_hints (MutableSequence[str]): + Language hint can be specified if the + language to be detected is known a priori. It + can increase the accuracy of the detection. + Language hint must be language code in BCP-47 + format. + + Automatic language detection is performed if no + hint is provided. + model (str): + Model to use for text detection. + Supported values: "builtin/stable" (the default + if unset) and "builtin/latest". + """ + + language_hints: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + model: str = proto.Field( + proto.STRING, + number=2, + ) + + +class VideoSegment(proto.Message): + r"""Video segment. + + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the start of the segment + (inclusive). + end_time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the end of the segment + (inclusive). + """ + + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + end_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class LabelSegment(proto.Message): + r"""Video segment level annotation results for label detection. + + Attributes: + segment (google.cloud.videointelligence_v1p3beta1.types.VideoSegment): + Video segment where a label was detected. + confidence (float): + Confidence that the label is accurate. Range: [0, 1]. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class LabelFrame(proto.Message): + r"""Video frame level annotation results for label detection. + + Attributes: + time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the video frame for this + location. + confidence (float): + Confidence that the label is accurate. Range: [0, 1]. + """ + + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class Entity(proto.Message): + r"""Detected entity from video analysis. 
+
+ Attributes:
+ entity_id (str):
+ Opaque entity ID. Some IDs may be available in `Google
+ Knowledge Graph Search
+ API <https://developers.google.com/knowledge-graph/>`__.
+ description (str):
+ Textual description, e.g., ``Fixed-gear bicycle``.
+ language_code (str):
+ Language code for ``description`` in BCP-47 format.
+ """
+
+ entity_id: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ description: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ language_code: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+
+class LabelAnnotation(proto.Message):
+ r"""Label annotation.
+
+ Attributes:
+ entity (google.cloud.videointelligence_v1p3beta1.types.Entity):
+ Detected entity.
+ category_entities (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.Entity]):
+ Common categories for the detected entity. For example, when
+ the label is ``Terrier``, the category is likely ``dog``. In
+ some cases there might be more than one category, e.g.,
+ ``Terrier`` could also be a ``pet``.
+ segments (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LabelSegment]):
+ All video segments where a label was
+ detected.
+ frames (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LabelFrame]):
+ All video frames where a label was detected.
+ """
+
+ entity: "Entity" = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message="Entity",
+ )
+ category_entities: MutableSequence["Entity"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=2,
+ message="Entity",
+ )
+ segments: MutableSequence["LabelSegment"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=3,
+ message="LabelSegment",
+ )
+ frames: MutableSequence["LabelFrame"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=4,
+ message="LabelFrame",
+ )
+
+
+class ExplicitContentFrame(proto.Message):
+ r"""Video frame level annotation results for explicit content.
+
+ Attributes:
+ time_offset (google.protobuf.duration_pb2.Duration):
+ Time-offset, relative to the beginning of the
+ video, corresponding to the video frame for this
+ location.
+ pornography_likelihood (google.cloud.videointelligence_v1p3beta1.types.Likelihood):
+ Likelihood of the pornography content.
+ """
+
+ time_offset: duration_pb2.Duration = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message=duration_pb2.Duration,
+ )
+ pornography_likelihood: "Likelihood" = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum="Likelihood",
+ )
+
+
+class ExplicitContentAnnotation(proto.Message):
+ r"""Explicit content annotation (based on per-frame visual
+ signals only). If no explicit content has been detected in a
+ frame, no annotations are present for that frame.
+
+ Attributes:
+ frames (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.ExplicitContentFrame]):
+ All video frames where explicit content was
+ detected.
+ """
+
+ frames: MutableSequence["ExplicitContentFrame"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message="ExplicitContentFrame",
+ )
+
+
+class NormalizedBoundingBox(proto.Message):
+ r"""Normalized bounding box. The normalized vertex coordinates are
+ relative to the original image. Range: [0, 1].
+
+ Attributes:
+ left (float):
+ Left X coordinate.
+ top (float):
+ Top Y coordinate.
+ right (float):
+ Right X coordinate.
+ bottom (float):
+ Bottom Y coordinate.
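# --- Editorial example (not part of this diff) ------------------------------
# Sketch: converting a NormalizedBoundingBox, whose coordinates are all in
# [0, 1] relative to the frame, into pixel coordinates. frame_width and
# frame_height are assumed to be known from the source video.
def to_pixels(box, frame_width: int, frame_height: int):
    """Return (left, top, right, bottom) in integer pixels."""
    return (
        round(box.left * frame_width),
        round(box.top * frame_height),
        round(box.right * frame_width),
        round(box.bottom * frame_height),
    )
# -----------------------------------------------------------------------------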
+ """ + + left: float = proto.Field( + proto.FLOAT, + number=1, + ) + top: float = proto.Field( + proto.FLOAT, + number=2, + ) + right: float = proto.Field( + proto.FLOAT, + number=3, + ) + bottom: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class TimestampedObject(proto.Message): + r"""For tracking related features. An object at time_offset with + attributes, and located with normalized_bounding_box. + + Attributes: + normalized_bounding_box (google.cloud.videointelligence_v1p3beta1.types.NormalizedBoundingBox): + Normalized Bounding box in a frame, where the + object is located. + time_offset (google.protobuf.duration_pb2.Duration): + Time-offset, relative to the beginning of the + video, corresponding to the video frame for this + object. + attributes (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.DetectedAttribute]): + Optional. The attributes of the object in the + bounding box. + landmarks (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.DetectedLandmark]): + Optional. The detected landmarks. + """ + + normalized_bounding_box: "NormalizedBoundingBox" = proto.Field( + proto.MESSAGE, + number=1, + message="NormalizedBoundingBox", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + attributes: MutableSequence["DetectedAttribute"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="DetectedAttribute", + ) + landmarks: MutableSequence["DetectedLandmark"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="DetectedLandmark", + ) + + +class Track(proto.Message): + r"""A track of an object instance. + + Attributes: + segment (google.cloud.videointelligence_v1p3beta1.types.VideoSegment): + Video segment of a track. + timestamped_objects (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.TimestampedObject]): + The object with timestamp and attributes per + frame in the track. + attributes (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.DetectedAttribute]): + Optional. Attributes in the track level. + confidence (float): + Optional. The confidence score of the tracked + object. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + timestamped_objects: MutableSequence["TimestampedObject"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="TimestampedObject", + ) + attributes: MutableSequence["DetectedAttribute"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="DetectedAttribute", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class DetectedAttribute(proto.Message): + r"""A generic detected attribute represented by name in string + format. + + Attributes: + name (str): + The name of the attribute, for example, glasses, + dark_glasses, mouth_open. A full list of supported type + names will be provided in the document. + confidence (float): + Detected attribute confidence. Range [0, 1]. + value (str): + Text value of the detection result. For + example, the value for "HairColor" can be + "black", "blonde", etc. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + value: str = proto.Field( + proto.STRING, + number=3, + ) + + +class Celebrity(proto.Message): + r"""Celebrity definition. + + Attributes: + name (str): + The resource name of the celebrity. Have the format + ``video-intelligence/kg-mid`` indicates a celebrity from + preloaded gallery. 
kg-mid is the id in Google knowledge
+ graph, which is unique for the celebrity.
+ display_name (str):
+ The celebrity name.
+ description (str):
+ Textual description of additional information
+ about the celebrity, if applicable.
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ display_name: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ description: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+
+class CelebrityTrack(proto.Message):
+ r"""The annotation result of a celebrity face track.
+ RecognizedCelebrity field could be empty if the face track does
+ not have any matched celebrities.
+
+ Attributes:
+ celebrities (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.CelebrityTrack.RecognizedCelebrity]):
+ Top N match of the celebrities for the face
+ in this track.
+ face_track (google.cloud.videointelligence_v1p3beta1.types.Track):
+ A track of a person's face.
+ """
+
+ class RecognizedCelebrity(proto.Message):
+ r"""The recognized celebrity with confidence score.
+
+ Attributes:
+ celebrity (google.cloud.videointelligence_v1p3beta1.types.Celebrity):
+ The recognized celebrity.
+ confidence (float):
+ Recognition confidence. Range [0, 1].
+ """
+
+ celebrity: "Celebrity" = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message="Celebrity",
+ )
+ confidence: float = proto.Field(
+ proto.FLOAT,
+ number=2,
+ )
+
+ celebrities: MutableSequence[RecognizedCelebrity] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message=RecognizedCelebrity,
+ )
+ face_track: "Track" = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message="Track",
+ )
+
+
+class CelebrityRecognitionAnnotation(proto.Message):
+ r"""Celebrity recognition annotation per video.
+
+ Attributes:
+ celebrity_tracks (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.CelebrityTrack]):
+ The tracks detected from the input video,
+ including recognized celebrities and other
+ detected faces in the video.
+ """
+
+ celebrity_tracks: MutableSequence["CelebrityTrack"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message="CelebrityTrack",
+ )
+
+
+class DetectedLandmark(proto.Message):
+ r"""A generic detected landmark represented by name in string
+ format and a 2D location.
+
+ Attributes:
+ name (str):
+ The name of this landmark, for example, left_hand,
+ right_shoulder.
+ point (google.cloud.videointelligence_v1p3beta1.types.NormalizedVertex):
+ The 2D point of the detected landmark using
+ the normalized image coordinate system. The
+ normalized coordinates have the range from 0 to
+ 1.
+ confidence (float):
+ The confidence score of the detected landmark. Range [0, 1].
+ """
+
+ name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ point: "NormalizedVertex" = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message="NormalizedVertex",
+ )
+ confidence: float = proto.Field(
+ proto.FLOAT,
+ number=3,
+ )
+
+
+class FaceDetectionAnnotation(proto.Message):
+ r"""Face detection annotation.
+
+ Attributes:
+ tracks (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.Track]):
+ The face tracks with attributes.
+ thumbnail (bytes):
+ The thumbnail of a person's face.
+ """
+
+ tracks: MutableSequence["Track"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=3,
+ message="Track",
+ )
+ thumbnail: bytes = proto.Field(
+ proto.BYTES,
+ number=4,
+ )
+
+
+class PersonDetectionAnnotation(proto.Message):
+ r"""Person detection annotation per video.
+ + Attributes: + tracks (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.Track]): + The detected tracks of a person. + """ + + tracks: MutableSequence["Track"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Track", + ) + + +class VideoAnnotationResults(proto.Message): + r"""Annotation results for a single video. + + Attributes: + input_uri (str): + Video file location in `Cloud + Storage `__. + segment (google.cloud.videointelligence_v1p3beta1.types.VideoSegment): + Video segment on which the annotation is run. + segment_label_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LabelAnnotation]): + Topical label annotations on video level or + user-specified segment level. There is exactly + one element for each unique label. + segment_presence_label_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LabelAnnotation]): + Presence label annotations on video level or user-specified + segment level. There is exactly one element for each unique + label. Compared to the existing topical + ``segment_label_annotations``, this field presents more + fine-grained, segment-level labels detected in video content + and is made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. + shot_label_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LabelAnnotation]): + Topical label annotations on shot level. + There is exactly one element for each unique + label. + shot_presence_label_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LabelAnnotation]): + Presence label annotations on shot level. There is exactly + one element for each unique label. Compared to the existing + topical ``shot_label_annotations``, this field presents more + fine-grained, shot-level labels detected in video content + and is made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. + frame_label_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LabelAnnotation]): + Label annotations on frame level. + There is exactly one element for each unique + label. + face_detection_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.FaceDetectionAnnotation]): + Face detection annotations. + shot_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.VideoSegment]): + Shot annotations. Each shot is represented as + a video segment. + explicit_annotation (google.cloud.videointelligence_v1p3beta1.types.ExplicitContentAnnotation): + Explicit content annotation. + speech_transcriptions (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.SpeechTranscription]): + Speech transcription. + text_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.TextAnnotation]): + OCR text detection and tracking. + Annotations for list of detected text snippets. + Each will have list of frame information + associated with it. + object_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.ObjectTrackingAnnotation]): + Annotations for list of objects detected and + tracked in video. + logo_recognition_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LogoRecognitionAnnotation]): + Annotations for list of logos detected, + tracked and recognized in video. 
+ person_detection_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.PersonDetectionAnnotation]): + Person detection annotations. + celebrity_recognition_annotations (google.cloud.videointelligence_v1p3beta1.types.CelebrityRecognitionAnnotation): + Celebrity recognition annotations. + error (google.rpc.status_pb2.Status): + If set, indicates an error. Note that for a single + ``AnnotateVideoRequest`` some videos may succeed and some + may fail. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=10, + message="VideoSegment", + ) + segment_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="LabelAnnotation", + ) + segment_presence_label_annotations: MutableSequence[ + "LabelAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=23, + message="LabelAnnotation", + ) + shot_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="LabelAnnotation", + ) + shot_presence_label_annotations: MutableSequence[ + "LabelAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=24, + message="LabelAnnotation", + ) + frame_label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="LabelAnnotation", + ) + face_detection_annotations: MutableSequence[ + "FaceDetectionAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="FaceDetectionAnnotation", + ) + shot_annotations: MutableSequence["VideoSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="VideoSegment", + ) + explicit_annotation: "ExplicitContentAnnotation" = proto.Field( + proto.MESSAGE, + number=7, + message="ExplicitContentAnnotation", + ) + speech_transcriptions: MutableSequence["SpeechTranscription"] = proto.RepeatedField( + proto.MESSAGE, + number=11, + message="SpeechTranscription", + ) + text_annotations: MutableSequence["TextAnnotation"] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="TextAnnotation", + ) + object_annotations: MutableSequence[ + "ObjectTrackingAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=14, + message="ObjectTrackingAnnotation", + ) + logo_recognition_annotations: MutableSequence[ + "LogoRecognitionAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message="LogoRecognitionAnnotation", + ) + person_detection_annotations: MutableSequence[ + "PersonDetectionAnnotation" + ] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message="PersonDetectionAnnotation", + ) + celebrity_recognition_annotations: "CelebrityRecognitionAnnotation" = proto.Field( + proto.MESSAGE, + number=21, + message="CelebrityRecognitionAnnotation", + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=9, + message=status_pb2.Status, + ) + + +class AnnotateVideoResponse(proto.Message): + r"""Video annotation response. Included in the ``response`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + Attributes: + annotation_results (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.VideoAnnotationResults]): + Annotation results for all videos specified in + ``AnnotateVideoRequest``. 
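# --- Editorial example (not part of this diff) ------------------------------
# Sketch: walking an AnnotateVideoResponse. Each VideoAnnotationResults
# carries its own google.rpc.Status, since some videos in a single request
# may succeed while others fail.
def summarize(response) -> None:
    for result in response.annotation_results:
        if result.error.code:  # non-zero code means this video failed
            print(f"{result.input_uri}: {result.error.message}")
            continue
        labels = [a.entity.description for a in result.segment_label_annotations]
        print(f"{result.input_uri}: {len(result.shot_annotations)} shots; {labels}")
# -----------------------------------------------------------------------------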
+ """ + + annotation_results: MutableSequence["VideoAnnotationResults"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoAnnotationResults", + ) + + +class VideoAnnotationProgress(proto.Message): + r"""Annotation progress for a single video. + + Attributes: + input_uri (str): + Video file location in `Cloud + Storage `__. + progress_percent (int): + Approximate percentage processed thus far. + Guaranteed to be 100 when fully processed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the request was received. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Time of the most recent update. + feature (google.cloud.videointelligence_v1p3beta1.types.Feature): + Specifies which feature is being tracked if + the request contains more than one feature. + segment (google.cloud.videointelligence_v1p3beta1.types.VideoSegment): + Specifies which segment is being tracked if + the request contains more than one segment. + """ + + input_uri: str = proto.Field( + proto.STRING, + number=1, + ) + progress_percent: int = proto.Field( + proto.INT32, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + feature: "Feature" = proto.Field( + proto.ENUM, + number=5, + enum="Feature", + ) + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=6, + message="VideoSegment", + ) + + +class AnnotateVideoProgress(proto.Message): + r"""Video annotation progress. Included in the ``metadata`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + Attributes: + annotation_progress (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.VideoAnnotationProgress]): + Progress metadata for all videos specified in + ``AnnotateVideoRequest``. + """ + + annotation_progress: MutableSequence[ + "VideoAnnotationProgress" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="VideoAnnotationProgress", + ) + + +class SpeechTranscriptionConfig(proto.Message): + r"""Config for SPEECH_TRANSCRIPTION. + + Attributes: + language_code (str): + Required. *Required* The language of the supplied audio as a + `BCP-47 `__ + language tag. Example: "en-US". See `Language + Support `__ + for a list of the currently supported language codes. + max_alternatives (int): + Optional. Maximum number of recognition hypotheses to be + returned. Specifically, the maximum number of + ``SpeechRecognitionAlternative`` messages within each + ``SpeechTranscription``. The server may return fewer than + ``max_alternatives``. Valid values are ``0``-``30``. A value + of ``0`` or ``1`` will return a maximum of one. If omitted, + will return a maximum of one. + filter_profanity (bool): + Optional. If set to ``true``, the server will attempt to + filter out profanities, replacing all but the initial + character in each filtered word with asterisks, e.g. "f***". + If set to ``false`` or omitted, profanities won't be + filtered out. + speech_contexts (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.SpeechContext]): + Optional. A means to provide context to + assist the speech recognition. + enable_automatic_punctuation (bool): + Optional. If 'true', adds punctuation to + recognition result hypotheses. This feature is + only available in select languages. 
Setting this + for requests in other languages has no effect at + all. The default 'false' value does not add + punctuation to result hypotheses. NOTE: "This is + currently offered as an experimental service, + complimentary to all users. In the future this + may be exclusively available as a premium + feature.". + audio_tracks (MutableSequence[int]): + Optional. For file formats, such as MXF or + MKV, supporting multiple audio tracks, specify + up to two tracks. Default: track 0. + enable_speaker_diarization (bool): + Optional. If 'true', enables speaker detection for each + recognized word in the top alternative of the recognition + result using a speaker_tag provided in the WordInfo. Note: + When this is true, we send all the words from the beginning + of the audio for the top alternative in every consecutive + response. This is done in order to improve our speaker tags + as our models learn to identify the speakers in the + conversation over time. + diarization_speaker_count (int): + Optional. If set, specifies the estimated number of speakers + in the conversation. If not set, defaults to '2'. Ignored + unless enable_speaker_diarization is set to true. + enable_word_confidence (bool): + Optional. If ``true``, the top result includes a list of + words and the confidence for those words. If ``false``, no + word-level confidence information is returned. The default + is ``false``. + """ + + language_code: str = proto.Field( + proto.STRING, + number=1, + ) + max_alternatives: int = proto.Field( + proto.INT32, + number=2, + ) + filter_profanity: bool = proto.Field( + proto.BOOL, + number=3, + ) + speech_contexts: MutableSequence["SpeechContext"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="SpeechContext", + ) + enable_automatic_punctuation: bool = proto.Field( + proto.BOOL, + number=5, + ) + audio_tracks: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=6, + ) + enable_speaker_diarization: bool = proto.Field( + proto.BOOL, + number=7, + ) + diarization_speaker_count: int = proto.Field( + proto.INT32, + number=8, + ) + enable_word_confidence: bool = proto.Field( + proto.BOOL, + number=9, + ) + + +class SpeechContext(proto.Message): + r"""Provides "hints" to the speech recognizer to favor specific + words and phrases in the results. + + Attributes: + phrases (MutableSequence[str]): + Optional. A list of strings containing words and phrases + "hints" so that the speech recognition is more likely to + recognize them. This can be used to improve the accuracy for + specific words and phrases, for example, if specific + commands are typically spoken by the user. This can also be + used to add additional words to the vocabulary of the + recognizer. See `usage + limits `__. + """ + + phrases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class SpeechTranscription(proto.Message): + r"""A speech recognition result corresponding to a portion of the + audio. + + Attributes: + alternatives (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.SpeechRecognitionAlternative]): + May contain one or more recognition hypotheses (up to the + maximum specified in ``max_alternatives``). These + alternatives are ordered in terms of accuracy, with the top + (first) alternative being the most probable, as ranked by + the recognizer. + language_code (str): + Output only. The + `BCP-47 `__ + language tag of the language in this result. This language + code was detected to have the most likelihood of being + spoken in the audio. 
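# --- Editorial example (not part of this diff) ------------------------------
# Sketch: a SPEECH_TRANSCRIPTION config using the options described above.
# language_code is the only required field; everything else is illustrative.
from google.cloud import videointelligence_v1p3beta1 as vi

speech_config = vi.SpeechTranscriptionConfig(
    language_code="en-US",
    max_alternatives=2,               # 0 or 1 returns a single hypothesis
    enable_automatic_punctuation=True,
    enable_speaker_diarization=True,
    diarization_speaker_count=2,      # ignored unless diarization is enabled
    speech_contexts=[vi.SpeechContext(phrases=["Video Intelligence"])],
)
# -----------------------------------------------------------------------------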
+ """ + + alternatives: MutableSequence["SpeechRecognitionAlternative"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="SpeechRecognitionAlternative", + ) + language_code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SpeechRecognitionAlternative(proto.Message): + r"""Alternative hypotheses (a.k.a. n-best list). + + Attributes: + transcript (str): + Transcript text representing the words that + the user spoke. + confidence (float): + Output only. The confidence estimate between 0.0 and 1.0. A + higher number indicates an estimated greater likelihood that + the recognized words are correct. This field is set only for + the top alternative. This field is not guaranteed to be + accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. + words (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.WordInfo]): + Output only. A list of word-specific information for each + recognized word. Note: When ``enable_speaker_diarization`` + is set to true, you will see all the words from the + beginning of the audio. + """ + + transcript: str = proto.Field( + proto.STRING, + number=1, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + words: MutableSequence["WordInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="WordInfo", + ) + + +class WordInfo(proto.Message): + r"""Word-specific information for recognized words. Word information is + only included in the response when certain request parameters are + set, such as ``enable_word_time_offsets``. + + Attributes: + start_time (google.protobuf.duration_pb2.Duration): + Time offset relative to the beginning of the audio, and + corresponding to the start of the spoken word. This field is + only set if ``enable_word_time_offsets=true`` and only in + the top hypothesis. This is an experimental feature and the + accuracy of the time offset can vary. + end_time (google.protobuf.duration_pb2.Duration): + Time offset relative to the beginning of the audio, and + corresponding to the end of the spoken word. This field is + only set if ``enable_word_time_offsets=true`` and only in + the top hypothesis. This is an experimental feature and the + accuracy of the time offset can vary. + word (str): + The word corresponding to this set of + information. + confidence (float): + Output only. The confidence estimate between 0.0 and 1.0. A + higher number indicates an estimated greater likelihood that + the recognized words are correct. This field is set only for + the top alternative. This field is not guaranteed to be + accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. + speaker_tag (int): + Output only. A distinct integer value is assigned for every + speaker within the audio. This field specifies which one of + those speakers was detected to have spoken this word. Value + ranges from 1 up to diarization_speaker_count, and is only + set if speaker diarization is enabled. 
+ """ + + start_time: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + end_time: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + word: str = proto.Field( + proto.STRING, + number=3, + ) + confidence: float = proto.Field( + proto.FLOAT, + number=4, + ) + speaker_tag: int = proto.Field( + proto.INT32, + number=5, + ) + + +class NormalizedVertex(proto.Message): + r"""A vertex represents a 2D point in the image. + NOTE: the normalized vertex coordinates are relative to the + original image and range from 0 to 1. + + Attributes: + x (float): + X coordinate. + y (float): + Y coordinate. + """ + + x: float = proto.Field( + proto.FLOAT, + number=1, + ) + y: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class NormalizedBoundingPoly(proto.Message): + r"""Normalized bounding polygon for text (that might not be aligned with + axis). Contains list of the corner points in clockwise order + starting from top-left corner. For example, for a rectangular + bounding box: When the text is horizontal it might look like: 0----1 + \| \| 3----2 + + When it's clockwise rotated 180 degrees around the top-left corner + it becomes: 2----3 \| \| 1----0 + + and the vertex order will still be (0, 1, 2, 3). Note that values + can be less than 0, or greater than 1 due to trignometric + calculations for location of the box. + + Attributes: + vertices (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.NormalizedVertex]): + Normalized vertices of the bounding polygon. + """ + + vertices: MutableSequence["NormalizedVertex"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="NormalizedVertex", + ) + + +class TextSegment(proto.Message): + r"""Video segment level annotation results for text detection. + + Attributes: + segment (google.cloud.videointelligence_v1p3beta1.types.VideoSegment): + Video segment where a text snippet was + detected. + confidence (float): + Confidence for the track of detected text. It + is calculated as the highest over all frames + where OCR detected text appears. + frames (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.TextFrame]): + Information related to the frames where OCR + detected text appears. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=1, + message="VideoSegment", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=2, + ) + frames: MutableSequence["TextFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="TextFrame", + ) + + +class TextFrame(proto.Message): + r"""Video frame level annotation results for text annotation + (OCR). Contains information regarding timestamp and bounding box + locations for the frames containing detected OCR text snippets. + + Attributes: + rotated_bounding_box (google.cloud.videointelligence_v1p3beta1.types.NormalizedBoundingPoly): + Bounding polygon of the detected text for + this frame. + time_offset (google.protobuf.duration_pb2.Duration): + Timestamp of this frame. + """ + + rotated_bounding_box: "NormalizedBoundingPoly" = proto.Field( + proto.MESSAGE, + number=1, + message="NormalizedBoundingPoly", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class TextAnnotation(proto.Message): + r"""Annotations related to one detected OCR text snippet. This + will contain the corresponding text, confidence value, and frame + level information for each detection. 
+ + Attributes: + text (str): + The detected text. + segments (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.TextSegment]): + All video segments where OCR detected text + appears. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + segments: MutableSequence["TextSegment"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="TextSegment", + ) + + +class ObjectTrackingFrame(proto.Message): + r"""Video frame level annotations for object detection and + tracking. This field stores per frame location, time offset, and + confidence. + + Attributes: + normalized_bounding_box (google.cloud.videointelligence_v1p3beta1.types.NormalizedBoundingBox): + The normalized bounding box location of this + object track for the frame. + time_offset (google.protobuf.duration_pb2.Duration): + The timestamp of the frame in microseconds. + """ + + normalized_bounding_box: "NormalizedBoundingBox" = proto.Field( + proto.MESSAGE, + number=1, + message="NormalizedBoundingBox", + ) + time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class ObjectTrackingAnnotation(proto.Message): + r"""Annotations corresponding to one tracked object. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + segment (google.cloud.videointelligence_v1p3beta1.types.VideoSegment): + Non-streaming batch mode ONLY. + Each object track corresponds to one video + segment where it appears. + + This field is a member of `oneof`_ ``track_info``. + track_id (int): + Streaming mode ONLY. In streaming mode, we do not know the + end time of a tracked object before it is completed. Hence, + there is no VideoSegment info returned. Instead, we provide + a unique identifiable integer track_id so that the customers + can correlate the results of the ongoing + ObjectTrackAnnotation of the same track_id over time. + + This field is a member of `oneof`_ ``track_info``. + entity (google.cloud.videointelligence_v1p3beta1.types.Entity): + Entity to specify the object category that + this track is labeled as. + confidence (float): + Object category's labeling confidence of this + track. + frames (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.ObjectTrackingFrame]): + Information corresponding to all frames where + this object track appears. Non-streaming batch + mode: it may be one or multiple + ObjectTrackingFrame messages in frames. + Streaming mode: it can only be one + ObjectTrackingFrame message in frames. + """ + + segment: "VideoSegment" = proto.Field( + proto.MESSAGE, + number=3, + oneof="track_info", + message="VideoSegment", + ) + track_id: int = proto.Field( + proto.INT64, + number=5, + oneof="track_info", + ) + entity: "Entity" = proto.Field( + proto.MESSAGE, + number=1, + message="Entity", + ) + confidence: float = proto.Field( + proto.FLOAT, + number=4, + ) + frames: MutableSequence["ObjectTrackingFrame"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ObjectTrackingFrame", + ) + + +class LogoRecognitionAnnotation(proto.Message): + r"""Annotation corresponding to one detected, tracked and + recognized logo class. 
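# --- Editorial example (not part of this diff) ------------------------------
# Sketch: branching on the track_info oneof of ObjectTrackingAnnotation.
# Batch results set `segment`; streaming results set `track_id`. The
# WhichOneof call goes through the underlying protobuf message via the
# proto-plus pb() accessor.
from google.cloud import videointelligence_v1p3beta1 as vi

def describe_track(annotation: vi.ObjectTrackingAnnotation) -> str:
    which = vi.ObjectTrackingAnnotation.pb(annotation).WhichOneof("track_info")
    if which == "segment":
        return f"{annotation.entity.description}: batch segment {annotation.segment}"
    return f"{annotation.entity.description}: streaming track {annotation.track_id}"
# -----------------------------------------------------------------------------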
+
+    Attributes:
+        entity (google.cloud.videointelligence_v1p3beta1.types.Entity):
+            Entity category information to specify the
+            logo class that all the logo tracks within this
+            LogoRecognitionAnnotation are recognized as.
+        tracks (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.Track]):
+            All logo tracks where the recognized logo
+            appears. Each track corresponds to one logo
+            instance appearing in consecutive frames.
+        segments (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.VideoSegment]):
+            All video segments where the recognized logo
+            appears. There might be multiple instances of
+            the same logo class appearing in one
+            VideoSegment.
+    """
+
+    entity: "Entity" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message="Entity",
+    )
+    tracks: MutableSequence["Track"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message="Track",
+    )
+    segments: MutableSequence["VideoSegment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message="VideoSegment",
+    )
+
+
+class StreamingAnnotateVideoRequest(proto.Message):
+    r"""The top-level message sent by the client for the
+    ``StreamingAnnotateVideo`` method. Multiple
+    ``StreamingAnnotateVideoRequest`` messages are sent. The first
+    message must only contain a ``StreamingVideoConfig`` message. All
+    subsequent messages must only contain ``input_content`` data.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        video_config (google.cloud.videointelligence_v1p3beta1.types.StreamingVideoConfig):
+            Provides information to the annotator, specifying how to
+            process the request. The first
+            ``AnnotateStreamingVideoRequest`` message must only contain
+            a ``video_config`` message.
+
+            This field is a member of `oneof`_ ``streaming_request``.
+        input_content (bytes):
+            The video data to be annotated. Chunks of video data are
+            sequentially sent in ``StreamingAnnotateVideoRequest``
+            messages. Except for the initial
+            ``StreamingAnnotateVideoRequest`` message containing only
+            ``video_config``, all subsequent
+            ``AnnotateStreamingVideoRequest`` messages must only contain
+            the ``input_content`` field. Note: as with all bytes fields,
+            protocol buffers use a pure binary representation (not
+            base64).
+
+            This field is a member of `oneof`_ ``streaming_request``.
+    """
+
+    video_config: "StreamingVideoConfig" = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof="streaming_request",
+        message="StreamingVideoConfig",
+    )
+    input_content: bytes = proto.Field(
+        proto.BYTES,
+        number=2,
+        oneof="streaming_request",
+    )
+
+
+class StreamingVideoConfig(proto.Message):
+    r"""Provides information to the annotator that specifies how to
+    process the request.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        shot_change_detection_config (google.cloud.videointelligence_v1p3beta1.types.StreamingShotChangeDetectionConfig):
+            Config for STREAMING_SHOT_CHANGE_DETECTION.
+
+            This field is a member of `oneof`_ ``streaming_config``.
+ label_detection_config (google.cloud.videointelligence_v1p3beta1.types.StreamingLabelDetectionConfig): + Config for STREAMING_LABEL_DETECTION. + + This field is a member of `oneof`_ ``streaming_config``. + explicit_content_detection_config (google.cloud.videointelligence_v1p3beta1.types.StreamingExplicitContentDetectionConfig): + Config for STREAMING_EXPLICIT_CONTENT_DETECTION. + + This field is a member of `oneof`_ ``streaming_config``. + object_tracking_config (google.cloud.videointelligence_v1p3beta1.types.StreamingObjectTrackingConfig): + Config for STREAMING_OBJECT_TRACKING. + + This field is a member of `oneof`_ ``streaming_config``. + automl_action_recognition_config (google.cloud.videointelligence_v1p3beta1.types.StreamingAutomlActionRecognitionConfig): + Config for STREAMING_AUTOML_ACTION_RECOGNITION. + + This field is a member of `oneof`_ ``streaming_config``. + automl_classification_config (google.cloud.videointelligence_v1p3beta1.types.StreamingAutomlClassificationConfig): + Config for STREAMING_AUTOML_CLASSIFICATION. + + This field is a member of `oneof`_ ``streaming_config``. + automl_object_tracking_config (google.cloud.videointelligence_v1p3beta1.types.StreamingAutomlObjectTrackingConfig): + Config for STREAMING_AUTOML_OBJECT_TRACKING. + + This field is a member of `oneof`_ ``streaming_config``. + feature (google.cloud.videointelligence_v1p3beta1.types.StreamingFeature): + Requested annotation feature. + storage_config (google.cloud.videointelligence_v1p3beta1.types.StreamingStorageConfig): + Streaming storage option. By default: storage + is disabled. + """ + + shot_change_detection_config: "StreamingShotChangeDetectionConfig" = proto.Field( + proto.MESSAGE, + number=2, + oneof="streaming_config", + message="StreamingShotChangeDetectionConfig", + ) + label_detection_config: "StreamingLabelDetectionConfig" = proto.Field( + proto.MESSAGE, + number=3, + oneof="streaming_config", + message="StreamingLabelDetectionConfig", + ) + explicit_content_detection_config: "StreamingExplicitContentDetectionConfig" = ( + proto.Field( + proto.MESSAGE, + number=4, + oneof="streaming_config", + message="StreamingExplicitContentDetectionConfig", + ) + ) + object_tracking_config: "StreamingObjectTrackingConfig" = proto.Field( + proto.MESSAGE, + number=5, + oneof="streaming_config", + message="StreamingObjectTrackingConfig", + ) + automl_action_recognition_config: "StreamingAutomlActionRecognitionConfig" = ( + proto.Field( + proto.MESSAGE, + number=23, + oneof="streaming_config", + message="StreamingAutomlActionRecognitionConfig", + ) + ) + automl_classification_config: "StreamingAutomlClassificationConfig" = proto.Field( + proto.MESSAGE, + number=21, + oneof="streaming_config", + message="StreamingAutomlClassificationConfig", + ) + automl_object_tracking_config: "StreamingAutomlObjectTrackingConfig" = proto.Field( + proto.MESSAGE, + number=22, + oneof="streaming_config", + message="StreamingAutomlObjectTrackingConfig", + ) + feature: "StreamingFeature" = proto.Field( + proto.ENUM, + number=1, + enum="StreamingFeature", + ) + storage_config: "StreamingStorageConfig" = proto.Field( + proto.MESSAGE, + number=30, + message="StreamingStorageConfig", + ) + + +class StreamingAnnotateVideoResponse(proto.Message): + r"""``StreamingAnnotateVideoResponse`` is the only message returned to + the client by ``StreamingAnnotateVideo``. A series of zero or more + ``StreamingAnnotateVideoResponse`` messages are streamed back to the + client. 
+
+    Attributes:
+        error (google.rpc.status_pb2.Status):
+            If set, returns a [google.rpc.Status][google.rpc.Status]
+            message that specifies the error for the operation.
+        annotation_results (google.cloud.videointelligence_v1p3beta1.types.StreamingVideoAnnotationResults):
+            Streaming annotation results.
+        annotation_results_uri (str):
+            Google Cloud Storage (GCS) URI that stores annotation
+            results of one streaming session in JSON format. It is the
+            annotation_result_storage_directory from the request
+            followed by '/cloud_project_number-session_id'.
+    """
+
+    error: status_pb2.Status = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=status_pb2.Status,
+    )
+    annotation_results: "StreamingVideoAnnotationResults" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="StreamingVideoAnnotationResults",
+    )
+    annotation_results_uri: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class StreamingVideoAnnotationResults(proto.Message):
+    r"""Streaming annotation results corresponding to a portion of
+    the video that is currently being processed.
+
+    Attributes:
+        shot_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.VideoSegment]):
+            Shot annotation results. Each shot is
+            represented as a video segment.
+        label_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.LabelAnnotation]):
+            Label annotation results.
+        explicit_annotation (google.cloud.videointelligence_v1p3beta1.types.ExplicitContentAnnotation):
+            Explicit content annotation results.
+        object_annotations (MutableSequence[google.cloud.videointelligence_v1p3beta1.types.ObjectTrackingAnnotation]):
+            Object tracking results.
+    """
+
+    shot_annotations: MutableSequence["VideoSegment"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="VideoSegment",
+    )
+    label_annotations: MutableSequence["LabelAnnotation"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message="LabelAnnotation",
+    )
+    explicit_annotation: "ExplicitContentAnnotation" = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message="ExplicitContentAnnotation",
+    )
+    object_annotations: MutableSequence[
+        "ObjectTrackingAnnotation"
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message="ObjectTrackingAnnotation",
+    )
+
+
+class StreamingShotChangeDetectionConfig(proto.Message):
+    r"""Config for STREAMING_SHOT_CHANGE_DETECTION."""
+
+
+class StreamingLabelDetectionConfig(proto.Message):
+    r"""Config for STREAMING_LABEL_DETECTION.
+
+    Attributes:
+        stationary_camera (bool):
+            Whether the video has been captured from a
+            stationary (i.e. non-moving) camera. When set
+            to true, it might improve detection accuracy
+            for moving objects. Default: false.
+    """
+
+    stationary_camera: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+class StreamingExplicitContentDetectionConfig(proto.Message):
+    r"""Config for STREAMING_EXPLICIT_CONTENT_DETECTION."""
+
+
+class StreamingObjectTrackingConfig(proto.Message):
+    r"""Config for STREAMING_OBJECT_TRACKING."""
+
+
+class StreamingAutomlActionRecognitionConfig(proto.Message):
+    r"""Config for STREAMING_AUTOML_ACTION_RECOGNITION.
+
+    Attributes:
+        model_name (str):
+            Resource name of AutoML model. Format:
+            ``projects/{project_id}/locations/{location_id}/models/{model_id}``
+    """
+
+    model_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class StreamingAutomlClassificationConfig(proto.Message):
+    r"""Config for STREAMING_AUTOML_CLASSIFICATION.
+
+    Attributes:
+        model_name (str):
+            Resource name of AutoML model.
Format:
+            ``projects/{project_number}/locations/{location_id}/models/{model_id}``
+    """
+
+    model_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class StreamingAutomlObjectTrackingConfig(proto.Message):
+    r"""Config for STREAMING_AUTOML_OBJECT_TRACKING.
+
+    Attributes:
+        model_name (str):
+            Resource name of AutoML model. Format:
+            ``projects/{project_id}/locations/{location_id}/models/{model_id}``
+    """
+
+    model_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class StreamingStorageConfig(proto.Message):
+    r"""Config for streaming storage option.
+
+    Attributes:
+        enable_storage_annotation_result (bool):
+            Enable streaming storage. Default: false.
+        annotation_result_storage_directory (str):
+            Cloud Storage URI to store all annotation results for one
+            client. The client should specify this field as the
+            top-level storage directory. Annotation results of
+            different sessions will be put into different
+            sub-directories denoted by project_name and session_id. All
+            sub-directories will be auto-generated by the program and
+            made accessible to the client in the response proto. URIs
+            must be specified in the following format:
+            ``gs://bucket-id/object-id``. ``bucket-id`` should be a
+            valid Cloud Storage bucket created by the client, and the
+            bucket permissions must also be configured properly.
+            ``object-id`` can be an arbitrary string that makes sense
+            to the client. Other URI formats will return an error and
+            cause a Cloud Storage write failure.
+    """
+
+    enable_storage_annotation_result: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+    annotation_result_storage_directory: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/google-cloud-videointelligence/mypy.ini b/packages/google-cloud-videointelligence/mypy.ini
new file mode 100644
index 000000000000..574c5aed394b
--- /dev/null
+++ b/packages/google-cloud-videointelligence/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/packages/google-cloud-videointelligence/noxfile.py b/packages/google-cloud-videointelligence/noxfile.py
new file mode 100644
index 000000000000..be54712bfa8f
--- /dev/null
+++ b/packages/google-cloud-videointelligence/noxfile.py
@@ -0,0 +1,407 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
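+#
+# Usage sketch (assumes nox is installed in the local environment): run all
+# default sessions with `nox`, or a single session by name, e.g.
+#
+#   nox -s unit-3.11   # unit tests on Python 3.11 only
+#   nox -s lint        # just the linter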
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +import warnings + +import nox + +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.9" + +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please "
+            "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.",
+            DeprecationWarning,
+        )
+        session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints)
+
+    if UNIT_TEST_LOCAL_DEPENDENCIES:
+        session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints)
+
+    if UNIT_TEST_EXTRAS_BY_PYTHON:
+        extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, [])
+    elif UNIT_TEST_EXTRAS:
+        extras = UNIT_TEST_EXTRAS
+    else:
+        extras = []
+
+    if extras:
+        session.install("-e", f".[{','.join(extras)}]", *constraints)
+    else:
+        session.install("-e", ".", *constraints)
+
+
+def default(session):
+    # Install all test dependencies, then install this package in-place.
+
+    constraints_path = str(
+        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+    )
+    install_unittest_dependencies(session, "-c", constraints_path)
+
+    # Run py.test against the unit tests.
+    session.run(
+        "py.test",
+        "--quiet",
+        f"--junitxml=unit_{session.python}_sponge_log.xml",
+        "--cov=google",
+        "--cov=tests/unit",
+        "--cov-append",
+        "--cov-config=.coveragerc",
+        "--cov-report=",
+        "--cov-fail-under=0",
+        os.path.join("tests", "unit"),
+        *session.posargs,
+    )
+
+
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
+def unit(session):
+    """Run the unit test suite."""
+    default(session)
+
+
+def install_systemtest_dependencies(session, *constraints):
+
+    # Use pre-release gRPC for system tests.
+    # Exclude version 1.52.0rc1 which has a known issue.
+    # See https://github.com/grpc/grpc/issues/32163
+    session.install("--pre", "grpcio!=1.52.0rc1")
+
+    session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints)
+
+    if SYSTEM_TEST_EXTERNAL_DEPENDENCIES:
+        session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints)
+
+    if SYSTEM_TEST_LOCAL_DEPENDENCIES:
+        session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints)
+
+    if SYSTEM_TEST_DEPENDENCIES:
+        session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints)
+
+    if SYSTEM_TEST_EXTRAS_BY_PYTHON:
+        extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, [])
+    elif SYSTEM_TEST_EXTRAS:
+        extras = SYSTEM_TEST_EXTRAS
+    else:
+        extras = []
+
+    if extras:
+        session.install("-e", f".[{','.join(extras)}]", *constraints)
+    else:
+        session.install("-e", ".", *constraints)
+
+
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
+def system(session):
+    """Run the system test suite."""
+    constraints_path = str(
+        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+    )
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+    if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+        session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
+    # Install pyopenssl for mTLS testing.
+    if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+        session.install("pyopenssl")
+
+    system_test_exists = os.path.exists(system_test_path)
+    system_test_folder_exists = os.path.exists(system_test_folder_path)
+    # Sanity check: only run tests if found.
+    if not system_test_exists and not system_test_folder_exists:
+        session.skip("System tests were not found")
+
+    install_systemtest_dependencies(session, "-c", constraints_path)
+
+    # Run py.test against the system tests.
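+    # Both invocations below write a JUnit-style log
+    # (system_<python>_sponge_log.xml) for CI to collect.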
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.11") +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163
+        "grpcio!=1.52.0rc1",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run("py.test", "tests/unit")
diff --git a/packages/google-cloud-videointelligence/pylint.config.py b/packages/google-cloud-videointelligence/pylint.config.py
new file mode 100644
index 000000000000..5d64b9d2f256
--- /dev/null
+++ b/packages/google-cloud-videointelligence/pylint.config.py
@@ -0,0 +1,25 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module is used to configure gcp-devrel-py-tools run-pylint."""
+
+# Library configuration
+
+# library_additions = {}
+# library_replacements = {}
+
+# Test configuration
+
+# test_additions = copy.deepcopy(library_additions)
+# test_replacements = copy.deepcopy(library_replacements)
diff --git a/packages/google-cloud-videointelligence/renovate.json b/packages/google-cloud-videointelligence/renovate.json
new file mode 100644
index 000000000000..39b2a0ec9296
--- /dev/null
+++ b/packages/google-cloud-videointelligence/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-videointelligence/scripts/decrypt-secrets.sh b/packages/google-cloud-videointelligence/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..0018b421ddf8
--- /dev/null
+++ b/packages/google-cloud-videointelligence/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2023 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
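+#
+# Usage sketch (assumes gcloud is authenticated with access to the secrets;
+# "my-project" is a placeholder):
+#
+#   SECRET_MANAGER_PROJECT=my-project ./scripts/decrypt-secrets.sh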
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+   [[ -f "testing/service-account.json" ]] || \
+   [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fall back to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+    --project="${PROJECT_ID}" \
+    > testing/test-env.sh
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-service-account" \
+    --project="${PROJECT_ID}" \
+    > testing/service-account.json
+gcloud secrets versions access latest \
+    --secret="python-docs-samples-client-secrets" \
+    --project="${PROJECT_ID}" \
+    > testing/client-secrets.json
diff --git a/packages/google-cloud-videointelligence/scripts/fixup_keywords.py b/packages/google-cloud-videointelligence/scripts/fixup_keywords.py
new file mode 100644
index 000000000000..cda96fc572ce
--- /dev/null
+++ b/packages/google-cloud-videointelligence/scripts/fixup_keywords.py
@@ -0,0 +1,179 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class videointelligenceCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+        'annotate_video': ('features', 'input_uri', 'input_content', 'video_context', 'output_uri', 'location_id', ),
+
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
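+            # e.g. a call that already reads (hypothetical)
+            #   client.annotate_video(request={'input_uri': uri}, timeout=600)
+            # needs no further rewriting.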
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=videointelligenceCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.

+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the videointelligence client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+      parameters in client method calls to keyword based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1_keywords.py b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1_keywords.py new file mode 100644 index 000000000000..e064f0ca3cfc --- /dev/null +++ b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1_keywords.py @@ -0,0 +1,176 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class videointelligenceCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'annotate_video': ('features', 'input_uri', 'input_content', 'video_context', 'output_uri', 'location_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=videointelligenceCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.

+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the videointelligence client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+      parameters in client method calls to keyword based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1beta2_keywords.py b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1beta2_keywords.py new file mode 100644 index 000000000000..e064f0ca3cfc --- /dev/null +++ b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1beta2_keywords.py @@ -0,0 +1,176 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class videointelligenceCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'annotate_video': ('features', 'input_uri', 'input_content', 'video_context', 'output_uri', 'location_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=videointelligenceCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.

+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the videointelligence client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+      parameters in client method calls to keyword based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p1beta1_keywords.py b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p1beta1_keywords.py new file mode 100644 index 000000000000..e064f0ca3cfc --- /dev/null +++ b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p1beta1_keywords.py @@ -0,0 +1,176 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class videointelligenceCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'annotate_video': ('features', 'input_uri', 'input_content', 'video_context', 'output_uri', 'location_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=videointelligenceCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.

+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the videointelligence client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+      parameters in client method calls to keyword based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p2beta1_keywords.py b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p2beta1_keywords.py new file mode 100644 index 000000000000..e064f0ca3cfc --- /dev/null +++ b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p2beta1_keywords.py @@ -0,0 +1,176 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class videointelligenceCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'annotate_video': ('features', 'input_uri', 'input_content', 'video_context', 'output_uri', 'location_id', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=videointelligenceCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.

+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the videointelligence client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+      parameters in client method calls to keyword based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p3beta1_keywords.py b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p3beta1_keywords.py new file mode 100644 index 000000000000..a6a6814b368a --- /dev/null +++ b/packages/google-cloud-videointelligence/scripts/fixup_videointelligence_v1p3beta1_keywords.py @@ -0,0 +1,177 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class videointelligenceCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'annotate_video': ('features', 'input_uri', 'input_content', 'video_context', 'output_uri', 'location_id', ), + 'streaming_annotate_video': ('video_config', 'input_content', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=videointelligenceCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the videointelligence client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool converts positional parameters in client method calls to
+      keyword-based parameters on a best-effort basis.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect
+      false positives when an API method shares a name with another method.
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-videointelligence/scripts/readme-gen/readme_gen.py b/packages/google-cloud-videointelligence/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..1acc119835b5 --- /dev/null +++ b/packages/google-cloud-videointelligence/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-videointelligence/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. 
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have the `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+    $ python {{sample.file}}
+{% if sample.show_help %}
+
+    {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+   https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+   https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+   https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-videointelligence/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000000..1446b94a5e3a
--- /dev/null
+++ b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication set up. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+    https://cloud.google.com/docs/authentication/getting-started
diff --git a/packages/google-cloud-videointelligence/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000000..11957ce2714a
--- /dev/null
+++ b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+    https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project
diff --git a/packages/google-cloud-videointelligence/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..6f069c6c87a5
--- /dev/null
+++ b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+    .. code-block:: bash
+
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
+
+    .. code-block:: bash
+
+        $ virtualenv env
+        $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+    .. code-block:: bash
+
+        $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/packages/google-cloud-videointelligence/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000000..5ea33d18c00c
--- /dev/null
+++ b/packages/google-cloud-videointelligence/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for
+cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+    brew install portaudio
+
+  **Note**: if you encounter an error when running `pip install` that indicates
+  it can't find `portaudio.h`, try running `pip install` with the following
+  flags::
+
+    pip install --global-option='build_ext' \
+        --global-option='-I/usr/local/include' \
+        --global-option='-L/usr/local/lib' \
+        pyaudio
+
+* For Debian / Ubuntu Linux::
+
+    apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+  installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+    https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/packages/google-cloud-videointelligence/setup.cfg b/packages/google-cloud-videointelligence/setup.cfg
new file mode 100644
index 000000000000..052350089505
--- /dev/null
+++ b/packages/google-cloud-videointelligence/setup.cfg
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[bdist_wheel]
+universal = 1
diff --git a/packages/google-cloud-videointelligence/setup.py b/packages/google-cloud-videointelligence/setup.py
new file mode 100644
index 000000000000..6348516b4b34
--- /dev/null
+++ b/packages/google-cloud-videointelligence/setup.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-videointelligence" + + +description = "Google Cloud Videointelligence API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/videointelligence/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/google-cloud-python" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-videointelligence/testing/.gitignore b/packages/google-cloud-videointelligence/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-videointelligence/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-videointelligence/testing/constraints-3.10.txt b/packages/google-cloud-videointelligence/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-videointelligence/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-videointelligence/testing/constraints-3.11.txt b/packages/google-cloud-videointelligence/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-videointelligence/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. 
+# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-videointelligence/testing/constraints-3.12.txt b/packages/google-cloud-videointelligence/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-videointelligence/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-videointelligence/testing/constraints-3.7.txt b/packages/google-cloud-videointelligence/testing/constraints-3.7.txt new file mode 100644 index 000000000000..6c44adfea7ee --- /dev/null +++ b/packages/google-cloud-videointelligence/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/google-cloud-videointelligence/testing/constraints-3.8.txt b/packages/google-cloud-videointelligence/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-videointelligence/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-videointelligence/testing/constraints-3.9.txt b/packages/google-cloud-videointelligence/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/google-cloud-videointelligence/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/google-cloud-videointelligence/tests/__init__.py b/packages/google-cloud-videointelligence/tests/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-videointelligence/tests/system/__init__.py b/packages/google-cloud-videointelligence/tests/system/__init__.py new file mode 100644 index 000000000000..6cfb48f4cdf8 --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/system/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-videointelligence/tests/system/smoke_test.py b/packages/google-cloud-videointelligence/tests/system/smoke_test.py new file mode 100644 index 000000000000..147b69189734 --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/system/smoke_test.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + +from google.cloud import videointelligence_v1 + +INPUT_URI = "gs://cloud-samples-data/video/cat.mp4" + + +@pytest.fixture(scope="session") +def project_id(): + return os.environ["PROJECT_ID"] + + +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_annotate_video(transport: str): + client = videointelligence_v1.VideoIntelligenceServiceClient(transport=transport) + + features = [videointelligence_v1.Feature.LABEL_DETECTION] + client.annotate_video(features=features, input_uri=INPUT_URI).result() + + # The purpose of this smoke test is to test the communication with the API server, + # rather than API-specific functionality. + # If the smoke test fails, we won't reach this line. + assert True diff --git a/packages/google-cloud-videointelligence/tests/unit/__init__.py b/packages/google-cloud-videointelligence/tests/unit/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/__init__.py b/packages/google-cloud-videointelligence/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/__init__.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/test_video_intelligence_service.py new file mode 100644 index 000000000000..f726ba36aac1 --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1/test_video_intelligence_service.py @@ -0,0 +1,1999 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.videointelligence_v1.services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, + transports, +) +from google.cloud.videointelligence_v1.types import video_intelligence + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert VideoIntelligenceServiceClient._get_default_mtls_endpoint(None) is None + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (VideoIntelligenceServiceClient, "grpc"), + (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + (VideoIntelligenceServiceClient, "rest"), + ], +) +def test_video_intelligence_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + 
assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.VideoIntelligenceServiceGrpcTransport, "grpc"), + (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.VideoIntelligenceServiceRestTransport, "rest"), + ], +) +def test_video_intelligence_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (VideoIntelligenceServiceClient, "grpc"), + (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + (VideoIntelligenceServiceClient, "rest"), + ], +) +def test_video_intelligence_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +def test_video_intelligence_service_client_get_transport_class(): + transport = VideoIntelligenceServiceClient.get_transport_class() + available_transports = [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceRestTransport, + ] + assert transport in available_transports + + transport = VideoIntelligenceServiceClient.get_transport_class("grpc") + assert transport == transports.VideoIntelligenceServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +def test_video_intelligence_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we 
won't create a new one. + with mock.patch.object( + VideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + VideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
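+    # (Only "true" and "false" are valid values for this variable; anything
+    # else is expected to surface as a ValueError before a transport is built.)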
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + "true", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + "false", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + "true", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_video_intelligence_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
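+    # (When the env var is "true" and a cert source is given, the client is
+    # expected to switch to DEFAULT_MTLS_ENDPOINT and hand the cert callback to
+    # the transport; with "false" it must stay on the regular endpoint.)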
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +def test_video_intelligence_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
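+    # (An explicit client_cert_source in client_options takes precedence: the
+    # configured endpoint and the provided cert source come back unchanged.)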
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + ), + ], +) +def test_video_intelligence_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
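+    # (Scopes supplied through client_options should be forwarded to the
+    # transport verbatim, as the assert on the patched __init__ verifies.)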
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + None, + ), + ], +) +def test_video_intelligence_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_video_intelligence_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.videointelligence_v1.services.video_intelligence_service.transports.VideoIntelligenceServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = VideoIntelligenceServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_video_intelligence_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
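+    # (This test goes a step beyond the options test above: it also checks
+    # that the credentials loaded from the file are the ones handed to grpc
+    # create_channel, along with the expected default scopes and host.)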
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_intelligence.AnnotateVideoRequest, + dict, + ], +) +def test_annotate_video(request_type, transport: str = "grpc"): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_annotate_video_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
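+    # (Invoking the method with no arguments must still send a well-formed,
+    # default AnnotateVideoRequest, which the assert below checks.)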
+ with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + client.annotate_video() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + +@pytest.mark.asyncio +async def test_annotate_video_async( + transport: str = "grpc_asyncio", + request_type=video_intelligence.AnnotateVideoRequest, +): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_annotate_video_async_from_dict(): + await test_annotate_video_async(request_type=dict) + + +def test_annotate_video_flattened(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.annotate_video( + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].input_uri + mock_val = "input_uri_value" + assert arg == mock_val + arg = args[0].features + mock_val = [video_intelligence.Feature.LABEL_DETECTION] + assert arg == mock_val + + +def test_annotate_video_flattened_error(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_video( + video_intelligence.AnnotateVideoRequest(), + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + +@pytest.mark.asyncio +async def test_annotate_video_flattened_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. 
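+        # (For the async client the mocked stub must return an awaitable;
+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the operation so that
+        # awaiting client.annotate_video(...) resolves as it would in production.)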
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.annotate_video(
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].input_uri
+        mock_val = "input_uri_value"
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_annotate_video_flattened_error_async():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.annotate_video(
+            video_intelligence.AnnotateVideoRequest(),
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        video_intelligence.AnnotateVideoRequest,
+        dict,
+    ],
+)
+def test_annotate_video_rest(request_type):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.annotate_video(request)
+
+    # Establish that the response is the type that we expect.
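+    # (For the REST transport the raw long-running Operation comes back in the
+    # JSON body, so its name should round-trip through the mocked response.)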
+ assert response.operation.name == "operations/spam" + + +def test_annotate_video_rest_required_fields( + request_type=video_intelligence.AnnotateVideoRequest, +): + transport_class = transports.VideoIntelligenceServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_video._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_video._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
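+            # (transcode() is what normally maps the proto request onto an
+            #  HTTP method, URI, body, and query string; stubbing it with a
+            #  fixed mapping keeps the test focused on how the query
+            #  parameters are serialized.)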
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.annotate_video(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_annotate_video_rest_unset_required_fields():
+    transport = transports.VideoIntelligenceServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.annotate_video._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("features",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_annotate_video_rest_interceptors(null_interceptor):
+    transport = transports.VideoIntelligenceServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.VideoIntelligenceServiceRestInterceptor(),
+    )
+    client = VideoIntelligenceServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.VideoIntelligenceServiceRestInterceptor, "post_annotate_video"
+    ) as post, mock.patch.object(
+        transports.VideoIntelligenceServiceRestInterceptor, "pre_annotate_video"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = video_intelligence.AnnotateVideoRequest.pb(
+            video_intelligence.AnnotateVideoRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = video_intelligence.AnnotateVideoRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.annotate_video(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_annotate_video_rest_bad_request(
+    transport: str = "rest", request_type=video_intelligence.AnnotateVideoRequest
+):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
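+    # (A mocked 400 status alone should be surfaced to the caller as
+    #  core_exceptions.BadRequest.)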
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_video(request) + + +def test_annotate_video_rest_flattened(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.annotate_video(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/videos:annotate" % client.transport._host, args[1] + ) + + +def test_annotate_video_rest_flattened_error(transport: str = "rest"): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_video( + video_intelligence.AnnotateVideoRequest(), + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + +def test_annotate_video_rest_error(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = VideoIntelligenceServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + transports.VideoIntelligenceServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = VideoIntelligenceServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.VideoIntelligenceServiceGrpcTransport, + ) + + +def test_video_intelligence_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.VideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_video_intelligence_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.videointelligence_v1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.VideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ("annotate_video",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_video_intelligence_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.videointelligence_v1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_video_intelligence_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.videointelligence_v1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport() + adc.assert_called_once() + + +def test_video_intelligence_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + VideoIntelligenceServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + transports.VideoIntelligenceServiceRestTransport, + ], +) +def test_video_intelligence_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.VideoIntelligenceServiceGrpcTransport, grpc_helpers), + (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_video_intelligence_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_video_intelligence_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.VideoIntelligenceServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_video_intelligence_service_rest_lro_client(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_video_intelligence_service_host_no_port(transport_name): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="videointelligence.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_video_intelligence_service_host_with_port(transport_name): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="videointelligence.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "videointelligence.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_video_intelligence_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = VideoIntelligenceServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = VideoIntelligenceServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.annotate_video._session + session2 = client2.transport.annotate_video._session + assert session1 != session2 + + +def test_video_intelligence_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_video_intelligence_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_video_intelligence_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_video_intelligence_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_video_intelligence_service_grpc_lro_client():
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_video_intelligence_service_grpc_lro_async_client():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = VideoIntelligenceServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = VideoIntelligenceServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = VideoIntelligenceServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = VideoIntelligenceServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = VideoIntelligenceServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = VideoIntelligenceServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = VideoIntelligenceServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = VideoIntelligenceServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = VideoIntelligenceServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = VideoIntelligenceServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = VideoIntelligenceServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = VideoIntelligenceServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = VideoIntelligenceServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = 
VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/__init__.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/test_video_intelligence_service.py new file mode 100644 index 000000000000..a88e6bf4a69a --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1beta2/test_video_intelligence_service.py @@ -0,0 +1,1999 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.videointelligence_v1beta2.services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, + transports, +) +from google.cloud.videointelligence_v1beta2.types import video_intelligence + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert VideoIntelligenceServiceClient._get_default_mtls_endpoint(None) is None + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (VideoIntelligenceServiceClient, "grpc"), + (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + (VideoIntelligenceServiceClient, "rest"), + ], +) +def test_video_intelligence_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, 
client_class) + + assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.VideoIntelligenceServiceGrpcTransport, "grpc"), + (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.VideoIntelligenceServiceRestTransport, "rest"), + ], +) +def test_video_intelligence_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (VideoIntelligenceServiceClient, "grpc"), + (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + (VideoIntelligenceServiceClient, "rest"), + ], +) +def test_video_intelligence_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +def test_video_intelligence_service_client_get_transport_class(): + transport = VideoIntelligenceServiceClient.get_transport_class() + available_transports = [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceRestTransport, + ] + assert transport in available_transports + + transport = VideoIntelligenceServiceClient.get_transport_class("grpc") + assert transport == transports.VideoIntelligenceServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +def test_video_intelligence_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if 
channel is provided we won't create a new one. + with mock.patch.object( + VideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + VideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
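+    # (Only "true" and "false" are accepted for this variable; anything else
+    #  should make the client constructor raise ValueError.)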
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    VideoIntelligenceServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceClient),
+)
+@mock.patch.object(
+    VideoIntelligenceServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_video_intelligence_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the
+    # default mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a
+    # client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +def test_video_intelligence_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + ), + ], +) +def test_video_intelligence_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + None, + ), + ], +) +def test_video_intelligence_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_video_intelligence_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.videointelligence_v1beta2.services.video_intelligence_service.transports.VideoIntelligenceServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = VideoIntelligenceServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_video_intelligence_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_intelligence.AnnotateVideoRequest, + dict, + ], +) +def test_annotate_video(request_type, transport: str = "grpc"): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_annotate_video_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
+        client.annotate_video()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == video_intelligence.AnnotateVideoRequest()
+
+
+@pytest.mark.asyncio
+async def test_annotate_video_async(
+    transport: str = "grpc_asyncio",
+    request_type=video_intelligence.AnnotateVideoRequest,
+):
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        response = await client.annotate_video(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == video_intelligence.AnnotateVideoRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_annotate_video_async_from_dict():
+    await test_annotate_video_async(request_type=dict)
+
+
+def test_annotate_video_flattened():
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.annotate_video(
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].input_uri
+        mock_val = "input_uri_value"
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
+        assert arg == mock_val
+
+
+def test_annotate_video_flattened_error():
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.annotate_video(
+            video_intelligence.AnnotateVideoRequest(),
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+
+@pytest.mark.asyncio
+async def test_annotate_video_flattened_async():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
+        # Designate an appropriate return value for the call.
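+        # FakeUnaryUnaryCall wraps a plain response message so the mocked stub
+        # returns an awaitable, mirroring a real async unary-unary call.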
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.annotate_video(
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].input_uri
+        mock_val = "input_uri_value"
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_annotate_video_flattened_error_async():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.annotate_video(
+            video_intelligence.AnnotateVideoRequest(),
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        video_intelligence.AnnotateVideoRequest,
+        dict,
+    ],
+)
+def test_annotate_video_rest(request_type):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.annotate_video(request)
+
+    # Establish that the response is the type that we expect.
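+    # The client wraps the long-running operation, so the raw
+    # operations_pb2.Operation proto is reachable via response.operation.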
+    assert response.operation.name == "operations/spam"
+
+
+def test_annotate_video_rest_required_fields(
+    request_type=video_intelligence.AnnotateVideoRequest,
+):
+    transport_class = transports.VideoIntelligenceServiceRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(
+            pb_request,
+            including_default_value_fields=False,
+            use_integers_for_enums=False,
+        )
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).annotate_video._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).annotate_video._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name="operations/spam")
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
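+            # The hand-built dict below mimics the shape transcode() returns:
+            # {"uri": ..., "method": ..., "query_params": ..., "body": ...}.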
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.annotate_video(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_annotate_video_rest_unset_required_fields():
+    transport = transports.VideoIntelligenceServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.annotate_video._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("features",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_annotate_video_rest_interceptors(null_interceptor):
+    transport = transports.VideoIntelligenceServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.VideoIntelligenceServiceRestInterceptor(),
+    )
+    client = VideoIntelligenceServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.VideoIntelligenceServiceRestInterceptor, "post_annotate_video"
+    ) as post, mock.patch.object(
+        transports.VideoIntelligenceServiceRestInterceptor, "pre_annotate_video"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = video_intelligence.AnnotateVideoRequest.pb(
+            video_intelligence.AnnotateVideoRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = video_intelligence.AnnotateVideoRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.annotate_video(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_annotate_video_rest_bad_request(
+    transport: str = "rest", request_type=video_intelligence.AnnotateVideoRequest
+):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
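+    # A bare 400 status is enough: the REST transport maps HTTP error codes
+    # onto google.api_core exceptions, so the client is expected to raise
+    # core_exceptions.BadRequest without inspecting a response body.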
+    with mock.patch.object(Session, "request") as req, pytest.raises(
+        core_exceptions.BadRequest
+    ):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.annotate_video(request)
+
+
+def test_annotate_video_rest_flattened():
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+
+        client.annotate_video(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1beta2/videos:annotate" % client.transport._host, args[1]
+        )
+
+
+def test_annotate_video_rest_flattened_error(transport: str = "rest"):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.annotate_video(
+            video_intelligence.AnnotateVideoRequest(),
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+
+def test_annotate_video_rest_error():
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = VideoIntelligenceServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = VideoIntelligenceServiceClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = VideoIntelligenceServiceClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
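+    # (Same pattern as the cases above: mutually exclusive options must fail
+    # with ValueError before any transport is constructed.)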
+    options = mock.Mock()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = VideoIntelligenceServiceClient(
+            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = VideoIntelligenceServiceClient(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = VideoIntelligenceServiceClient(transport=transport)
+    assert client.transport is transport
+
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+        transports.VideoIntelligenceServiceRestTransport,
+    ],
+)
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "rest",
+    ],
+)
+def test_transport_kind(transport_name):
+    transport = VideoIntelligenceServiceClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.VideoIntelligenceServiceGrpcTransport,
+    )
+
+
+def test_video_intelligence_service_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.VideoIntelligenceServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json",
+        )
+
+
+def test_video_intelligence_service_base_transport():
+    # Instantiate the base transport.
+    with mock.patch(
+        "google.cloud.videointelligence_v1beta2.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport.__init__"
+    ) as Transport:
+        Transport.return_value = None
+        transport = transports.VideoIntelligenceServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
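+    # The base transport is abstract; each RPC below is only a stub, so
+    # invoking it without a concrete grpc/rest subclass must raise.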
+ methods = ("annotate_video",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_video_intelligence_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.videointelligence_v1beta2.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_video_intelligence_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.videointelligence_v1beta2.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport() + adc.assert_called_once() + + +def test_video_intelligence_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + VideoIntelligenceServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+        transports.VideoIntelligenceServiceRestTransport,
+    ],
+)
+def test_video_intelligence_service_transport_auth_gdch_credentials(transport_class):
+    host = "https://language.com"
+    api_audience_tests = [None, "https://language2.com"]
+    api_audience_expect = [host, "https://language2.com"]
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, "default", autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(
+                return_value=gdch_mock
+            )
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(e)
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.VideoIntelligenceServiceGrpcTransport, grpc_helpers),
+        (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+def test_video_intelligence_service_transport_create_channel(
+    transport_class, grpc_helpers
+):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "videointelligence.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="videointelligence.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_video_intelligence_service_grpc_transport_client_cert_source_for_mtls(
+    transport_class,
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
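+    # In that case the transport is expected to build the SSL credentials
+    # itself, passing the callback's cert/key pair to grpc.ssl_channel_credentials.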
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_video_intelligence_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch(
+        "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+    ) as mock_configure_mtls_channel:
+        transports.VideoIntelligenceServiceRestTransport(
+            credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_video_intelligence_service_rest_lro_client():
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.AbstractOperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_video_intelligence_service_host_no_port(transport_name):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="videointelligence.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "videointelligence.googleapis.com:443"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://videointelligence.googleapis.com"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+        "rest",
+    ],
+)
+def test_video_intelligence_service_host_with_port(transport_name):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="videointelligence.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        "videointelligence.googleapis.com:8000"
+        if transport_name in ["grpc", "grpc_asyncio"]
+        else "https://videointelligence.googleapis.com:8000"
+    )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "rest",
+    ],
+)
+def test_video_intelligence_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = VideoIntelligenceServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = VideoIntelligenceServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.annotate_video._session
+    session2 = client2.transport.annotate_video._session
+    assert session1 != session2
+
+
+def test_video_intelligence_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
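+    # A caller-supplied channel short-circuits credential resolution: only the
+    # host is normalized (":443" appended) and no SSL credentials are recorded.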
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_video_intelligence_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_video_intelligence_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_video_intelligence_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_video_intelligence_service_grpc_lro_client():
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_video_intelligence_service_grpc_lro_async_client():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = VideoIntelligenceServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = VideoIntelligenceServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = VideoIntelligenceServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = VideoIntelligenceServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = VideoIntelligenceServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = VideoIntelligenceServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "oyster"
+    expected = "organizations/{organization}".format(
+        organization=organization,
+    )
+    actual = VideoIntelligenceServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = VideoIntelligenceServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = VideoIntelligenceServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+
+def test_common_project_path():
+    project = "cuttlefish"
+    expected = "projects/{project}".format(
+        project=project,
+    )
+    actual = VideoIntelligenceServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = VideoIntelligenceServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = VideoIntelligenceServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(
+        project=project,
+        location=location,
+    )
+    actual = VideoIntelligenceServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = VideoIntelligenceServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = VideoIntelligenceServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = VideoIntelligenceServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = VideoIntelligenceServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    with mock.patch.object(
+        type(getattr(client.transport, "grpc_channel")), "close"
+    ) as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = VideoIntelligenceServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        with mock.patch.object(
+            type(getattr(client.transport, close_name)), "close"
+        ) as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+
+def test_client_ctx():
+    transports = [
+        "rest",
+        "grpc",
+    ]
+    for transport in transports:
+        client = VideoIntelligenceServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class",
+    [
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+        ),
+    ],
+)
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/__init__.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/__init__.py
new file mode 100644
index 000000000000..89a37dc92c5a
--- /dev/null
+++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/test_video_intelligence_service.py
new file mode 100644
index 000000000000..840f95a94782
--- /dev/null
+++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p1beta1/test_video_intelligence_service.py
@@ -0,0 +1,1999 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+from collections.abc import Iterable
+import json
+import math
+
+from google.api_core import (
+    future,
+    gapic_v1,
+    grpc_helpers,
+    grpc_helpers_async,
+    operation,
+    operations_v1,
+    path_template,
+)
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import operation_async  # type: ignore
+import google.auth
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import json_format
+import grpc
+from grpc.experimental import aio
+from proto.marshal.rules import wrappers
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+import pytest
+from requests import PreparedRequest, Request, Response
+from requests.sessions import Session
+
+from google.cloud.videointelligence_v1p1beta1.services.video_intelligence_service import (
+    VideoIntelligenceServiceAsyncClient,
+    VideoIntelligenceServiceClient,
+    transports,
+)
+from google.cloud.videointelligence_v1p1beta1.types import video_intelligence
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert VideoIntelligenceServiceClient._get_default_mtls_endpoint(None) is None
+    assert (
+        VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        VideoIntelligenceServiceClient._get_default_mtls_endpoint(non_googleapi)
+        == non_googleapi
+    )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (VideoIntelligenceServiceClient, "grpc"),
+        (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"),
+        (VideoIntelligenceServiceClient, "rest"),
+    ],
+)
+def test_video_intelligence_service_client_from_service_account_info(
+    client_class, transport_name
+):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "videointelligence.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://videointelligence.googleapis.com"
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.VideoIntelligenceServiceGrpcTransport, "grpc"),
+        (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+        (transports.VideoIntelligenceServiceRestTransport, "rest"),
+    ],
+)
+def test_video_intelligence_service_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_name",
+    [
+        (VideoIntelligenceServiceClient, "grpc"),
+        (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"),
+        (VideoIntelligenceServiceClient, "rest"),
+    ],
+)
+def test_video_intelligence_service_client_from_service_account_file(
+    client_class, transport_name
+):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json(
+            "dummy/file/path.json", transport=transport_name
+        )
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            "videointelligence.googleapis.com:443"
+            if transport_name in ["grpc", "grpc_asyncio"]
+            else "https://videointelligence.googleapis.com"
+        )
+
+
+def test_video_intelligence_service_client_get_transport_class():
+    transport = VideoIntelligenceServiceClient.get_transport_class()
+    available_transports = [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceRestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = VideoIntelligenceServiceClient.get_transport_class("grpc")
+    assert transport == transports.VideoIntelligenceServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceRestTransport,
+            "rest",
+        ),
+    ],
+)
+@mock.patch.object(
+    VideoIntelligenceServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceClient),
+)
+@mock.patch.object(
+    VideoIntelligenceServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
+)
+def test_video_intelligence_service_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(
+        VideoIntelligenceServiceClient, "get_transport_class"
+    ) as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(
+        VideoIntelligenceServiceClient, "get_transport_class"
+    ) as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
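+    # Unlike the mTLS-endpoint knob above (MutualTLSChannelError), a bad
+    # client-certificate value surfaces as a plain ValueError.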
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    VideoIntelligenceServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceClient),
+)
+@mock.patch.object(
+    VideoIntelligenceServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_video_intelligence_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
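+    # The parametrization exercises both "true" and "false"; only the "true"
+    # case should switch the host to DEFAULT_MTLS_ENDPOINT and forward the
+    # certificate source to the transport.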
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        options = client_options.ClientOptions(
+            client_cert_source=client_cert_source_callback
+        )
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=True,
+            ):
+                with mock.patch(
+                    "google.auth.transport.mtls.default_client_cert_source",
+                    return_value=client_cert_source_callback,
+                ):
+                    if use_client_cert_env == "false":
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=False,
+            ):
+                patched.return_value = None
+                client = client_class(transport=transport_name)
+                patched.assert_called_once_with(
+                    credentials=None,
+                    credentials_file=None,
+                    host=client.DEFAULT_ENDPOINT,
+                    scopes=None,
+                    client_cert_source_for_mtls=None,
+                    quota_project_id=None,
+                    client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    always_use_jwt_access=True,
+                    api_audience=None,
+                )
+
+
+@pytest.mark.parametrize(
+    "client_class",
+    [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient],
+)
+@mock.patch.object(
+    VideoIntelligenceServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceClient),
+)
+@mock.patch.object(
+    VideoIntelligenceServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
+)
+def test_video_intelligence_service_client_get_mtls_endpoint_and_cert_source(
+    client_class,
+):
+    mock_client_cert_source = mock.Mock()
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
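+    # An explicit client_cert_source in the options should win outright: the
+    # endpoint and cert source come straight from the options, not from ADC.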
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(
+            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
+        )
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
+            options
+        )
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source == mock_client_cert_source
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        mock_client_cert_source = mock.Mock()
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(
+            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
+        )
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
+            options
+        )
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch(
+            "google.auth.transport.mtls.has_default_client_cert_source",
+            return_value=False,
+        ):
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+            assert api_endpoint == client_class.DEFAULT_ENDPOINT
+            assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch(
+            "google.auth.transport.mtls.has_default_client_cert_source",
+            return_value=True,
+        ):
+            with mock.patch(
+                "google.auth.transport.mtls.default_client_cert_source",
+                return_value=mock_client_cert_source,
+            ):
+                (
+                    api_endpoint,
+                    cert_source,
+                ) = client_class.get_mtls_endpoint_and_cert_source()
+                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+                assert cert_source == mock_client_cert_source
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceRestTransport,
+            "rest",
+        ),
+    ],
+)
+def test_video_intelligence_service_client_client_options_scopes(
+    client_class, transport_class, transport_name
+):
+    # Check the case scopes are provided.
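+    # Scopes supplied via ClientOptions should reach the transport constructor
+    # unchanged, as the scopes=["1", "2"] assertion below verifies.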
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + None, + ), + ], +) +def test_video_intelligence_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_video_intelligence_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.videointelligence_v1p1beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = VideoIntelligenceServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_video_intelligence_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_intelligence.AnnotateVideoRequest, + dict, + ], +) +def test_annotate_video(request_type, transport: str = "grpc"): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_annotate_video_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
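+    # The transport method is a gRPC multicallable object; Python resolves
+    # special methods such as __call__ on the type rather than the instance,
+    # which is why the patch targets type(client.transport.annotate_video).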
+ with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + client.annotate_video() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + +@pytest.mark.asyncio +async def test_annotate_video_async( + transport: str = "grpc_asyncio", + request_type=video_intelligence.AnnotateVideoRequest, +): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_annotate_video_async_from_dict(): + await test_annotate_video_async(request_type=dict) + + +def test_annotate_video_flattened(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.annotate_video( + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].input_uri + mock_val = "input_uri_value" + assert arg == mock_val + arg = args[0].features + mock_val = [video_intelligence.Feature.LABEL_DETECTION] + assert arg == mock_val + + +def test_annotate_video_flattened_error(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_video( + video_intelligence.AnnotateVideoRequest(), + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + +@pytest.mark.asyncio +async def test_annotate_video_flattened_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.annotate_video(
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].input_uri
+        mock_val = "input_uri_value"
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_annotate_video_flattened_error_async():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.annotate_video(
+            video_intelligence.AnnotateVideoRequest(),
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        video_intelligence.AnnotateVideoRequest,
+        dict,
+    ],
+)
+def test_annotate_video_rest(request_type):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.annotate_video(request)
+
+    # Establish that the response is the type that we expect.
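+    # annotate_video is a long-running operation: over REST the client wraps
+    # the returned longrunning Operation in an api-core future, and its
+    # .operation attribute exposes the raw proto inspected here.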
+ assert response.operation.name == "operations/spam" + + +def test_annotate_video_rest_required_fields( + request_type=video_intelligence.AnnotateVideoRequest, +): + transport_class = transports.VideoIntelligenceServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_video._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_video._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
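+            # The stubbed transcode() result mirrors the real return shape:
+            # a dict with the "uri", "method", "query_params" and "body" keys
+            # that the REST transport consumes.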
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.annotate_video(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_annotate_video_rest_unset_required_fields():
+    transport = transports.VideoIntelligenceServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.annotate_video._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("features",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_annotate_video_rest_interceptors(null_interceptor):
+    transport = transports.VideoIntelligenceServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.VideoIntelligenceServiceRestInterceptor(),
+    )
+    client = VideoIntelligenceServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.VideoIntelligenceServiceRestInterceptor, "post_annotate_video"
+    ) as post, mock.patch.object(
+        transports.VideoIntelligenceServiceRestInterceptor, "pre_annotate_video"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = video_intelligence.AnnotateVideoRequest.pb(
+            video_intelligence.AnnotateVideoRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = video_intelligence.AnnotateVideoRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.annotate_video(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_annotate_video_rest_bad_request(
+    transport: str = "rest", request_type=video_intelligence.AnnotateVideoRequest
+):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
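+    # A 400 status on the mocked session is surfaced by the REST transport as
+    # google.api_core.exceptions.BadRequest, which pytest.raises expects.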
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_video(request) + + +def test_annotate_video_rest_flattened(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.annotate_video(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1p1beta1/videos:annotate" % client.transport._host, args[1] + ) + + +def test_annotate_video_rest_flattened_error(transport: str = "rest"): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_video( + video_intelligence.AnnotateVideoRequest(), + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + +def test_annotate_video_rest_error(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = VideoIntelligenceServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + transports.VideoIntelligenceServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = VideoIntelligenceServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.VideoIntelligenceServiceGrpcTransport, + ) + + +def test_video_intelligence_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.VideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_video_intelligence_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.videointelligence_v1p1beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.VideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
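+    # The base transport only defines the interface; the gRPC, gRPC AsyncIO
+    # and REST transports override these stubs with real implementations.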
+ methods = ("annotate_video",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_video_intelligence_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.videointelligence_v1p1beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_video_intelligence_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.videointelligence_v1p1beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport() + adc.assert_called_once() + + +def test_video_intelligence_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + VideoIntelligenceServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + transports.VideoIntelligenceServiceRestTransport, + ], +) +def test_video_intelligence_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.VideoIntelligenceServiceGrpcTransport, grpc_helpers), + (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_video_intelligence_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_video_intelligence_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.VideoIntelligenceServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_video_intelligence_service_rest_lro_client(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_video_intelligence_service_host_no_port(transport_name): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="videointelligence.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_video_intelligence_service_host_with_port(transport_name): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="videointelligence.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "videointelligence.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_video_intelligence_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = VideoIntelligenceServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = VideoIntelligenceServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.annotate_video._session + session2 = client2.transport.annotate_video._session + assert session1 != session2 + + +def test_video_intelligence_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_video_intelligence_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_video_intelligence_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
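+# When client_cert_source is None on this deprecated path, the transport falls
+# back to ADC-provided SslCredentials (mocked below) and still dials the
+# api_mtls_endpoint on port 443, as the create_channel assertion verifies.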
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_video_intelligence_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_video_intelligence_service_grpc_lro_client():
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_video_intelligence_service_grpc_lro_async_client():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = VideoIntelligenceServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = VideoIntelligenceServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = VideoIntelligenceServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
+    actual = VideoIntelligenceServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = VideoIntelligenceServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = VideoIntelligenceServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = VideoIntelligenceServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = VideoIntelligenceServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = VideoIntelligenceServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = VideoIntelligenceServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = VideoIntelligenceServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = VideoIntelligenceServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = VideoIntelligenceServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = 
VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/__init__.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py new file mode 100644 index 000000000000..c4f9bc3bbc9f --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py @@ -0,0 +1,1999 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, + transports, +) +from google.cloud.videointelligence_v1p2beta1.types import video_intelligence + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert VideoIntelligenceServiceClient._get_default_mtls_endpoint(None) is None + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (VideoIntelligenceServiceClient, "grpc"), + (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + (VideoIntelligenceServiceClient, "rest"), + ], +) +def test_video_intelligence_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, 
client_class) + + assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.VideoIntelligenceServiceGrpcTransport, "grpc"), + (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.VideoIntelligenceServiceRestTransport, "rest"), + ], +) +def test_video_intelligence_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (VideoIntelligenceServiceClient, "grpc"), + (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + (VideoIntelligenceServiceClient, "rest"), + ], +) +def test_video_intelligence_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +def test_video_intelligence_service_client_get_transport_class(): + transport = VideoIntelligenceServiceClient.get_transport_class() + available_transports = [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceRestTransport, + ] + assert transport in available_transports + + transport = VideoIntelligenceServiceClient.get_transport_class("grpc") + assert transport == transports.VideoIntelligenceServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +def test_video_intelligence_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if 
channel is provided we won't create a new one. + with mock.patch.object( + VideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + VideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
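+    # Only "true" and "false" are accepted for this variable; any other
+    # value makes the client constructor raise ValueError, as checked below.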
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceRestTransport,
+            "rest",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceRestTransport,
+            "rest",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    VideoIntelligenceServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceClient),
+)
+@mock.patch.object(
+    VideoIntelligenceServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_video_intelligence_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +def test_video_intelligence_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
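+    # When an explicit endpoint and cert source are supplied via options, both
+    # should be returned unchanged as long as client certs are enabled.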
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + ), + ], +) +def test_video_intelligence_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceRestTransport, + "rest", + None, + ), + ], +) +def test_video_intelligence_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_video_intelligence_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = VideoIntelligenceServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_video_intelligence_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_intelligence.AnnotateVideoRequest, + dict, + ], +) +def test_annotate_video(request_type, transport: str = "grpc"): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_annotate_video_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
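+    # No return value is configured here; the mocked stub returns a MagicMock,
+    # and the test only checks that a default request object was constructed.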
+ with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + client.annotate_video() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + +@pytest.mark.asyncio +async def test_annotate_video_async( + transport: str = "grpc_asyncio", + request_type=video_intelligence.AnnotateVideoRequest, +): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_annotate_video_async_from_dict(): + await test_annotate_video_async(request_type=dict) + + +def test_annotate_video_flattened(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.annotate_video( + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].input_uri + mock_val = "input_uri_value" + assert arg == mock_val + arg = args[0].features + mock_val = [video_intelligence.Feature.LABEL_DETECTION] + assert arg == mock_val + + +def test_annotate_video_flattened_error(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_video( + video_intelligence.AnnotateVideoRequest(), + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + +@pytest.mark.asyncio +async def test_annotate_video_flattened_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. 
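+        # The async surface wraps the operation in a FakeUnaryUnaryCall so the
+        # mocked response can be awaited like a real gRPC call.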
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.annotate_video(
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].input_uri
+        mock_val = "input_uri_value"
+        assert arg == mock_val
+        arg = args[0].features
+        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_annotate_video_flattened_error_async():
+    client = VideoIntelligenceServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.annotate_video(
+            video_intelligence.AnnotateVideoRequest(),
+            input_uri="input_uri_value",
+            features=[video_intelligence.Feature.LABEL_DETECTION],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        video_intelligence.AnnotateVideoRequest,
+        dict,
+    ],
+)
+def test_annotate_video_rest(request_type):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name="operations/spam")
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.annotate_video(request)
+
+    # Establish that the response is the type that we expect.
+ assert response.operation.name == "operations/spam" + + +def test_annotate_video_rest_required_fields( + request_type=video_intelligence.AnnotateVideoRequest, +): + transport_class = transports.VideoIntelligenceServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_video._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_video._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
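+            # transcode() is patched out above, so this canned result stands in
+            # for the real HTTP rule matching.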
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+
+            response = client.annotate_video(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_annotate_video_rest_unset_required_fields():
+    transport = transports.VideoIntelligenceServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.annotate_video._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("features",)))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_annotate_video_rest_interceptors(null_interceptor):
+    transport = transports.VideoIntelligenceServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None
+        if null_interceptor
+        else transports.VideoIntelligenceServiceRestInterceptor(),
+    )
+    client = VideoIntelligenceServiceClient(transport=transport)
+    with mock.patch.object(
+        type(client.transport._session), "request"
+    ) as req, mock.patch.object(
+        path_template, "transcode"
+    ) as transcode, mock.patch.object(
+        operation.Operation, "_set_result_from_operation"
+    ), mock.patch.object(
+        transports.VideoIntelligenceServiceRestInterceptor, "post_annotate_video"
+    ) as post, mock.patch.object(
+        transports.VideoIntelligenceServiceRestInterceptor, "pre_annotate_video"
+    ) as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = video_intelligence.AnnotateVideoRequest.pb(
+            video_intelligence.AnnotateVideoRequest()
+        )
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = json_format.MessageToJson(
+            operations_pb2.Operation()
+        )
+
+        request = video_intelligence.AnnotateVideoRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+
+        client.annotate_video(
+            request,
+            metadata=[
+                ("key", "val"),
+                ("cephalopod", "squid"),
+            ],
+        )
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_annotate_video_rest_bad_request(
+    transport: str = "rest", request_type=video_intelligence.AnnotateVideoRequest
+):
+    client = VideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
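+    # An HTTP 400 from the mocked session should be surfaced to the caller as
+    # core_exceptions.BadRequest by the client's error mapping.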
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_video(request) + + +def test_annotate_video_rest_flattened(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.annotate_video(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1p2beta1/videos:annotate" % client.transport._host, args[1] + ) + + +def test_annotate_video_rest_flattened_error(transport: str = "rest"): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_video( + video_intelligence.AnnotateVideoRequest(), + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + +def test_annotate_video_rest_error(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = VideoIntelligenceServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + transports.VideoIntelligenceServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = VideoIntelligenceServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.VideoIntelligenceServiceGrpcTransport, + ) + + +def test_video_intelligence_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.VideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_video_intelligence_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.VideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
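+    # The base transport only defines the interface; the concrete gRPC and REST
+    # subclasses are the ones that override these methods.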
+ methods = ("annotate_video",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_video_intelligence_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_video_intelligence_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport() + adc.assert_called_once() + + +def test_video_intelligence_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + VideoIntelligenceServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
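+    # google.auth.default() is the ADC entry point; scopes and quota_project_id
+    # should be forwarded to it unchanged.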
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + transports.VideoIntelligenceServiceRestTransport, + ], +) +def test_video_intelligence_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.VideoIntelligenceServiceGrpcTransport, grpc_helpers), + (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_video_intelligence_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
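+    # Here the transport should build its own grpc.ssl_channel_credentials from
+    # the cert/key pair returned by the callback.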
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_video_intelligence_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.VideoIntelligenceServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_video_intelligence_service_rest_lro_client(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_video_intelligence_service_host_no_port(transport_name): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="videointelligence.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "videointelligence.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_video_intelligence_service_host_with_port(transport_name): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="videointelligence.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "videointelligence.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://videointelligence.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_video_intelligence_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = VideoIntelligenceServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = VideoIntelligenceServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.annotate_video._session + session2 = client2.transport.annotate_video._session + assert session1 != session2 + + +def test_video_intelligence_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
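+    # A ready-made channel should be adopted as-is, with no separate SSL
+    # credentials recorded on the transport.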
+    transport = transports.VideoIntelligenceServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_video_intelligence_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.VideoIntelligenceServiceGrpcTransport,
+        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_video_intelligence_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_video_intelligence_service_grpc_lro_client(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_video_intelligence_service_grpc_lro_async_client(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = VideoIntelligenceServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = VideoIntelligenceServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = VideoIntelligenceServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = VideoIntelligenceServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = VideoIntelligenceServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = VideoIntelligenceServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = VideoIntelligenceServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = VideoIntelligenceServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = VideoIntelligenceServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = VideoIntelligenceServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = VideoIntelligenceServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = VideoIntelligenceServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = 
VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/__init__.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_streaming_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_streaming_video_intelligence_service.py new file mode 100644 index 000000000000..aaeaf26e48b5 --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_streaming_video_intelligence_service.py @@ -0,0 +1,1519 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.rpc import status_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.videointelligence_v1p3beta1.services.streaming_video_intelligence_service import ( + StreamingVideoIntelligenceServiceAsyncClient, + StreamingVideoIntelligenceServiceClient, + transports, +) +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ( + StreamingVideoIntelligenceServiceClient._get_default_mtls_endpoint(None) is None + ) + assert ( + StreamingVideoIntelligenceServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + StreamingVideoIntelligenceServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + StreamingVideoIntelligenceServiceClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + StreamingVideoIntelligenceServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + StreamingVideoIntelligenceServiceClient._get_default_mtls_endpoint( + non_googleapi + ) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (StreamingVideoIntelligenceServiceClient, "grpc"), + (StreamingVideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_streaming_video_intelligence_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("videointelligence.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.StreamingVideoIntelligenceServiceGrpcTransport, "grpc"), + ( + 
transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_streaming_video_intelligence_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (StreamingVideoIntelligenceServiceClient, "grpc"), + (StreamingVideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_streaming_video_intelligence_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("videointelligence.googleapis.com:443") + + +def test_streaming_video_intelligence_service_client_get_transport_class(): + transport = StreamingVideoIntelligenceServiceClient.get_transport_class() + available_transports = [ + transports.StreamingVideoIntelligenceServiceGrpcTransport, + ] + assert transport in available_transports + + transport = StreamingVideoIntelligenceServiceClient.get_transport_class("grpc") + assert transport == transports.StreamingVideoIntelligenceServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + StreamingVideoIntelligenceServiceClient, + transports.StreamingVideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + StreamingVideoIntelligenceServiceAsyncClient, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + StreamingVideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StreamingVideoIntelligenceServiceClient), +) +@mock.patch.object( + StreamingVideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StreamingVideoIntelligenceServiceAsyncClient), +) +def test_streaming_video_intelligence_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + StreamingVideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
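+    # Passing the transport by name forces the client to look the transport
+    # class up and construct it itself.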
+ with mock.patch.object( + StreamingVideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
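+    # (Only "true" and "false" are accepted for this variable; anything else is
+    # expected to surface as a ValueError at client construction time.)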
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided.
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided.
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            StreamingVideoIntelligenceServiceClient,
+            transports.StreamingVideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            StreamingVideoIntelligenceServiceAsyncClient,
+            transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            StreamingVideoIntelligenceServiceClient,
+            transports.StreamingVideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            StreamingVideoIntelligenceServiceAsyncClient,
+            transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    StreamingVideoIntelligenceServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(StreamingVideoIntelligenceServiceClient),
+)
+@mock.patch.object(
+    StreamingVideoIntelligenceServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(StreamingVideoIntelligenceServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_streaming_video_intelligence_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior: the endpoint is switched to the
+    # default mTLS endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a
+    # client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
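+    # Rough sketch of the matrix exercised below (the endpoints are the
+    # generated defaults, shown for orientation only):
+    #   cert source given, env "true"  -> DEFAULT_MTLS_ENDPOINT, cert is used
+    #   cert source given, env "false" -> DEFAULT_ENDPOINT, cert is ignored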
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [ + StreamingVideoIntelligenceServiceClient, + StreamingVideoIntelligenceServiceAsyncClient, + ], +) +@mock.patch.object( + StreamingVideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StreamingVideoIntelligenceServiceClient), +) +@mock.patch.object( + StreamingVideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StreamingVideoIntelligenceServiceAsyncClient), +) +def test_streaming_video_intelligence_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
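+    # (With an explicit client_cert_source in the options, the provided
+    # api_endpoint and cert source should be returned unchanged.)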
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + StreamingVideoIntelligenceServiceClient, + transports.StreamingVideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + StreamingVideoIntelligenceServiceAsyncClient, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_streaming_video_intelligence_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
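+    # (Scopes from ClientOptions should be forwarded verbatim to the transport
+    # constructor rather than merged with the default scopes at this layer.)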
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + StreamingVideoIntelligenceServiceClient, + transports.StreamingVideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + StreamingVideoIntelligenceServiceAsyncClient, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_streaming_video_intelligence_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_streaming_video_intelligence_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.videointelligence_v1p3beta1.services.streaming_video_intelligence_service.transports.StreamingVideoIntelligenceServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = StreamingVideoIntelligenceServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + StreamingVideoIntelligenceServiceClient, + transports.StreamingVideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + StreamingVideoIntelligenceServiceAsyncClient, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_streaming_video_intelligence_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
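+    # (The file path is forwarded as credentials_file; the transport, not the
+    # client, is responsible for actually loading credentials from it.)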
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_intelligence.StreamingAnnotateVideoRequest, + dict, + ], +) +def test_streaming_annotate_video(request_type, transport: str = "grpc"): + client = StreamingVideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_annotate_video), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([video_intelligence.StreamingAnnotateVideoResponse()]) + response = client.streaming_annotate_video(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, video_intelligence.StreamingAnnotateVideoResponse) + + +@pytest.mark.asyncio +async def test_streaming_annotate_video_async( + transport: str = "grpc_asyncio", + request_type=video_intelligence.StreamingAnnotateVideoRequest, +): + client = StreamingVideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_annotate_video), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
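+        # (streaming_annotate_video is a bidirectional streaming method; the
+        # async call object is mocked so that read() yields the fake response.)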
+ call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[video_intelligence.StreamingAnnotateVideoResponse()] + ) + response = await client.streaming_annotate_video(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, video_intelligence.StreamingAnnotateVideoResponse) + + +@pytest.mark.asyncio +async def test_streaming_annotate_video_async_from_dict(): + await test_streaming_annotate_video_async(request_type=dict) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.StreamingVideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StreamingVideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.StreamingVideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StreamingVideoIntelligenceServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.StreamingVideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StreamingVideoIntelligenceServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StreamingVideoIntelligenceServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.StreamingVideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StreamingVideoIntelligenceServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.StreamingVideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = StreamingVideoIntelligenceServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
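+    # (Both the sync and async transports should expose the channel they were
+    # built with via the grpc_channel property.)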
+ transport = transports.StreamingVideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StreamingVideoIntelligenceServiceGrpcTransport, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = StreamingVideoIntelligenceServiceClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = StreamingVideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.StreamingVideoIntelligenceServiceGrpcTransport, + ) + + +def test_streaming_video_intelligence_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.StreamingVideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_streaming_video_intelligence_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.videointelligence_v1p3beta1.services.streaming_video_intelligence_service.transports.StreamingVideoIntelligenceServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.StreamingVideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
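+    # (The base transport is an abstract shell; the concrete gRPC transports
+    # override these members with real stubs.)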
+ methods = ("streaming_annotate_video",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_streaming_video_intelligence_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.videointelligence_v1p3beta1.services.streaming_video_intelligence_service.transports.StreamingVideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.StreamingVideoIntelligenceServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_streaming_video_intelligence_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.videointelligence_v1p3beta1.services.streaming_video_intelligence_service.transports.StreamingVideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.StreamingVideoIntelligenceServiceTransport() + adc.assert_called_once() + + +def test_streaming_video_intelligence_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + StreamingVideoIntelligenceServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StreamingVideoIntelligenceServiceGrpcTransport, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_streaming_video_intelligence_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
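+    # (google.auth.default is mocked here, so no real Application Default
+    # Credentials lookup happens during the test.)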
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StreamingVideoIntelligenceServiceGrpcTransport, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_streaming_video_intelligence_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.StreamingVideoIntelligenceServiceGrpcTransport, grpc_helpers), + ( + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_streaming_video_intelligence_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StreamingVideoIntelligenceServiceGrpcTransport, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_streaming_video_intelligence_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
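+    # (In that case the transport is expected to build its own SSL credentials
+    # from the callback's cert/key pair via grpc.ssl_channel_credentials.)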
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_streaming_video_intelligence_service_host_no_port(transport_name):
+    client = StreamingVideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="videointelligence.googleapis.com"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("videointelligence.googleapis.com:443")
+
+
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_streaming_video_intelligence_service_host_with_port(transport_name):
+    client = StreamingVideoIntelligenceServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="videointelligence.googleapis.com:8000"
+        ),
+        transport=transport_name,
+    )
+    assert client.transport._host == ("videointelligence.googleapis.com:8000")
+
+
+def test_streaming_video_intelligence_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.StreamingVideoIntelligenceServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_streaming_video_intelligence_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
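+# (The modern equivalents exercised above are client_cert_source_for_mtls and
+# ssl_channel_credentials.)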
+@pytest.mark.parametrize( + "transport_class", + [ + transports.StreamingVideoIntelligenceServiceGrpcTransport, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_streaming_video_intelligence_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.StreamingVideoIntelligenceServiceGrpcTransport, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_streaming_video_intelligence_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = StreamingVideoIntelligenceServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = StreamingVideoIntelligenceServiceClient.common_billing_account_path( + **expected + ) + + # Check that the path construction is reversible. 
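+    # (Building a path from kwargs and parsing it back should round-trip to the
+    # same dict; the same pattern repeats for the other common resource paths below.)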
+ actual = StreamingVideoIntelligenceServiceClient.parse_common_billing_account_path( + path + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = StreamingVideoIntelligenceServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = StreamingVideoIntelligenceServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = StreamingVideoIntelligenceServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = StreamingVideoIntelligenceServiceClient.common_organization_path( + organization + ) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = StreamingVideoIntelligenceServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = StreamingVideoIntelligenceServiceClient.parse_common_organization_path( + path + ) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = StreamingVideoIntelligenceServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = StreamingVideoIntelligenceServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = StreamingVideoIntelligenceServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = StreamingVideoIntelligenceServiceClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = StreamingVideoIntelligenceServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = StreamingVideoIntelligenceServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.StreamingVideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = StreamingVideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.StreamingVideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = StreamingVideoIntelligenceServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = StreamingVideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = StreamingVideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = StreamingVideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
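+    # (Using the client as a context manager should close the transport on exit.)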
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + StreamingVideoIntelligenceServiceClient, + transports.StreamingVideoIntelligenceServiceGrpcTransport, + ), + ( + StreamingVideoIntelligenceServiceAsyncClient, + transports.StreamingVideoIntelligenceServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_video_intelligence_service.py b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_video_intelligence_service.py new file mode 100644 index 000000000000..0be997086192 --- /dev/null +++ b/packages/google-cloud-videointelligence/tests/unit/gapic/videointelligence_v1p3beta1/test_video_intelligence_service.py @@ -0,0 +1,1630 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.videointelligence_v1p3beta1.services.video_intelligence_service import ( + VideoIntelligenceServiceAsyncClient, + VideoIntelligenceServiceClient, + transports, +) +from google.cloud.videointelligence_v1p3beta1.types import video_intelligence + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert VideoIntelligenceServiceClient._get_default_mtls_endpoint(None) is None + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + VideoIntelligenceServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (VideoIntelligenceServiceClient, "grpc"), + (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_video_intelligence_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("videointelligence.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.VideoIntelligenceServiceGrpcTransport, "grpc"), + 
(transports.VideoIntelligenceServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_video_intelligence_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (VideoIntelligenceServiceClient, "grpc"), + (VideoIntelligenceServiceAsyncClient, "grpc_asyncio"), + ], +) +def test_video_intelligence_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ("videointelligence.googleapis.com:443") + + +def test_video_intelligence_service_client_get_transport_class(): + transport = VideoIntelligenceServiceClient.get_transport_class() + available_transports = [ + transports.VideoIntelligenceServiceGrpcTransport, + ] + assert transport in available_transports + + transport = VideoIntelligenceServiceClient.get_transport_class("grpc") + assert transport == transports.VideoIntelligenceServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +def test_video_intelligence_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + VideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + VideoIntelligenceServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided.
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided.
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            VideoIntelligenceServiceClient,
+            transports.VideoIntelligenceServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            VideoIntelligenceServiceAsyncClient,
+            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    VideoIntelligenceServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceClient),
+)
+@mock.patch.object(
+    VideoIntelligenceServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_video_intelligence_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior: the endpoint is switched to the
+    # default mTLS endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a
+    # client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient], +) +@mock.patch.object( + VideoIntelligenceServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceClient), +) +@mock.patch.object( + VideoIntelligenceServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(VideoIntelligenceServiceAsyncClient), +) +def test_video_intelligence_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_video_intelligence_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
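+ # Scopes passed via ClientOptions should be forwarded verbatim to the transport constructor, as the scopes=["1", "2"] assertion below verifies.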
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_video_intelligence_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_video_intelligence_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.videointelligence_v1p3beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = VideoIntelligenceServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_video_intelligence_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
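+ # Both google.auth.load_credentials_from_file and google.auth.default are mocked so the assertion can tell file-based credentials (file_creds) apart from ADC (creds); create_channel must receive the former.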
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + video_intelligence.AnnotateVideoRequest, + dict, + ], +) +def test_annotate_video(request_type, transport: str = "grpc"): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_annotate_video_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + client.annotate_video() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + +@pytest.mark.asyncio +async def test_annotate_video_async( + transport: str = "grpc_asyncio", + request_type=video_intelligence.AnnotateVideoRequest, +): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.annotate_video(request) + + # Establish that the underlying gRPC stub method was called. 
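+ # (For the async surface this only asserts that at least one call was recorded, rather than exactly one as in the sync test above.)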
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == video_intelligence.AnnotateVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_annotate_video_async_from_dict(): + await test_annotate_video_async(request_type=dict) + + +def test_annotate_video_flattened(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.annotate_video( + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].input_uri + mock_val = "input_uri_value" + assert arg == mock_val + arg = args[0].features + mock_val = [video_intelligence.Feature.LABEL_DETECTION] + assert arg == mock_val + + +def test_annotate_video_flattened_error(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.annotate_video( + video_intelligence.AnnotateVideoRequest(), + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + +@pytest.mark.asyncio +async def test_annotate_video_flattened_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.annotate_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.annotate_video( + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].input_uri + mock_val = "input_uri_value" + assert arg == mock_val + arg = args[0].features + mock_val = [video_intelligence.Feature.LABEL_DETECTION] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_annotate_video_flattened_error_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error.
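+ # The ValueError is raised client-side, before any RPC would be attempted; no mock transport is needed here.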
+ with pytest.raises(ValueError): + await client.annotate_video( + video_intelligence.AnnotateVideoRequest(), + input_uri="input_uri_value", + features=[video_intelligence.Feature.LABEL_DETECTION], + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = VideoIntelligenceServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = VideoIntelligenceServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.VideoIntelligenceServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
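+ # Constructing the transport without explicit credentials should fall back to google.auth.default() exactly once.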
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = VideoIntelligenceServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.VideoIntelligenceServiceGrpcTransport, + ) + + +def test_video_intelligence_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.VideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_video_intelligence_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.videointelligence_v1p3beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.VideoIntelligenceServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ("annotate_video",) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_video_intelligence_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.videointelligence_v1p3beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_video_intelligence_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
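+ # _prep_wrapped_messages is patched out so the base transport can be instantiated here without preparing its wrapped RPC methods.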
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.videointelligence_v1p3beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.VideoIntelligenceServiceTransport() + adc.assert_called_once() + + +def test_video_intelligence_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + VideoIntelligenceServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.VideoIntelligenceServiceGrpcTransport, grpc_helpers), + (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_video_intelligence_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
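+ # The assertion below also pins the full argument set forwarded to grpc_helpers.create_channel, including the unlimited send/receive message-size options.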
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "videointelligence.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="videointelligence.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_video_intelligence_service_host_no_port(transport_name): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="videointelligence.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ("videointelligence.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_video_intelligence_service_host_with_port(transport_name): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="videointelligence.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ("videointelligence.googleapis.com:8000") + + +def test_video_intelligence_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
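+ # When a ready-made channel is injected, the transport should adopt it as-is and skip the SSL-credential plumbing, so _ssl_channel_credentials stays None.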
+ transport = transports.VideoIntelligenceServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_video_intelligence_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor.
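+ # In the ADC variant below, the client certificate comes from google.auth.transport.grpc.SslCredentials rather than from an explicit client_cert_source callback.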
+@pytest.mark.parametrize( + "transport_class", + [ + transports.VideoIntelligenceServiceGrpcTransport, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ], +) +def test_video_intelligence_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_video_intelligence_service_grpc_lro_client(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property return the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_video_intelligence_service_grpc_lro_async_client(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property return the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = VideoIntelligenceServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = VideoIntelligenceServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = VideoIntelligenceServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = VideoIntelligenceServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible.
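+ # (Each parse_common_*_path helper is the inverse of its builder: it recovers the original path segments as a dict.)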
+ actual = VideoIntelligenceServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = VideoIntelligenceServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = VideoIntelligenceServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = VideoIntelligenceServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = VideoIntelligenceServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = VideoIntelligenceServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = VideoIntelligenceServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = VideoIntelligenceServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = VideoIntelligenceServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = VideoIntelligenceServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = VideoIntelligenceServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = VideoIntelligenceServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + VideoIntelligenceServiceClient, + transports.VideoIntelligenceServiceGrpcTransport, + ), + ( + VideoIntelligenceServiceAsyncClient, + transports.VideoIntelligenceServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/release-please-config.json b/release-please-config.json index 190c89c5e1f3..fa8bc8cf1874 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -357,6 +357,16 @@ ], "release-type": "python" }, + "packages/google-cloud-bigquery-connection": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-bigquery-connection", + "extra-files": [ + "google/cloud/bigquery_connection/gapic_version.py", + "google/cloud/bigquery_connection_v1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-bigquery-data-exchange": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -393,6 +403,16 @@ ], "release-type": "python" }, + "packages/google-cloud-bigquery-datatransfer": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-bigquery-datatransfer", + "extra-files": [ + "google/cloud/bigquery_datatransfer/gapic_version.py", + "google/cloud/bigquery_datatransfer_v1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-bigquery-logging": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -424,6 +444,16 @@ ], "release-type": "python" }, + "packages/google-cloud-bigquery-reservation": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-bigquery-reservation", + "extra-files": [ + "google/cloud/bigquery_reservation/gapic_version.py", + "google/cloud/bigquery_reservation_v1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-billing": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -556,6 +586,17 @@ ], "release-type": "python" }, + "packages/google-cloud-container": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-container", + "extra-files": [ + "google/cloud/container/gapic_version.py", + "google/cloud/container_v1/gapic_version.py", + "google/cloud/container_v1beta1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-contentwarehouse": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -682,6 
+723,16 @@ ], "release-type": "python" }, + "packages/google-cloud-dataproc": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-dataproc", + "extra-files": [ + "google/cloud/dataproc/gapic_version.py", + "google/cloud/dataproc_v1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-dataproc-metastore": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -746,19 +797,21 @@ "release-type": "python" }, "packages/google-cloud-dialogflow": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, "component": "google-cloud-dialogflow", "extra-files": [ - "google/cloud/dialogflow_v2beta1/gapic_version.py", "google/cloud/dialogflow/gapic_version.py", "google/cloud/dialogflow_v2/gapic_version.py", + "google/cloud/dialogflow_v2beta1/gapic_version.py", { "jsonpath": "$.clientLibrary.version", - "path": "samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json", "type": "json" }, { "jsonpath": "$.clientLibrary.version", - "path": "samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json", + "path": "samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json", "type": "json" } ], @@ -806,6 +859,16 @@ ], "release-type": "python" }, + "packages/google-cloud-dlp": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-dlp", + "extra-files": [ + "google/cloud/dlp/gapic_version.py", + "google/cloud/dlp_v2/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-dms": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -1005,6 +1068,16 @@ ], "release-type": "python" }, + "packages/google-cloud-gsuiteaddons": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-gsuiteaddons", + "extra-files": [ + "google/cloud/gsuiteaddons/gapic_version.py", + "google/cloud/gsuiteaddons_v1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-iam": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -1289,7 +1362,12 @@ "component": "google-cloud-network-services", "extra-files": [ "google/cloud/network_services/gapic_version.py", - "google/cloud/network_services_v1/gapic_version.py" + "google/cloud/network_services_v1/gapic_version.py", + { + "jsonpath": "$.clientLibrary.version", + "path": "samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json", + "type": "json" + } ], "release-type": "python" }, @@ -1351,6 +1429,17 @@ ], "release-type": "python" }, + "packages/google-cloud-os-config": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-os-config", + "extra-files": [ + "google/cloud/osconfig/gapic_version.py", + "google/cloud/osconfig_v1/gapic_version.py", + "google/cloud/osconfig_v1alpha/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-phishing-protection": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -1576,6 +1665,17 @@ ], "release-type": "python" }, + "packages/google-cloud-scheduler": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-scheduler", + "extra-files": [ + "google/cloud/scheduler/gapic_version.py", + "google/cloud/scheduler_v1/gapic_version.py", + 
"google/cloud/scheduler_v1beta1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-secret-manager": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -1707,15 +1807,12 @@ "release-type": "python" }, "packages/google-cloud-storage-transfer": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, "component": "google-cloud-storage-transfer", "extra-files": [ - "google/cloud/storage_transfer_v1/gapic_version.py", "google/cloud/storage_transfer/gapic_version.py", - { - "jsonpath": "$.clientLibrary.version", - "path": "samples/generated_samples/snippet_metadata_google.storagetransfer.v1.json", - "type": "json" - } + "google/cloud/storage_transfer_v1/gapic_version.py" ], "release-type": "python" }, @@ -1770,6 +1867,29 @@ ], "release-type": "python" }, + "packages/google-cloud-tasks": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-tasks", + "extra-files": [ + "google/cloud/tasks/gapic_version.py", + "google/cloud/tasks_v2/gapic_version.py", + "google/cloud/tasks_v2beta2/gapic_version.py", + "google/cloud/tasks_v2beta3/gapic_version.py" + ], + "release-type": "python" + }, + "packages/google-cloud-texttospeech": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-texttospeech", + "extra-files": [ + "google/cloud/texttospeech/gapic_version.py", + "google/cloud/texttospeech_v1/gapic_version.py", + "google/cloud/texttospeech_v1beta1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-tpu": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, @@ -1827,6 +1947,20 @@ ], "release-type": "python" }, + "packages/google-cloud-videointelligence": { + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": true, + "component": "google-cloud-videointelligence", + "extra-files": [ + "google/cloud/videointelligence/gapic_version.py", + "google/cloud/videointelligence_v1/gapic_version.py", + "google/cloud/videointelligence_v1beta2/gapic_version.py", + "google/cloud/videointelligence_v1p1beta1/gapic_version.py", + "google/cloud/videointelligence_v1p2beta1/gapic_version.py", + "google/cloud/videointelligence_v1p3beta1/gapic_version.py" + ], + "release-type": "python" + }, "packages/google-cloud-vm-migration": { "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, diff --git a/scripts/split_repo_migration/single-library.post-process.api-files.sh b/scripts/split_repo_migration/single-library.post-process.api-files.sh index 25426d6116c0..a4335ddd3021 100755 --- a/scripts/split_repo_migration/single-library.post-process.api-files.sh +++ b/scripts/split_repo_migration/single-library.post-process.api-files.sh @@ -127,22 +127,6 @@ EOF pushd "${PATH_MONOREPO}" >& /dev/null -## START system tests check ######################################## -# variable prefix: TST_* - -# If there are integration tests, do not proceed with the script. - -TST_MONO_TESTDIR="${MONOREPO_PATH_PACKAGE}/tests/" -TST_MONO_SYSTEM_DIR="${TST_MONO_TESTDIR}system" -TST_MONO_SYSTEM_FILE="${TST_MONO_TESTDIR}system.py" -echo "Checking for system tests in ${TST_MONO_TESTDIR}" - -[[ ! -f ${TST_MONO_SYSTEM_FILE} ]] || \ - { echo "ERROR: ${TST_MONO_SYSTEM_FILE} exists. Need to manually deal with that." ; return -10 ; } -[[ ! -d ${TST_MONO_SYSTEM_DIR} ]] || \ - { echo "ERROR: ${TST_MONO_SYSTEM_DIR} exists. Need to manually deal with that." 
; return -11 ; } -## END system tests check - ## START owlbot.yaml migration ######################################## # variable prefix: OWY_* # FIXME: KEEP? diff --git a/scripts/split_repo_migration/single-library.post-process.common-files.sh b/scripts/split_repo_migration/single-library.post-process.common-files.sh index cd25c91c6a10..d79546bd234e 100755 --- a/scripts/split_repo_migration/single-library.post-process.common-files.sh +++ b/scripts/split_repo_migration/single-library.post-process.common-files.sh @@ -132,22 +132,6 @@ EOF pushd "${PATH_MONOREPO}" >& /dev/null -## START system tests check ######################################## -# variable prefix: TST_* - -# If there are integration tests, do not proceed with the script. - -TST_MONO_TESTDIR="${MONOREPO_PATH_PACKAGE}/tests/" -TST_MONO_SYSTEM_DIR="${TST_MONO_TESTDIR}system" -TST_MONO_SYSTEM_FILE="${TST_MONO_TESTDIR}system.py" -echo "Checking for system tests in ${TST_MONO_TESTDIR}" - -[[ ! -f ${TST_MONO_SYSTEM_FILE} ]] || \ - { echo "ERROR: ${TST_MONO_SYSTEM_FILE} exists. Need to manually deal with that." ; return -10 ; } -[[ ! -d ${TST_MONO_SYSTEM_DIR} ]] || \ - { echo "ERROR: ${TST_MONO_SYSTEM_DIR} exists. Need to manually deal with that." ; return -11 ; } -## END system tests check - ## START release-please config migration ######################################## # variable prefix: RPC_*